## DO NOT EDIT
## Generated from build-iroha1.src.yml with make-workflows.sh
name: Iroha1
## IMPORTANT
## This workflow does not run for forks, check build-iroha1-fork for pull requests from forks
## TODO IMPORTANT Disallow deploying builds of tags and of the main and develop branches where skip_testing was set.
## TODO 1. [vcpkg,optimization-space,optimization-speed]
## Build only Debug or only Release - halves vcpkg build duration and output size (see the sketch after this comment block),
## see directory triplets/, `vcpkg help triplets` and link: https://stackoverflow.com/a/52781832/3743145
## TODO 2. [vcpkg] Do not rebuild vcpkg-tool every time [takes about 1min], see build_iroha_deps.sh
## TODO 3. [vcpkg] Use binarycaching on CI, instead of current cache files step, (issue with gh token permissions and PRs from forks)
## https://devblogs.microsoft.com/cppblog/vcpkg-accelerate-your-team-development-environment-with-binary-caching-and-manifests/
## TODO 3. [speed,hard-task] better caching utilizing diff-backup style tools like restic and rclone
## to improve performance and reduce storage consumption.
## Store ccache with rclone; maybe also store the vcpkg binary cache with rclone.
## The problem/pitfall is getting an access token during a build from a fork.
## TODO 4. [speed] Self-hosted macOS runners (they are not cheap)
## A more powerful Mac machine is needed to reduce build time from 40min to 10min with a hot vcpkg cache,
## and from 2hrs to 27min without a cache.
## GitHub's default runners also idle for a long time before starting when builds are frequent.
## TODO 5. [speed,optimization,resources] Cancel previous runs if they have been running for less than 10 minutes; protect almost-finished builds from being killed.
## TODO [prettify,documentation] update status badges in /README.md
## TODO [minor] windows
## TODO actions/create-release for main branch and tags
## TODO [cmake,dockerimage,iroha-builder] To improve speed of vcpkg step install to iroha-builder
## docker image cmake 3.20.1 or later and ninja 1.10.1 or later.
## TODO actions/create-release for tags
## PITFALLS
## * checkout issue on self-hosted runner with Docker, see https://github.com/actions/runner/issues/434
## KNOWN ISSUES and SETUP FEATURES
## * GitHub-hosted runners have quotas and differ in machine power (CPU, RAM, disk), so build and execution times can vary severalfold.
## * It can take a long time to wait for a GitHub-hosted runner to become available, especially if the repo's quota is exceeded.
## * Our self-hosted runners are hosted on AWS via terraform-aws-github-runner, configured by tg:@bulat5 and tg:@Sofiane_bnh.
## * AWS-hosted runners are spot instances with gh-runner installed; they are woken when GitHub Actions requests job handling,
## and are dropped when idling longer than a timeout (a few minutes).
## * AWS-hosted runners can respond slowly; it often takes about a minute or more to prepare a runner if one was not already idling.
## * AWS-hosted runners have a maximum-active-runners quota; ask @bulat5 or @Sofiane_bnh to increase it if required.
## * Our AWS-hosted runners are shared with Iroha2
## * Docker containers must run as root, see checkout issue https://github.com/actions/runner/issues/434
## TODO Split these into separate workflows to reduce the number of conditionals inside jobs, e.g. 'step_detect_commented_pr'
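## Sketch for TODO 1 above: an overlay triplet that restricts vcpkg to Release-only builds.
## File names and paths here are assumptions for illustration; VCPKG_BUILD_TYPE is vcpkg's documented triplet variable.
##   cp triplets/x64-linux.cmake triplets/x64-linux-rel.cmake              # hypothetical triplet file
##   echo 'set(VCPKG_BUILD_TYPE release)' >> triplets/x64-linux-rel.cmake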
on:
push:
branches: [main, support/1.*, edge, develop]
tags: '**'
pull_request:
branches: [main, support/1.*, edge, develop] ## target branches
workflow_dispatch:
## NOTE: Able to run via cmdline: gh workflow run Iroha1
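## A minimal sketch of such a manual run (the -f/--field flag name is assumed; check `gh workflow run --help`):
##   gh workflow run Iroha1 -f build_spec='/build ubuntu release gcc-10'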
inputs:
build_spec:
description: |
See chatops-gen-matrix.sh, example "/build ubuntu macos gcc-9 burrow"
EXAMPLE build_spec:
/build ubuntu release gcc10
/build macos llvm release; /build macos clang ursa release
/build all
/build ubuntu all ## build all possible configurations on Ubuntu
/build ubuntu burrow all ## build release and debug on Ubuntu with Burrow
AVAILABLE build_spec keywords:
ubuntu|linux
macos
windows
normal
burrow
ursa
release|Release
debug|Debug
gcc|gcc-9|gcc9
gcc-10|gcc10
clang|clang-10|clang10
llvm
skip-testing|skip_testing
all|everything|beforemerge|before_merge|before-merge|readytomerge|ready-to-merge|ready_to_merge
required: true
default: '/build skip_testing '
# issue_comment:
# types: [created, edited]
jobs:
## This workflow is intended for everything except pull requests from forks.
## This job allows the workflow to be skipped completely for pull requests from forks.
check_if_pull_request_comes_from_the_same_repo:
runs-on: ubuntu-20.04 #ubuntu-latest
name: Pull requests from forks should use other workflow
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
steps:
- name: Show context
run: |
echo "::group::GitHub context"
cat <<'END'
${{ toJson(github) }}
END
echo "::endgroup::"
echo "::group::GitHub needs"
cat <<'END'
${{ toJson(needs) }}
END
echo "::endgroup::"
## GitHub Actions workflows do not support YAML anchors,
## which is why there is a workaround with make-workflows.sh.
## You should `pre-commit install` or use `pre-commit-hook.sh`;
## either way, please read .github/README.md
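## A typical local sequence, sketched (exact usage is described in .github/README.md):
##   pre-commit install            # or run .github/pre-commit-hook.sh manually
##   ./.github/make-workflows.sh   # regenerate .github/workflows/*.yml from the *.src.yml sources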
check_workflow_yaml_coressponds_to_src_yaml:
needs: check_if_pull_request_comes_from_the_same_repo
runs-on: ubuntu-20.04 #ubuntu-latest
name: Check if github workflows were properly made from sources
steps:
- name: REF and SHA of commented PR to ENV
if: github.event.comment
run: >
curl -fsSL ${{github.event.issue.pull_request.url}} -H "Authorization: token ${{github.token}}" | jq -r '
"PR_REF="+.head.ref,
"PR_SHA="+.head.sha,
"PR_NUM="+(.number|tostring),
"PR_REPO="+.head.repo.full_name' >>$GITHUB_ENV
- name: Checkout
uses: actions/checkout@v2
with:
ref: ${{env.PR_REF}} ## not empty on issue_comment, else default value GITHUB_REF
repository: ${{env.PR_REPO}} ## not empty on issue_comment, else default value github.repository
- run: sudo snap install yq
- name: Check if .github/workflows/*.yml correspond to *.src.yml
run: |
set -x
[[ $(./.github/make-workflows.sh -x --worktree) = *"everything is up to date" ]]
## Just to react to valid comment with rocket
pr_comment_reaction_rocket:
## Just to react to valid comment with rocket
runs-on: ubuntu-20.04 #ubuntu-latest
if: ${{ github.event.comment && github.event.issue.pull_request && startsWith(github.event.comment.body, '/build') }}
steps:
- name: Show context
run: |
echo "::group::GitHub context"
cat <<'END'
${{ toJson(github) }}
END
echo "::endgroup::"
echo "::group::GitHub needs"
cat <<'END'
${{ toJson(needs) }}
END
echo "::endgroup::"
- name: Reaction
run: |
# send reaction to comment to show build was triggered
curl ${{github.event.comment.url}}/reactions \
-X POST \
-d '{"content":"rocket"}' \
-H "Accept: application/vnd.github.squirrel-girl-preview+json" \
-H "Authorization: token ${{github.token}}"
## This job generates build matrixes for the build jobs.
## The matrixes depend on what is requested to be built.
## At the moment there are several options:
## - default, on pushes and pull requests
## - on a comment to a pull request, according to the comment message (chat-ops)
## - on workflow_dispatch, according to its build_spec
## - on schedule: '/build all'
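## The generated matrixes are JSON fragments consumed below via fromJSON(); an illustrative shape only,
## the exact fields come from chatops-gen-matrix.sh:
##   {"include":[{"buildspec":"ubuntu gcc-10 Debug normal"},{"buildspec":"ubuntu gcc-10 Release normal"}]}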
generate_matrixes:
needs: check_if_pull_request_comes_from_the_same_repo
runs-on: ubuntu-20.04 #ubuntu-latest
#container: ubuntu:latest
if: ${{ (github.event_name != 'comment') || ( github.event.comment && github.event.issue.pull_request && startsWith(github.event.comment.body, '/build') ) }}
steps:
- name: Show context
run: |
echo "::group::GitHub context"
cat <<'END'
${{ toJson(github) }}
END
echo "::endgroup::"
echo "::group::GitHub needs"
cat <<'END'
${{ toJson(needs) }}
END
echo "::endgroup::"
- name: REF and SHA of commented PR to ENV
if: github.event.comment
run: >
curl -fsSL ${{github.event.issue.pull_request.url}} -H "Authorization: token ${{github.token}}" | jq -r '
"PR_REF="+.head.ref,
"PR_SHA="+.head.sha,
"PR_NUM="+(.number|tostring),
"PR_REPO="+.head.repo.full_name' >>$GITHUB_ENV
- name: Checkout
uses: actions/checkout@v2
with:
ref: ${{env.PR_REF}} ## not empty on issue_comment, else default value GITHUB_REF
repository: ${{env.PR_REPO}} ## not empty on issue_comment, else default value github.repository
- name: Generate matrix for build triggered by chat-ops - comment to PR
if: github.event.issue.pull_request && github.event.comment
id: comment_body
run: echo "${{github.event.comment.body}}" >/tmp/comment_body
- name: Generate default matrix for regular builds
if: ${{ steps.comment_body.outcome == 'skipped' }} ## i.e. not github.event.issue.pull_request
run: |
set -x
git fetch origin ${{github.event.pull_request.head.sha}} --depth=2 ## depth=2 to detect if fetched commit is merge commit
git log -1 FETCH_HEAD
commit_message_body_build_spec(){
git log -n1 $1 --format=%B | grep '^/build '
}
git_is_merge_commit(){
git rev-parse ${1:-HEAD}^2 &>/dev/null
}
commit_was_merged_build_spec(){
git_is_merge_commit $1 &&
git log -n1 $1 --format=%s | grep -q '^Merge branch' &&
echo "/build before-merge"
}
case ${{github.event_name}} in
pull_request) if commit_message_body_build_spec FETCH_HEAD >/tmp/comment_body ;then
if git_is_merge_commit FETCH_HEAD ;then
echo ::warning::'/build directive in merge commit overrides default "/build before-merge"'
fi
elif commit_was_merged_build_spec FETCH_HEAD >/tmp/comment_body ;then
true
else
#echo >/tmp/comment_body "/build debug; /build ubuntu release debug normal"
echo >/tmp/comment_body "/build ubuntu debug release normal gcc-10"$'\n' "/build macos debug clang"
fi ;;
push) commit_message_body_build_spec >/tmp/comment_body || {
echo "/build ubuntu debug release normal gcc-10"
} >/tmp/comment_body ;;
workflow_dispatch) echo >/tmp/comment_body "${{github.event.inputs.build_spec}}" ;;
*) echo >&2 "::error::Unexpected event"; false ;;
esac
- name: Generate matrixes
id: matrixes
run: |
set -x
cat /tmp/comment_body
cat /tmp/comment_body | .github/chatops-gen-matrix.sh
echo "::set-output name=matrix_ubuntu::$(cat matrix_ubuntu)"
echo "::set-output name=matrix_ubuntu_release::$(cat matrix_ubuntu_release)"
echo "::set-output name=matrix_ubuntu_debug::$(cat matrix_ubuntu_debug)"
echo "::set-output name=matrix_macos::$(cat matrix_macos)"
echo "::set-output name=matrix_windows::$(cat matrix_windows)"
echo "::set-output name=matrix_dockerimage_release::$(cat matrix_dockerimage_release)"
echo "::set-output name=matrix_dockerimage_debug::$(cat matrix_dockerimage_debug)"
## TODO report errors and warnings as an answer to the issue comment (chat-ops)
- name: Reaction confused
if: failure() && github.event.comment
run: |
# send reaction to comment to show build was triggered
curl ${{github.event.comment.url}}/reactions \
-X POST \
-d '{"content":"confused"}' \
-H "Accept: application/vnd.github.squirrel-girl-preview+json" \
-H "Authorization: token ${{github.token}}"
- name: Reaction rocket
if: github.event.comment
run: |
# send reaction to comment to show build was triggered
curl ${{github.event.comment.url}}/reactions \
-X POST \
-d '{"content":"rocket"}' \
-H "Accept: application/vnd.github.squirrel-girl-preview+json" \
-H "Authorization: token ${{github.token}}"
outputs:
matrix_ubuntu: ${{steps.matrixes.outputs.matrix_ubuntu}}
matrix_ubuntu_release: ${{steps.matrixes.outputs.matrix_ubuntu_release}}
matrix_ubuntu_debug: ${{steps.matrixes.outputs.matrix_ubuntu_debug}}
matrix_macos: ${{steps.matrixes.outputs.matrix_macos}}
matrix_windows: ${{steps.matrixes.outputs.matrix_windows}}
matrix_dockerimage_release: ${{steps.matrixes.outputs.matrix_dockerimage_release}}
matrix_dockerimage_debug: ${{steps.matrixes.outputs.matrix_dockerimage_debug}}
## Build a docker image named 'hyperledger/iroha-builder' with everything needed to compile iroha and its dependencies.
## The resulting docker image is pushed with tags :pr-NUMBER, :commit-HASH, :branch-name, :tag-name,
## and the conditional tags :edge for the development branch and :latest for git tags.
Docker-iroha-builder:
needs: check_workflow_yaml_coressponds_to_src_yaml
runs-on: ubuntu-20.04 #ubuntu-latest #[ self-hosted, Linux ]
env:
DOCKERHUB_ORG: hyperledger ## Must be hyperledger, also can use iroha1, cannot use ${{ secrets.DOCKERHUB_ORG }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
- name: Show context
run: |
echo "::group::GitHub context"
cat <<'END'
${{ toJson(github) }}
END
echo "::endgroup::"
echo "::group::GitHub needs"
cat <<'END'
${{ toJson(needs) }}
END
echo "::endgroup::"
- name: System info
run: |
set -x
whoami
id $(whoami)
free || vm_stat | perl -ne '/page size of (\d+)/ and $size=$1;
/Pages\s+([^:]+)[^\d]+(\d+)/ and printf("%-16s % 16.2f Mi\n", "$1:", $2 * $size / 1048576);'
df -h
- name: Build info
run: |
cat << 'END'
ref:${{github.ref}}
sha:${{github.sha}}
run_number:${{github.run_number}}
event_name:${{github.event_name}}
event.action:${{github.event.action}}
event.issue.number:${{ github.event.issue.number }}
END
- name: REF and SHA of commented PR to ENV
if: github.event.comment
run: >
curl -fsSL ${{github.event.issue.pull_request.url}} -H "Authorization: token ${{github.token}}" | jq -r '
"PR_REF="+.head.ref,
"PR_SHA="+.head.sha,
"PR_NUM="+(.number|tostring),
"PR_REPO="+.head.repo.full_name' >>$GITHUB_ENV
- name: Checkout
uses: actions/checkout@v2
with:
ref: ${{env.PR_REF}} ## not empty on issue_comment, else default value GITHUB_REF
repository: ${{env.PR_REPO}} ## not empty on issue_comment, else default value github.repository
- name: Determine dockertag
id: dockertag
env:
dockertag: ${{ hashFiles('docker/iroha-builder/**') }}
run: |
echo "::set-output name=dockertag::$dockertag"
echo >>$GITHUB_ENV dockertag=$dockertag
test -n "$DOCKERHUB_ORG" || {
echo ::error::"DOCKERHUB_ORG must contain value"
false
}
- name: Login to DockerHub
if: ${{ env.DOCKERHUB_TOKEN != '' && env.DOCKERHUB_USERNAME != '' }}
id: docker_login
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Log in to the Container registry GHCR
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Possible WARNING
if: ${{ steps.docker_login.outcome == 'skipped' }}
run: echo "::warning::DOCKERHUB_TOKEN and DOCKERHUB_USERNAME are empty. Will build but NOT push."
- name: Docker meta
id: meta
uses: docker/metadata-action@v3
with:
images: ${{ env.DOCKERHUB_ORG }}/iroha-builder
tags: |
type=raw,value=${{env.dockertag}}
type=ref,event=branch
type=ref,event=pr
type=ref,event=tag
type=schedule
## Docker image will be pushed with tags:
## - the hash of the docker/iroha-builder/** files (env.dockertag)
## - the branch name, when a branch is pushed
## - pr-NUMBER, when pushed to a PR
## - the git tag, when a tag is pushed
## - schedule, see the docs
- uses: docker/metadata-action@v3
name: Docker meta GHCR
id: meta_ghcr
with:
tags: |
type=raw,value=${{env.dockertag}}
type=ref,event=branch
type=ref,event=pr
type=ref,event=tag
type=schedule
## Docker image will be pushed with tags:
## - the hash of the docker/iroha-builder/** files (env.dockertag)
## - the branch name, when a branch is pushed
## - pr-NUMBER, when pushed to a PR
## - the git tag, when a tag is pushed
## - schedule, see the docs
images: ghcr.io/${{ github.repository }}-builder
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- # - &step_docker_cache
# name: Cache Docker layers
# uses: actions/cache@v2
# with:
# path: /tmp/.buildx-cache
# key: ${{ runner.os }}-buildx-${{env.dockertag}}
# restore-keys: ${{ runner.os }}-buildx-
id: build_and_push
name: Build and push
uses: docker/build-push-action@v2
with:
context: docker/iroha-builder/
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new
push: ${{ steps.docker_login.outcome == 'success' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- uses: docker/build-push-action@v2
id: build_and_push_ghcr
name: Build and push to GHCR
with:
context: docker/iroha-builder/
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new
push: ${{ github.event_name != 'pull_request' || github.event.pull_request.head.repo == github.event.pull_request.base.repo }}
tags: ${{ steps.meta_ghcr.outputs.tags }}
labels: ${{ steps.meta_ghcr.outputs.labels }}
# - &step_docker_move_cache
# # Temp fix
# # https://github.com/docker/build-push-action/issues/252
# # https://github.com/moby/buildkit/issues/1896
# name: Move cache
# run: |
# rm -rf /tmp/.buildx-cache
# mv /tmp/.buildx-cache-new /tmp/.buildx-cache
- name: Check if dockertaghash exists in remote registry
id: dockertag_already
run: |
echo "::set-output name=container::$DOCKERHUB_ORG/iroha-builder:$dockertag"
docker pull "$DOCKERHUB_ORG/iroha-builder:$dockertag"
outputs:
## WARN the secret is dropped from the output! An output may not contain a secret,
## a secret cannot be used in job:container directly, and there are no GitHub non-secret variables...
## If the dockertag is already pushed then use it, but let it be empty when the tag does not exist remotely.
dockertag: ${{steps.dockertag.outputs.dockertag}}
container: ${{steps.dockertag_already.outputs.container}}
pushed: ${{ steps.docker_login.outcome == 'success' && steps.build_and_push.outcome == 'success' }}
## Build iroha in a container made from the image prepared earlier.
## The resulting artifacts are:
## - stand-alone irohad (statically linked)
## - iroha.deb (with irohad, migration-tool, and wsv_checker inside)
build-UD:
needs:
- Docker-iroha-builder
- generate_matrixes
runs-on: [self-hosted, Linux, iroha]
container: ## Container is taken from previous job
image: ${{needs.Docker-iroha-builder.outputs.container}}
options: --user root
strategy:
fail-fast: false
matrix: ${{ fromJSON( needs.generate_matrixes.outputs.matrix_ubuntu_debug ) }}
if: ${{ fromJSON( needs.generate_matrixes.outputs.matrix_ubuntu_debug ).include[0] }}
defaults:
run:
shell: bash
steps:
- name: Show context
run: |
echo "::group::GitHub context"
cat <<'END'
${{ toJson(github) }}
END
echo "::endgroup::"
echo "::group::GitHub needs"
cat <<'END'
${{ toJson(needs) }}
END
echo "::endgroup::"
- name: Show needs
run: |
cat >/dev/null <<'END'
${{ toJson(needs) }}
END
- env:
container: ${{needs.Docker-iroha-builder.outputs.container}}
run: test -n "$container"
- name: System info
run: |
set -x
whoami
id $(whoami)
free || vm_stat | perl -ne '/page size of (\d+)/ and $size=$1;
/Pages\s+([^:]+)[^\d]+(\d+)/ and printf("%-16s % 16.2f Mi\n", "$1:", $2 * $size / 1048576);'
df -h
- name: Build info
run: |
cat << 'END'
ref:${{github.ref}}
sha:${{github.sha}}
run_number:${{github.run_number}}
event_name:${{github.event_name}}
event.action:${{github.event.action}}
event.issue.number:${{ github.event.issue.number }}
END
- name: export CC,BuildType from matrix.buildspec
run: |
echo >>$GITHUB_ENV OS=$(echo ${{matrix.buildspec}} | awk '{print $1}')
echo >>$GITHUB_ENV CC_NAME=$(echo ${{matrix.buildspec}} | awk '{print $2}')
echo >>$GITHUB_ENV BuildType=$(echo ${{matrix.buildspec}} | awk '{print $3}')
features=$(echo ${{matrix.buildspec}} | awk '{print $4}')
case $features in
normal) echo >>$GITHUB_ENV CMAKE_USE=""; features= ;;
ursa) echo >>$GITHUB_ENV CMAKE_USE="-DUSE_LIBURSA=ON";;
burrow) echo >>$GITHUB_ENV CMAKE_USE="-DUSE_BURROW=ON";;
*) echo "::error::Unknown features '$features'"; false ;;
esac
echo >>$GITHUB_ENV features="$features"
echo >>$GITHUB_ENV skip_testing=$(echo ${{matrix.buildspec}} | grep -Fo skip_testing)
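## For example, a matrix entry with buildspec "ubuntu gcc-10 Debug burrow" exports (illustrative values):
##   OS=ubuntu CC_NAME=gcc-10 BuildType=Debug features=burrow CMAKE_USE=-DUSE_BURROW=ON skip_testing=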
- name: REF and SHA of commented PR to ENV
if: github.event.comment
run: >
curl -fsSL ${{github.event.issue.pull_request.url}} -H "Authorization: token ${{github.token}}" | jq -r '
"PR_REF="+.head.ref,
"PR_SHA="+.head.sha,
"PR_NUM="+(.number|tostring),
"PR_REPO="+.head.repo.full_name' >>$GITHUB_ENV
- name: Checkout
uses: actions/checkout@v2
with:
ref: ${{env.PR_REF}} ## not empty on issue_comment, else default value GITHUB_REF
repository: ${{env.PR_REPO}} ## not empty on issue_comment, else default value github.repository
fetch-depth: 0 ## full history
- name: export CC and CXX
env:
CCACHE_PATH: /usr/lib/ccache
run: |
set -xeu #o pipefail
if test $CC_NAME = llvm
then CC=/usr/local/opt/llvm/bin/clang
else CC=$CC_NAME
fi
echo >>$GITHUB_ENV CC=$CC
echo >>$GITHUB_ENV CXX=$(echo $CC | sed -es,gcc,g++, -es,clang,clang++,)
echo >>$GITHUB_PATH $CCACHE_PATH
ls -lA $CCACHE_PATH
$(realpath $CCACHE_PATH/gcc) --show-config
echo >>$GITHUB_ENV _CCACHE_DIR=$($(realpath $CCACHE_PATH/gcc) --show-config | sed -nE 's,.*cache_dir = ,,p')
echo >>$GITHUB_ENV NPROC=$(nproc | awk '{printf("%.0f",$1*0.77)}')
echo >>$GITHUB_ENV HOME=$HOME
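## Note: NPROC above is ~77% of the available cores (e.g. 16 cores -> NPROC=12) to leave CPU and memory headroom.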
- # - &step_restore_ccache
# name: Restore cache CCache
# uses: actions/cache@v2
# with:
# path: ${{ env._CCACHE_DIR }}
# key: ${{runner.os}}-ccache-${{ github.event.pull_request.head.sha }}
# restore-keys: ${{runner.os}}-ccache-
# - &step_store_ccache_stats
# run: ccache --show-stats | tee /tmp/ccache-stats
# - &step_vcpkg_cache
# ## Read the docs https://vcpkg.readthedocs.io/en/latest/users/binarycaching/ https://github.com/microsoft/vcpkg/blob/master/docs/users/binarycaching.md
# name: Restore cache Vcpkg binarycache ## NOTE not using NuGet because on ubuntu nuget needs mono (433MB), unusable.
# uses: actions/cache@v2
# with:
# path: |
# ${{env.HOME}}/.cache/vcpkg/archives
# key: ${{runner.os}}-vcpkg-${{env.CC_NAME}}.${{ hashFiles('vcpkg/**') }}
# restore-keys: ${{runner.os}}-vcpkg-${{env.CC_NAME}}.
name: Build iroha vcpkg dependencies
run: ./vcpkg/build_iroha_deps.sh $PWD/vcpkg-build; test -f $PWD/vcpkg-build/scripts/buildsystems/vcpkg.cmake
## Takes 48m16s on default GitHub runner with 2 cores
## Takes 13m41s on self-hosted AWS EC2 c5.x4large
# ________________________________________________________
# Executed in 32,08 mins fish external
# usr time 110,52 mins 0,24 millis 110,52 mins
# sys time 12,26 mins 1,34 millis 12,26 mins
#
# All requested packages are currently installed.
# ________________________________________________________
# Executed in 3,17 secs fish external
# usr time 2,05 secs 128,00 micros 2,05 secs
# sys time 0,70 secs 575,00 micros 0,70 secs
- name: CMake configure
## Takes 13s on regular GitHub runner
run: cmake -B build -DCMAKE_TOOLCHAIN_FILE=$PWD/vcpkg-build/scripts/buildsystems/vcpkg.cmake -DCMAKE_BUILD_TYPE=${{ env.BuildType }} -GNinja $CMAKE_USE -DTESTING=$( test "$skip_testing" = skip_testing && echo OFF || echo ON ) -DBENCHMARKING=$( test "$skip_testing" = skip_testing && echo OFF || echo ON ) -DPACKAGE_DEB=ON
#-DCMAKE_VERBOSE_MAKEFILE=ON ## Note: use for debug
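## The same configure call spread over lines for readability (illustrative; the run line above is authoritative):
##   cmake -B build -GNinja \
##     -DCMAKE_TOOLCHAIN_FILE=$PWD/vcpkg-build/scripts/buildsystems/vcpkg.cmake \
##     -DCMAKE_BUILD_TYPE=$BuildType $CMAKE_USE \
##     -DTESTING=ON -DBENCHMARKING=ON -DPACKAGE_DEB=ON
## where TESTING and BENCHMARKING become OFF when skip_testing is set.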
- name: CMake build
run: |
set -x
## reduce parallelism so the build does not run out of memory
cmake --build build --config ${{ env.BuildType }} -- -j$(nproc | awk '{printf("%.0f",$1*0.77)}')
echo ::notice::"$(./build/bin/irohad --version)"
## Debug takes 18m44s on regular GitHub runner
## Debug takes 7m41s on self-hosted AWS EC2 c5.x4large
## Release takes 2m58s on self-hosted AWS EC2 c5.x4large
- name: CPack (linux only)
run: cd build; cpack; ## cmake --build build --target package
- # - &step_compare_ccache_stats
# run: ccache --show-stats | diff --side-by-side /tmp/ccache-stats - ||true
name: Show free space and disk usage
if: ${{ always() }}
run: |
df -h || true
- name: Generate artifact suffix depending on matrix to env.ARTIFACT_SUFFIX
run: |
set -x
cc=$(echo $CC_NAME | sed -Ee's,[-/],,g' )
build_type=$(echo $BuildType | tr A-Z a-z | sed -E -es,debug,dbg, -es,release,rel, )
test $build_type = dbg -o $build_type = rel
uses=$(echo "$features" | tr ' ' - | tr A-Z a-z)
_os=${OS:+-$OS} _cc=${cc:+-$cc} _build_type=${build_type:+-$build_type} _uses=${uses:+-$uses}
echo >>$GITHUB_ENV ARTIFACT_SUFFIX=$_os$_cc$_build_type$_uses
echo >>$GITHUB_ENV _uses_suffix=$_uses
echo >>$GITHUB_ENV _compiler_suffix=$(test $cc != gcc9 && echo $_cc)
echo >>$GITHUB_ENV _debug_suffix=$(test "$build_type" = dbg && echo -debug || true)
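## For example, buildspec "ubuntu gcc-10 Debug normal" yields ARTIFACT_SUFFIX=-ubuntu-gcc10-dbg (illustrative).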
- name: Upload artifact irohad
uses: actions/upload-artifact@v2
with:
name: irohad${{env.ARTIFACT_SUFFIX}}
path: |
build/bin/irohad
build/bin/iroha-cli
- name: Upload artifact iroha-deb
uses: actions/upload-artifact@v2
with:
name: iroha-deb${{env.ARTIFACT_SUFFIX}}
path: |
build/*.deb
- if: ${{ false }} ## Maybe test in another job
name: Upload artifact tests
uses: actions/upload-artifact@v2
with:
name: iroha-tests-ubuntu${{env.ARTIFACT_SUFFIX}}
path: |
build/test_bin/**
build/test_data/**
- timeout-minutes: 40
if: env.skip_testing == ''
name: CTest
run: |
echo ::group::'boilerplate'
set -euo pipefail
if test $(uname) = Darwin ;then
## This is a common portable solution, but Debian and Ubuntu have their own wrappers
initdb --locale=C --encoding=UTF-8 --username=postgres $PWD/postgres_database
postgres -D $PWD/postgres_database -p5432 2>&1 >/tmp/postgres.log & { sleep .3; kill -0 $!; } ## with pg_ctl there would be no need for &
else ## Debian or Ubuntu
## Need to go the Debian-specific way because
## initdb is not allowed to be run as root, but we need to run as root
## because GitHub Actions runners have many issues with permissions.
mkdir postgres_database && chown iroha-ci postgres_database
echo /usr/lib/postgresql/12/bin/initdb --locale=C --encoding=UTF-8 --username=postgres $PWD/postgres_database | su iroha-ci
echo /usr/lib/postgresql/12/bin/pg_ctl start -D $PWD/postgres_database --log=$PWD/postgres_database/log | su iroha-ci
# ## Need to go debian-specific way because
# ## initdb is not allowed to be run as root, but we need to run as root
# ## because GitHub actions runners have much issues with permissions.
# cat <<END >/etc/postgresql/12/main/pg_hba.conf
# # TYPE DATABASE USER ADDRESS METHOD
# local all all trust
# host all all 127.0.0.1/32 trust
# host all all ::1/128 trust
# local replication all trust
# host replication all 127.0.0.1/32 trust
# host replication all ::1/128 trust
# END
# pg_ctlcluster 12 main start ## Cluster 'main' exist by default
# #OR pg_createcluster -p 5432 --start 12 iroha -- --locale=C --encoding=UTF-8 --username=postgres
fi
cd build
## This is just a small lifehack to TEMPORARILY allow some tests to fail
cat ../.github/TESTS_ALLOWED_TO_FAIL | sort -u >ALLOW_TO_FAIL || true
if test -e ALLOW_TO_FAIL
then echo "::warning:: There are TESTS_ALLOWED_TO_FAIL: "$(cat ALLOW_TO_FAIL)
fi
grep_failed_tests(){
grep 'The following tests FAILED:' -A10000 "$@" | tail +2 #| cut -d- -f2 | cut -d' ' -f2 | sort
}
exclude_allowed_to_fail(){
grep -Fvf ALLOW_TO_FAIL "$@"
}
only_allowed_to_fail(){
grep -Ff ALLOW_TO_FAIL "$@"
}
list_to_line(){
comma=''
while read N d name sta ;do
echo -n "$comma$N-$name$sta"
comma=', '
done
}
echo ::endgroup::
## Run module_* tests in parallel and the others sequentially
## Categories sorted in order of importance
CTEST_CATHEGORIES=( module tool framework regression system integration )
## Add the rest of the available categories
CTEST_CATHEGORIES+=( $(
ctest --show-only=json-v1 -R "^(module|tool|framework|regression|system|integration)" |
jq -r .tests[].name |
cut -f1 -d_ |
sort -u |
grep -Fvf <( printf '%s\n' ${CTEST_CATHEGORIES[@]} )
)
) || true
CTEST_DEFAULT_timeout=80
CTEST_module_timeout=120 CTEST_module_parallel=4
CTEST_tool_timeout=200
CTEST_integration_timeout=120
CTEST_integration_args='--repeat until-pass:10' ## FIXME remove this hack
CTEST_system_args='--repeat until-pass:10' ## FIXME remove this hack
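## Indirection example for the loop below: with cathegory=module the lookups resolve to timeout=120 and parallel=4;
## categories without their own CTEST_<cathegory>_timeout fall back to the CTEST_DEFAULT_timeout of 80 seconds.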
for cathegory in ${CTEST_CATHEGORIES[@]} ;do
echo >&2 ::group::"$cathegory tests"
set -x
timeout_name=CTEST_${cathegory}_timeout; timeout=${!timeout_name:-$CTEST_DEFAULT_timeout}
parallel_name=CTEST_${cathegory}_parallel; parallel=${!parallel_name:-}
args_name=CTEST_${cathegory}_args; args=${!args_name:-}
ctest -R "^${cathegory}_" ${parallel:+--parallel $parallel} --output-on-failure --no-tests=error --timeout $timeout ${args:-} \
| tee ctest_$cathegory.out \
|| true
set +x
echo >&2 ::endgroup::
done
tests_passed=true
for t in ${CTEST_CATHEGORIES[@]} ;do
f=ctest_$t.out
if a=$(grep_failed_tests $f | exclude_allowed_to_fail | list_to_line) ;then
echo "::error::The following $(echo $t | tr a-z A-Z) tests FAILED but not in list ALLOW_TO_FAIL: $a"
tests_passed=false
fi
if o=$(grep_failed_tests $f | only_allowed_to_fail | list_to_line) ;then
echo "::warning::The following $t tests FAILED and ALLOWED TO FAIL: $o"
fi
done
$tests_passed
## Because a Release build is 2-3 times faster than Debug, make it a separate job
build-UR:
needs:
- Docker-iroha-builder
- generate_matrixes
runs-on: [self-hosted, Linux, iroha]
container: ## Container is taken from previous job
image: ${{needs.Docker-iroha-builder.outputs.container}}
options: --user root
strategy:
fail-fast: false
matrix: ${{ fromJSON( needs.generate_matrixes.outputs.matrix_ubuntu_release ) }}
if: ${{ fromJSON( needs.generate_matrixes.outputs.matrix_ubuntu_release ).include[0] }}
defaults:
run:
shell: bash
steps:
- name: Show context
run: |
echo "::group::GitHub context"
cat <<'END'
${{ toJson(github) }}
END
echo "::endgroup::"
echo "::group::GitHub needs"
cat <<'END'
${{ toJson(needs) }}
END
echo "::endgroup::"
- name: Show needs
run: |
cat >/dev/null <<'END'
${{ toJson(needs) }}
END
- env:
container: ${{needs.Docker-iroha-builder.outputs.container}}
run: test -n "$container"
- name: System info
run: |
set -x
whoami
id $(whoami)
free || vm_stat | perl -ne '/page size of (\d+)/ and $size=$1;
/Pages\s+([^:]+)[^\d]+(\d+)/ and printf("%-16s % 16.2f Mi\n", "$1:", $2 * $size / 1048576);'
df -h
- name: Build info
run: |
cat << 'END'
ref:${{github.ref}}
sha:${{github.sha}}
run_number:${{github.run_number}}
event_name:${{github.event_name}}
event.action:${{github.event.action}}
event.issue.number:${{ github.event.issue.number }}
END
- name: export CC,BuildType from matrix.buildspec
run: |
echo >>$GITHUB_ENV OS=$(echo ${{matrix.buildspec}} | awk '{print $1}')
echo >>$GITHUB_ENV CC_NAME=$(echo ${{matrix.buildspec}} | awk '{print $2}')
echo >>$GITHUB_ENV BuildType=$(echo ${{matrix.buildspec}} | awk '{print $3}')
features=$(echo ${{matrix.buildspec}} | awk '{print $4}')
case $features in
normal) echo >>$GITHUB_ENV CMAKE_USE=""; features= ;;
ursa) echo >>$GITHUB_ENV CMAKE_USE="-DUSE_LIBURSA=ON";;
burrow) echo >>$GITHUB_ENV CMAKE_USE="-DUSE_BURROW=ON";;
*) echo "::error::Unknown features '$features'"; false ;;
esac
echo >>$GITHUB_ENV features="$features"
echo >>$GITHUB_ENV skip_testing=$(echo ${{matrix.buildspec}} | grep -Fo skip_testing)
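## For example, a matrix entry with buildspec "ubuntu gcc-10 Release burrow" exports (illustrative values):
##   OS=ubuntu CC_NAME=gcc-10 BuildType=Release features=burrow CMAKE_USE=-DUSE_BURROW=ON skip_testing=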
- name: REF and SHA of commented PR to ENV
if: github.event.comment
run: >
curl -fsSL ${{github.event.issue.pull_request.url}} -H "Authorization: token ${{github.token}}" | jq -r '
"PR_REF="+.head.ref,
"PR_SHA="+.head.sha,
"PR_NUM="+(.number|tostring),
"PR_REPO="+.head.repo.full_name' >>$GITHUB_ENV
- name: Checkout
uses: actions/checkout@v2
with:
ref: ${{env.PR_REF}} ## not empty on issue_comment, else default value GITHUB_REF
repository: ${{env.PR_REPO}} ## not empty on issue_comment, else default value github.repository
fetch-depth: 0 ## full history
- name: export CC and CXX
env:
CCACHE_PATH: /usr/lib/ccache
run: |
set -xeu #o pipefail
if test $CC_NAME = llvm
then CC=/usr/local/opt/llvm/bin/clang
else CC=$CC_NAME
fi
echo >>$GITHUB_ENV CC=$CC
echo >>$GITHUB_ENV CXX=$(echo $CC | sed -es,gcc,g++, -es,clang,clang++,)
echo >>$GITHUB_PATH $CCACHE_PATH
ls -lA $CCACHE_PATH
$(realpath $CCACHE_PATH/gcc) --show-config
echo >>$GITHUB_ENV _CCACHE_DIR=$($(realpath $CCACHE_PATH/gcc) --show-config | sed -nE 's,.*cache_dir = ,,p')
echo >>$GITHUB_ENV NPROC=$(nproc | awk '{printf("%.0f",$1*0.77)}')
echo >>$GITHUB_ENV HOME=$HOME
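## Note: NPROC above is ~77% of the available cores (e.g. 16 cores -> NPROC=12) to leave CPU and memory headroom.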
- # - &step_restore_ccache
# name: Restore cache CCache
# uses: actions/cache@v2
# with:
# path: ${{ env._CCACHE_DIR }}
# key: ${{runner.os}}-ccache-${{ github.event.pull_request.head.sha }}
# restore-keys: ${{runner.os}}-ccache-
# - &step_store_ccache_stats
# run: ccache --show-stats | tee /tmp/ccache-stats
# - &step_vcpkg_cache
# ## Read the docs https://vcpkg.readthedocs.io/en/latest/users/binarycaching/ https://github.com/microsoft/vcpkg/blob/master/docs/users/binarycaching.md
# name: Restore cache Vcpkg binarycache ## NOTE not using NuGet because on ubuntu nuget needs mono (433MB), unusable.
# uses: actions/cache@v2
# with:
# path: |
# ${{env.HOME}}/.cache/vcpkg/archives
# key: ${{runner.os}}-vcpkg-${{env.CC_NAME}}.${{ hashFiles('vcpkg/**') }}
# restore-keys: ${{runner.os}}-vcpkg-${{env.CC_NAME}}.
name: Build iroha vcpkg dependencies
run: ./vcpkg/build_iroha_deps.sh $PWD/vcpkg-build; test -f $PWD/vcpkg-build/scripts/buildsystems/vcpkg.cmake
## Takes 48m16s on default GitHub runner with 2 cores
## Takes 13m41s on self-hosted AWS EC2 c5.x4large
# ________________________________________________________
# Executed in 32,08 mins fish external
# usr time 110,52 mins 0,24 millis 110,52 mins
# sys time 12,26 mins 1,34 millis 12,26 mins
#
# All requested packages are currently installed.
# ________________________________________________________
# Executed in 3,17 secs fish external
# usr time 2,05 secs 128,00 micros 2,05 secs
# sys time 0,70 secs 575,00 micros 0,70 secs
- name: CMake configure
## Takes 13s on regular GitHub runner
run: cmake -B build -DCMAKE_TOOLCHAIN_FILE=$PWD/vcpkg-build/scripts/buildsystems/vcpkg.cmake -DCMAKE_BUILD_TYPE=${{ env.BuildType }} -GNinja $CMAKE_USE -DTESTING=$( test "$skip_testing" = skip_testing && echo OFF || echo ON ) -DBENCHMARKING=$( test "$skip_testing" = skip_testing && echo OFF || echo ON ) -DPACKAGE_DEB=ON
#-DCMAKE_VERBOSE_MAKEFILE=ON ## Note: use for debug
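## The same configure call spread over lines for readability (illustrative; the run line above is authoritative):
##   cmake -B build -GNinja \
##     -DCMAKE_TOOLCHAIN_FILE=$PWD/vcpkg-build/scripts/buildsystems/vcpkg.cmake \
##     -DCMAKE_BUILD_TYPE=$BuildType $CMAKE_USE \
##     -DTESTING=ON -DBENCHMARKING=ON -DPACKAGE_DEB=ON
## where TESTING and BENCHMARKING become OFF when skip_testing is set.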
- name: CMake build
run: |
set -x
## reduce parallelism so the build does not run out of memory
cmake --build build --config ${{ env.BuildType }} -- -j$(nproc | awk '{printf("%.0f",$1*0.77)}')
echo ::notice::"$(./build/bin/irohad --version)"
## Debug takes 18m44s on regular GitHub runner
## Debug takes 7m41s on self-hosted AWS EC2 c5.x4large
## Release takes 2m58s on self-hosted AWS EC2 c5.x4large
- name: CPack (linux only)
run: cd build; cpack; ## cmake --build build --target package
- # - &step_compare_ccache_stats
# run: ccache --show-stats | diff --side-by-side /tmp/ccache-stats - ||true
name: Show free space and disk usage
if: ${{ always() }}
run: |
df -h || true
- name: Generate artifact suffix depending on matrix to env.ARTIFACT_SUFFIX
run: |
set -x
cc=$(echo $CC_NAME | sed -Ee's,[-/],,g' )
build_type=$(echo $BuildType | tr A-Z a-z | sed -E -es,debug,dbg, -es,release,rel, )
test $build_type = dbg -o $build_type = rel
uses=$(echo "$features" | tr ' ' - | tr A-Z a-z)
_os=${OS:+-$OS} _cc=${cc:+-$cc} _build_type=${build_type:+-$build_type} _uses=${uses:+-$uses}
echo >>$GITHUB_ENV ARTIFACT_SUFFIX=$_os$_cc$_build_type$_uses
echo >>$GITHUB_ENV _uses_suffix=$_uses
echo >>$GITHUB_ENV _compiler_suffix=$(test $cc != gcc9 && echo $_cc)
echo >>$GITHUB_ENV _debug_suffix=$(test "$build_type" = dbg && echo -debug || true)
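## For example, buildspec "ubuntu gcc-10 Release normal" yields ARTIFACT_SUFFIX=-ubuntu-gcc10-rel (illustrative).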
- name: Upload artifact irohad
uses: actions/upload-artifact@v2
with:
name: irohad${{env.ARTIFACT_SUFFIX}}
path: |
build/bin/irohad
build/bin/iroha-cli
- name: Upload artifact iroha-deb
uses: actions/upload-artifact@v2
with:
name: iroha-deb${{env.ARTIFACT_SUFFIX}}
path: |
build/*.deb
- if: ${{ false }} ## Maybe test in another job
name: Upload artifact tests
uses: actions/upload-artifact@v2
with:
name: iroha-tests-ubuntu${{env.ARTIFACT_SUFFIX}}
path: |
build/test_bin/**
build/test_data/**
- timeout-minutes: 40
if: env.skip_testing == ''
name: CTest
run: |
echo ::group::'boilerplate'
set -euo pipefail
if test $(uname) = Darwin ;then
## This is a common portable solution, but Debian and Ubuntu have their own wrappers
initdb --locale=C --encoding=UTF-8 --username=postgres $PWD/postgres_database
postgres -D $PWD/postgres_database -p5432 2>&1 >/tmp/postgres.log & { sleep .3; kill -0 $!; } ## with pg_ctl there would be no need for &
else ## Debian or Ubuntu
## Need to go the Debian-specific way because
## initdb is not allowed to be run as root, but we need to run as root
## because GitHub Actions runners have many issues with permissions.
mkdir postgres_database && chown iroha-ci postgres_database
echo /usr/lib/postgresql/12/bin/initdb --locale=C --encoding=UTF-8 --username=postgres $PWD/postgres_database | su iroha-ci
echo /usr/lib/postgresql/12/bin/pg_ctl start -D $PWD/postgres_database --log=$PWD/postgres_database/log | su iroha-ci
# ## Need to go debian-specific way because
# ## initdb is not allowed to be run as root, but we need to run as root
# ## because GitHub actions runners have much issues with permissions.
# cat <<END >/etc/postgresql/12/main/pg_hba.conf
# # TYPE DATABASE USER ADDRESS METHOD
# local all all trust
# host all all 127.0.0.1/32 trust
# host all all ::1/128 trust
# local replication all trust
# host replication all 127.0.0.1/32 trust
# host replication all ::1/128 trust
# END
# pg_ctlcluster 12 main start ## Cluster 'main' exist by default
# #OR pg_createcluster -p 5432 --start 12 iroha -- --locale=C --encoding=UTF-8 --username=postgres
fi
cd build
## This is just a small lifehack to TEMPORARILY allow some tests to fail
cat ../.github/TESTS_ALLOWED_TO_FAIL | sort -u >ALLOW_TO_FAIL || true
if test -e ALLOW_TO_FAIL
then echo "::warning:: There are TESTS_ALLOWED_TO_FAIL: "$(cat ALLOW_TO_FAIL)
fi
grep_failed_tests(){
grep 'The following tests FAILED:' -A10000 "$@" | tail +2 #| cut -d- -f2 | cut -d' ' -f2 | sort
}
exclude_allowed_to_fail(){
grep -Fvf ALLOW_TO_FAIL "$@"
}
only_allowed_to_fail(){
grep -Ff ALLOW_TO_FAIL "$@"
}
list_to_line(){
comma=''
while read N d name sta ;do
echo -n "$comma$N-$name$sta"
comma=', '
done
}
echo ::endgroup::
## Run module_* tests in parallel and the others sequentially
## Categories sorted in order of importance
CTEST_CATHEGORIES=( module tool framework regression system integration )
## Add the rest of the available categories
CTEST_CATHEGORIES+=( $(
ctest --show-only=json-v1 -R "^(module|tool|framework|regression|system|integration)" |
jq -r .tests[].name |
cut -f1 -d_ |
sort -u |
grep -Fvf <( printf '%s\n' ${CTEST_CATHEGORIES[@]} )
)
) || true
CTEST_DEFAULT_timeout=80
CTEST_module_timeout=120 CTEST_module_parallel=4
CTEST_tool_timeout=200
CTEST_integration_timeout=120
CTEST_integration_args='--repeat until-pass:10' ## FIXME remove this hack
CTEST_system_args='--repeat until-pass:10' ## FIXME remove this hack
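## Indirection example for the loop below: with cathegory=module the lookups resolve to timeout=120 and parallel=4;
## categories without their own CTEST_<cathegory>_timeout fall back to the CTEST_DEFAULT_timeout of 80 seconds.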
for cathegory in ${CTEST_CATHEGORIES[@]} ;do
echo >&2 ::group::"$cathegory tests"
set -x
timeout_name=CTEST_${cathegory}_timeout; timeout=${!timeout_name:-$CTEST_DEFAULT_timeout}
parallel_name=CTEST_${cathegory}_parallel; parallel=${!parallel_name:-}
args_name=CTEST_${cathegory}_args; args=${!args_name:-}
ctest -R "^${cathegory}_" ${parallel:+--parallel $parallel} --output-on-failure --no-tests=error --timeout $timeout ${args:-} \