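# Continuous integration workflow: lints the sources, pre-downloads Spark binary
# distributions, builds the project, and fans out into the Dgraph, Spark, Scala,
# Python, and integration test workflows before cleaning up build artifacts.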
name: CI

on:
  push:
    branches:
      - 'main'
    tags:
      - '*'
  pull_request:
  workflow_dispatch:

jobs:
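  # Capture the triggering event payload as an artifact so that follow-up workflows
  # (assumed here: a separate workflow that publishes test results) can read it.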
  event_file:
    name: "Event File"
    runs-on: ubuntu-latest
    steps:
      - name: Upload
        uses: actions/upload-artifact@v4
        with:
          name: Event File
          path: ${{ github.event_path }}

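  # Static checks: verify source formatting with the Spotless Maven plugin.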
  lint:
    name: "Linting"
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Cache Maven packages
        uses: actions/cache@v4
        with:
          path: ~/.m2/repository
          key: ${{ runner.os }}-mvn-lint-${{ hashFiles('pom.xml') }}
      - name: Setup JDK 11
        uses: actions/setup-java@v3
        with:
          java-version: '11'
          distribution: 'zulu'
      - name: Check
        id: check
        run: |
          mvn --batch-mode spotless:check
        shell: bash
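      # On a failing check, apply the formatting and print the diff so the
      # required changes are visible in the job log.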
      - name: Changes
        if: failure() && steps.check.outcome == 'failure'
        run: |
          mvn --batch-mode spotless:apply
          git diff
        shell: bash

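  # Download and cache the Spark binary distributions, and publish them as
  # artifacts (presumably consumed by the test workflows called below).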
  download:
    name: "Spark"
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        # use spark versions from the test-integration.yml workflow
        include:
          - spark-version: '3.0.3'
            hadoop-version: '2.7'
          - spark-version: '3.1.3'
            hadoop-version: '2.7'
          - spark-version: '3.2.4'
            hadoop-version: '2.7'
          - spark-version: '3.3.4'
            hadoop-version: '3'
          - spark-version: '3.4.3'
            hadoop-version: '3'
          - spark-version: '3.5.3'
            hadoop-version: '3'
          - spark-version: '4.0.0-preview2'
            hadoop-version: '3'
    steps:
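      # Cache the extracted Spark distribution, keyed by Spark and Hadoop version.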
      - name: Cache Spark Binaries
        uses: actions/cache@v4
        with:
          path: ~/spark
          key: ${{ runner.os }}-spark-binaries-${{ matrix.spark-version }}-${{ matrix.hadoop-version }}
      - name: Setup Spark Binaries
        env:
          SPARK_PACKAGE: spark-${{ matrix.spark-version }}/spark-${{ matrix.spark-version }}-bin-hadoop${{ matrix.hadoop-version }}.tgz
        run: |
          if [[ ! -e ~/spark ]]
          then
            # download via the Apache mirror redirector and extract into the runner's temp directory
            wget --progress=dot:giga "https://www.apache.org/dyn/closer.lua/spark/${SPARK_PACKAGE}?action=download" -O - | tar -xzC "${{ runner.temp }}"
            # strip the .tgz suffix from the archive name and move the extracted directory to ~/spark
            archive=$(basename "${SPARK_PACKAGE}") bash -c "mv -v "${{ runner.temp }}/\${archive/%.tgz/}" ~/spark"
          fi
        shell: bash
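      # Publish the distribution as an artifact, one per Spark/Hadoop combination.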
      - name: Upload Spark Binaries
        uses: actions/upload-artifact@v4
        with:
          name: Spark-Binaries-${{ matrix.spark-version }}-${{ matrix.hadoop-version }}
          path: ~/spark

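  # Build the project via the reusable build workflow; it is expected to upload
  # the "Binaries-*" artifacts that are cleaned up at the end of this workflow.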
  build:
    name: "Build"
    uses: "./.github/workflows/build.yml"

  test-dgraph:
    name: "Test Dgraph"
    needs: build
    uses: "./.github/workflows/test-dgraph.yml"

  test-spark:
    name: "Test Spark (Dgraph ${{ matrix.dgraph-version }})"
    needs: build
    strategy:
      fail-fast: false
      matrix:
        include:
          - dgraph-version: "21.12.0"
          - dgraph-version: "22.0.2"
          - dgraph-version: "23.1.1"
          - dgraph-version: "24.0.2"
    uses: "./.github/workflows/test-spark.yml"
    with:
      dgraph-version: ${{ matrix.dgraph-version }}

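  # Test the Scala 2.13 builds across the supported Spark versions.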
  test-scala:
    name: "Test Scala (2.13)"
    needs: build
    uses: "./.github/workflows/test-scala.yml"
    with:
      # not running 4.0.0-preview2 here as it is tested in test-dgraph already
      matrix: |
        {
          "include": [
            {
              "spark-version": "3.2.4",
              "spark-compat-version": "3.2",
              "scala-compat-version": "2.13",
              "scala-version": "2.13.5",
              "java-version": "11",
              "dgraph-version": "24.0.2"
            },
            {
              "spark-version": "3.3.4",
              "spark-compat-version": "3.3",
              "scala-compat-version": "2.13",
              "scala-version": "2.13.8",
              "java-version": "11",
              "dgraph-version": "24.0.2"
            },
            {
              "spark-version": "3.4.3",
              "spark-compat-version": "3.4",
              "scala-compat-version": "2.13",
              "scala-version": "2.13.8",
              "java-version": "11",
              "dgraph-version": "24.0.2"
            },
            {
              "spark-version": "3.5.3",
              "spark-compat-version": "3.5",
              "scala-compat-version": "2.13",
              "scala-version": "2.13.8",
              "java-version": "11",
              "dgraph-version": "24.0.2"
            }
          ]
        }

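  # Python and integration tests run once the Dgraph and Spark test jobs have succeeded.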
  test-python:
    name: "Test Python"
    needs: [test-dgraph, test-spark]
    uses: "./.github/workflows/test-python.yml"

  test-integration:
    name: "Test Integration"
    needs: [test-dgraph, test-spark]
    uses: "./.github/workflows/test-integration.yml"

  delete_binaries:
    name: "Delete Binaries"
    runs-on: ubuntu-latest
    needs: [test-dgraph, test-spark, test-scala, test-python, test-integration]
    steps:
      - name: Delete Binaries Artifact
        uses: geekyeggo/delete-artifact@v5
        with:
          name: "Binaries-*"