[CELEBORN-1230] Check working directory read and write error without … #8848
# This workflow will build a Java project with Maven
# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
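# Jobs: service unit tests plus client tests for Spark 2, Spark 3, Flink, and MapReduce, each across its supported JDK matrix.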
name: Celeborn CI
on:
  push:
    branches:
      - main
      - branch-*
  pull_request:
    branches:
      - main
      - branch-*
jobs:
  service:
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
        java:
          - 8
          - 11
          - 17
    steps:
      - uses: actions/checkout@v2
      - name: Setup JDK ${{ matrix.java }}
        uses: actions/setup-java@v2
        with:
          distribution: zulu
          java-version: ${{ matrix.java }}
          cache: maven
          check-latest: false
      - name: Test Service with Maven
        run: build/mvn -Pgoogle-mirror test
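      # Keep the unit test log as an artifact only when an earlier step in the job fails.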
      - name: Upload test log
        if: failure()
        uses: actions/upload-artifact@v3
        with:
          name: service-unit-test-log
          path: |
            **/target/unit-tests.log
      - name: Upload coverage to Codecov
        if: matrix.java == 8
        uses: codecov/codecov-action@v3
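  # The engine client jobs below share a two-pass Maven pattern:
  #   1. `clean install -DskipTests` with `-pl ... -am` builds the listed modules plus their upstream dependencies into the local repository;
  #   2. a second `test` invocation then runs only the unit tests of the listed modules.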
  spark2:
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
        java:
          - 8
        spark:
          - '2.4'
    steps:
      - uses: actions/checkout@v2
      - name: Setup JDK ${{ matrix.java }}
        uses: actions/setup-java@v2
        with:
          distribution: zulu
          java-version: ${{ matrix.java }}
          cache: maven
          check-latest: false
      - name: Test with Maven
        run: |
          SPARK_BINARY_VERSION=${{ matrix.spark }}
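          # ${SPARK_BINARY_VERSION%%.*} strips everything from the first '.', e.g. '2.4' -> '2'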
          SPARK_MAJOR_VERSION=${SPARK_BINARY_VERSION%%.*}
          PROFILES="-Pgoogle-mirror,spark-${{ matrix.spark }}"
          TEST_MODULES="client-spark/common,client-spark/spark-${SPARK_MAJOR_VERSION},client-spark/spark-${SPARK_MAJOR_VERSION}-shaded,tests/spark-it"
          build/mvn $PROFILES -pl $TEST_MODULES -am clean install -DskipTests
          build/mvn $PROFILES -pl $TEST_MODULES test
      - name: Upload test log
        if: failure()
        uses: actions/upload-artifact@v3
        with:
          name: spark-${{ matrix.spark }}-unit-test-log
          path: |
            **/target/unit-tests.log
  spark3:
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
        java:
          - 8
          - 11
          - 17
        spark:
          - '3.0'
          - '3.1'
          - '3.2'
          - '3.3'
          - '3.4'
          - '3.5'
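        # Exercise both Spark's built-in local-disk plugin and Celeborn's ShuffleDataIO implementation.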
        shuffle-plugin-class:
          - 'org.apache.spark.shuffle.sort.io.LocalDiskShuffleDataIO'
          - 'org.apache.spark.shuffle.celeborn.CelebornShuffleDataIO'
        exclude:
          # SPARK-33772: Spark supports JDK 17 since 3.3.0
          - java: 17
            spark: '3.0'
          - java: 17
            spark: '3.1'
          - java: 17
            spark: '3.2'
          # ShuffleDriverComponents#supportsReliableStorage was introduced in 3.5.0
          - shuffle-plugin-class: 'org.apache.spark.shuffle.celeborn.CelebornShuffleDataIO'
            spark: '3.0'
          - shuffle-plugin-class: 'org.apache.spark.shuffle.celeborn.CelebornShuffleDataIO'
            spark: '3.1'
          - shuffle-plugin-class: 'org.apache.spark.shuffle.celeborn.CelebornShuffleDataIO'
            spark: '3.2'
          - shuffle-plugin-class: 'org.apache.spark.shuffle.celeborn.CelebornShuffleDataIO'
            spark: '3.3'
          - shuffle-plugin-class: 'org.apache.spark.shuffle.celeborn.CelebornShuffleDataIO'
            spark: '3.4'
    steps:
      - uses: actions/checkout@v2
      - name: Setup JDK ${{ matrix.java }}
        uses: actions/setup-java@v2
        with:
          distribution: zulu
          java-version: ${{ matrix.java }}
          cache: maven
          check-latest: false
      - name: Test with Maven
        run: |
          SPARK_BINARY_VERSION=${{ matrix.spark }}
          SPARK_MAJOR_VERSION=${SPARK_BINARY_VERSION%%.*}
          PROFILES="-Pgoogle-mirror,spark-${{ matrix.spark }}"
          TEST_MODULES="client-spark/common,client-spark/spark-${SPARK_MAJOR_VERSION},client-spark/spark-${SPARK_MAJOR_VERSION}-shaded,tests/spark-it"
          build/mvn $PROFILES -pl $TEST_MODULES -am clean install -DskipTests
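          # The matrix entry picks which ShuffleDataIO implementation the Spark integration tests run against.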
          build/mvn $PROFILES -pl $TEST_MODULES -Dspark.shuffle.sort.io.plugin.class=${{ matrix.shuffle-plugin-class }} test
      - name: Upload test log
        if: failure()
        uses: actions/upload-artifact@v3
        with:
          name: spark-${{ matrix.spark }}-unit-test-log
          path: |
            **/target/unit-tests.log
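  # Flink client tests: one run per supported Flink release over the common, per-version, shaded, and integration-test modules.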
  flink:
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
        java:
          - 8
          - 11
        flink:
          - '1.14'
          - '1.15'
          - '1.17'
          - '1.18'
    steps:
      - uses: actions/checkout@v2
      - name: Setup JDK ${{ matrix.java }}
        uses: actions/setup-java@v2
        with:
          distribution: zulu
          java-version: ${{ matrix.java }}
          cache: maven
          check-latest: false
      - name: Test with Maven
        run: |
          PROFILES="-Pgoogle-mirror,flink-${{ matrix.flink }}"
          TEST_MODULES="client-flink/common,client-flink/flink-${{ matrix.flink }},client-flink/flink-${{ matrix.flink }}-shaded,tests/flink-it"
          build/mvn $PROFILES -pl $TEST_MODULES -am clean install -DskipTests
          build/mvn $PROFILES -pl $TEST_MODULES test
      - name: Upload test log
        if: failure()
        uses: actions/upload-artifact@v3
        with:
          name: flink-${{ matrix.flink }}-unit-test-log
          path: |
            **/target/unit-tests.log
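  # MapReduce client tests covering the mr, mr-shaded, and integration-test modules.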
  mr:
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
        java:
          - 8
          - 11
    steps:
      - uses: actions/checkout@v2
      - name: Setup JDK ${{ matrix.java }}
        uses: actions/setup-java@v2
        with:
          distribution: zulu
          java-version: ${{ matrix.java }}
          cache: maven
          check-latest: false
      - name: Test with Maven
        run: |
          PROFILES="-Pgoogle-mirror,mr"
          TEST_MODULES="client-mr/mr,client-mr/mr-shaded,tests/mr-it"
          build/mvn $PROFILES -pl $TEST_MODULES -am clean install -DskipTests
          build/mvn $PROFILES -pl $TEST_MODULES test
      - name: Upload test log
        if: failure()
        uses: actions/upload-artifact@v3
        with:
          name: mr-unit-test-log
          path: |
            **/target/unit-tests.log