diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 53c21e6..e0e926c 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -32,7 +32,7 @@ on:
     - master
 
 env:
-  SPARK_VERSION: '3.5.1'
+  SPARK_VERSION: '3.5.2'
   HADOOP_VERSION: '3'
 
 permissions:
@@ -84,7 +84,7 @@ jobs:
         tar -xzf spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }}.tgz
         # Delete the old file
         rm spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }}.tgz
-        
+
         # Setup the Environment Variables
         echo "Apache Spark is ready to use"
         echo "SPARK_HOME=${GITHUB_WORKSPACE}/deps/spark-${{ env.SPARK_VERSION }}-bin-hadoop${{ env.HADOOP_VERSION }}" >> "$GITHUB_ENV"
diff --git a/README.md b/README.md
index ff158af..1bfaffe 100644
--- a/README.md
+++ b/README.md
@@ -35,7 +35,7 @@ Step 4: Setup the Spark Driver on localhost.
 2. Start the Spark Connect server with the following command (make sure to use a package version that matches your Spark distribution):
 
 ```
-sbin/start-connect-server.sh --packages org.apache.spark:spark-connect_2.12:3.5.0
+sbin/start-connect-server.sh --packages org.apache.spark:spark-connect_2.12:3.5.2
 ```
 
 Step 5: Run the example Go application.
diff --git a/internal/tests/integration/spark_runner.go b/internal/tests/integration/spark_runner.go
index 86a8881..060693f 100644
--- a/internal/tests/integration/spark_runner.go
+++ b/internal/tests/integration/spark_runner.go
@@ -36,7 +36,7 @@ func StartSparkConnect() (int64, error) {
 	cmd := exec.Command("./sbin/start-connect-server.sh", "--wait",
 		"--conf", "spark.log.structuredLogging.enabled=false",
 		"--packages",
-		"org.apache.spark:spark-connect_2.12:3.5.1")
+		"org.apache.spark:spark-connect_2.12:3.5.2")
 	cmd.Dir = sparkHome
 
 	stdout, _ := cmd.StdoutPipe()
diff --git a/quick-start.md b/quick-start.md
index c26140e..cf1b83a 100644
--- a/quick-start.md
+++ b/quick-start.md
@@ -115,7 +115,7 @@ func main() {
 Download a Spark distribution (3.5.0+), unzip the folder, run command:
 
 ```
-sbin/start-connect-server.sh --packages org.apache.spark:spark-connect_2.12:3.5.0
+sbin/start-connect-server.sh --packages org.apache.spark:spark-connect_2.12:3.5.2
 ```
 
 ## Run Spark Connect Client Application
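
The version bump leaves the launch path unchanged: the integration runner and the documented quick-start both bring up Spark Connect via `start-connect-server.sh`, which by default listens on port 15002. The sketch below is an illustrative, standalone way a test harness might confirm that the server is reachable before a client connects; the helper name `waitForSparkConnect`, the address, and the timeout values are assumptions for this example and do not come from the repository.

```go
// Illustrative sketch only; not part of this patch. It assumes the Spark
// Connect server started by start-connect-server.sh listens on the default
// port 15002 on localhost.
package main

import (
	"fmt"
	"net"
	"time"
)

// waitForSparkConnect polls addr until a TCP connection succeeds or the
// overall timeout elapses. It only checks that the port is open; it does
// not speak the Spark Connect (gRPC) protocol.
func waitForSparkConnect(addr string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		conn, err := net.DialTimeout("tcp", addr, 2*time.Second)
		if err == nil {
			_ = conn.Close()
			return nil
		}
		time.Sleep(500 * time.Millisecond)
	}
	return fmt.Errorf("spark connect server at %s not reachable within %s", addr, timeout)
}

func main() {
	// Adjust the address if the server was configured to bind a different port.
	if err := waitForSparkConnect("localhost:15002", 60*time.Second); err != nil {
		panic(err)
	}
	fmt.Println("Spark Connect server is up")
}
```

Polling a plain TCP dial keeps the check dependency-free; a real harness could instead issue a trivial query through the Go client once the port is open.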