diff --git a/src/databricks.tf b/src/databricks.tf
index 2a2ce59..81f5a7c 100644
--- a/src/databricks.tf
+++ b/src/databricks.tf
@@ -96,7 +96,7 @@ resource "databricks_library" "wheel" {
 resource "databricks_library" "kafka" {
   cluster_id = databricks_cluster.this.id
   maven {
-    coordinates = "org.apache.spark:spark-sql-kafka-0-10_2.12:3.3.2"
+    coordinates = "org.apache.spark:spark-sql-kafka-0-10_2.12:3.4.0"
   }
 }
 
diff --git a/src/docker-compose.yml b/src/docker-compose.yml
index e18e64d..fcdd149 100644
--- a/src/docker-compose.yml
+++ b/src/docker-compose.yml
@@ -50,7 +50,7 @@ services:
     # Explicitly delete Wheel before building in order to avoid picking up existing version from mount
     entrypoint: ["bash", "-c", "rm -rf /common_lib/dist/*.whl && pip install build && python -m build /common_lib"]
   spark:
-    image: apache/spark-py:v3.3.2
+    image: apache/spark-py:v3.4.0
     ports:
       - 4040:4040
     volumes:
@@ -74,7 +74,7 @@ services:
     # The mssql-jdbc package must be explicitly added https://stackoverflow.com/questions/66903523/apache-spark-connector-for-sql-server-and-azure-sql
     # The default Ivy Cache location is not writable in the container, hence we have to override it to /tmp https://stackoverflow.com/a/69559038
     command: ["/opt/spark/bin/spark-submit", "--packages",
-      "org.apache.spark:spark-sql-kafka-0-10_2.12:3.3.2,com.microsoft.azure:spark-mssql-connector_2.12:1.3.0-BETA,com.microsoft.sqlserver:mssql-jdbc:12.2.0.jre8",
+      "org.apache.spark:spark-sql-kafka-0-10_2.12:3.4.0,com.microsoft.azure:spark-mssql-connector_2.12:1.3.0-BETA,com.microsoft.sqlserver:mssql-jdbc:12.4.0.jre8",
       "--conf", "spark.driver.extraJavaOptions=-Divy.cache.dir=/tmp -Divy.home=/tmp",
       "--conf", "spark.ui.prometheus.enabled=true",
       "--conf", "spark.executor.processTreeMetrics.enabled=true",
diff --git a/src/grafana/dashboards/spark_performance.json b/src/grafana/dashboards/spark_performance.json
index 0565f7f..3bb36dd 100644
--- a/src/grafana/dashboards/spark_performance.json
+++ b/src/grafana/dashboards/spark_performance.json
@@ -275,7 +275,7 @@
         "sortBy": [
           {
             "desc": false,
-            "displayName": "{__name__=\"spark_info\", instance=\"spark:4040\", job=\"Spark Executors Metrics\", revision=\"5103e00c4ce5fcc4264ca9c4df12295d42557af6\", version=\"3.3.2\"}"
+            "displayName": "{__name__=\"spark_info\", instance=\"spark:4040\", job=\"Spark Executors Metrics\", revision=\"87a5442f7ed96b11051d8a9333476d080054e5a0\", version=\"3.4.0\"}"
           }
         ]
       },
diff --git a/src/main.tf b/src/main.tf
index 8d1e85d..2bfd6c7 100644
--- a/src/main.tf
+++ b/src/main.tf
@@ -2,11 +2,11 @@ terraform {
   required_providers {
     azurerm = {
       source  = "hashicorp/azurerm"
-      version = "=3.48.0"
+      version = "=3.72.0"
     }
     databricks = {
       source  = "databricks/databricks"
-      version = "=1.13.0"
+      version = "=1.24.1"
     }
   }
 }
diff --git a/src/requirements.txt b/src/requirements.txt
index eb7dad8..230c5c2 100644
--- a/src/requirements.txt
+++ b/src/requirements.txt
@@ -1,6 +1,6 @@
 build
 flake8
-pyspark==3.3.2
+pyspark==3.4.0
 pytest
 pytest-github-report
 setuptools