
fix(pyspark): suppress errors for unsupported Databricks serverless compute properties (#9830)
ArtnerC authored Aug 23, 2024
1 parent 9e52edb commit 57f5ff6
Showing 1 changed file with 13 additions and 1 deletion.
ibis/backends/pyspark/__init__.py (13 additions, 1 deletion)
@@ -181,7 +181,19 @@ def do_connect(
         # local time to UTC with microsecond resolution.
         # https://spark.apache.org/docs/latest/sql-pyspark-pandas-with-arrow.html#timestamp-with-time-zone-semantics
         self._session.conf.set("spark.sql.session.timeZone", "UTC")
-        self._session.conf.set("spark.sql.mapKeyDedupPolicy", "LAST_WIN")
+
+        # Databricks Serverless compute only supports limited properties
+        # and any attempt to set unsupported properties will result in an error.
+        # https://docs.databricks.com/en/spark/conf.html
+        try:
+            from pyspark.errors.exceptions.connect import SparkConnectGrpcException
+        except ImportError:
+            # Use a dummy class for when spark connect is not available
+            class SparkConnectGrpcException(Exception):
+                pass
+
+        with contextlib.suppress(SparkConnectGrpcException):
+            self._session.conf.set("spark.sql.mapKeyDedupPolicy", "LAST_WIN")
 
         for key, value in kwargs.items():
             self._session.conf.set(key, value)
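
For illustration, here is a minimal, self-contained sketch of the pattern this commit applies: import the Spark Connect exception type when it is available, substitute a placeholder class otherwise, and suppress the error when a property cannot be set. The set_conf_best_effort helper and the usage line at the bottom are hypothetical, not part of the commit.

import contextlib

# Spark Connect may not be installed; fall back to a local placeholder class
# so the suppress() call below still has a concrete exception type to catch.
try:
    from pyspark.errors.exceptions.connect import SparkConnectGrpcException
except ImportError:

    class SparkConnectGrpcException(Exception):
        pass


def set_conf_best_effort(session, key, value):
    """Set a Spark config property, ignoring backends that reject it."""
    # On Databricks Serverless, setting an unsupported property raises a
    # SparkConnectGrpcException; suppressing it lets connection setup continue.
    with contextlib.suppress(SparkConnectGrpcException):
        session.conf.set(key, value)


# Hypothetical usage with an existing SparkSession:
# set_conf_best_effort(spark_session, "spark.sql.mapKeyDedupPolicy", "LAST_WIN")

Suppressing the exception keeps do_connect usable on serverless compute at the cost of silently skipping the deduplication setting; an alternative design would be to log a warning when the property is rejected.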
