Skip to content

Commit ec1ee73

Browse files
committed
Refactor Spark configuration keys in create_spark function
- Updated the configuration keys in the create_spark function within startup.py to use a more concise naming convention, changing 'spark.executor.memory', 'spark.executor.cores', and 'spark.executor.instances' to 'executor_memory', 'executor_cores', and 'executor_instances' respectively. - Adjusted the corresponding references in the Spark session creation logic to align with the new key names, improving consistency and readability of the configuration handling.
1 parent ed26d42 commit ec1ee73

File tree

1 file changed

+6
-6
lines changed

1 file changed

+6
-6
lines changed

docker/notebook/startup.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -116,9 +116,9 @@ def create_spark(notebook_path=None):
116116
except Exception as e:
117117
logger.error(f"Error loading config: {str(e)}. Using defaults.")
118118
config_json = {
119-
'spark.executor.memory': '1g',
120-
'spark.executor.cores': 1,
121-
'spark.executor.instances': 1
119+
'executor_memory': '1g',
120+
'executor_cores': 1,
121+
'executor_instances': 1
122122
}
123123

124124
spark = PawMarkSparkSession(
@@ -133,9 +133,9 @@ def create_spark(notebook_path=None):
133133
.config("spark.eventLog.dir", "/opt/data/spark-events") \
134134
.config("spark.history.fs.logDirectory", "/opt/data/spark-events") \
135135
.config("spark.sql.warehouse.dir", "/opt/data/spark-warehouse") \
136-
.config("executor.memory", config_json['executor.memory']) \
137-
.config("executor.cores", config_json['executor.cores']) \
138-
.config("spark.executor.instances", config_json['spark.executor.instances']) \
136+
.config("executor.memory", config_json['executor_memory']) \
137+
.config("executor.cores", config_json['executor_cores']) \
138+
.config("spark.executor.instances", config_json['executor_instances']) \
139139
.getOrCreate()
140140
)
141141

0 commit comments

Comments (0)