@@ -28,11 +28,12 @@ def sentry_init_with_reset(sentry_init):
     _processed_integrations.discard("spark")
 
 
-@pytest.fixture(scope="function")
+@pytest.fixture(scope="session")
 def create_spark_context():
     conf = SparkConf().set("spark.driver.bindAddress", "127.0.0.1")
-    yield lambda: SparkContext(conf=conf, appName="Testing123")
-    SparkContext._active_spark_context.stop()
+    sc = SparkContext(conf=conf, appName="Testing123")
+    yield lambda: sc
+    sc.stop()
 
 
 def test_set_app_properties(create_spark_context):
@@ -61,12 +62,18 @@ def test_start_sentry_listener(create_spark_context):
 def test_initialize_spark_integration_before_spark_context_init(
     mock_patch_spark_context_init,
     sentry_init_with_reset,
-    create_spark_context,
 ):
-    sentry_init_with_reset()
-    create_spark_context()
-
-    mock_patch_spark_context_init.assert_called_once()
+    # As we are using the same SparkContext connection for the whole session,
+    # we clean it during this test.
+    original_context = SparkContext._active_spark_context
+    SparkContext._active_spark_context = None
+
+    try:
+        sentry_init_with_reset()
+        mock_patch_spark_context_init.assert_called_once()
+    finally:
+        # Restore the original one.
+        SparkContext._active_spark_context = original_context
 
 
 @patch("sentry_sdk.integrations.spark.spark_driver._activate_integration")