@@ -1,6 +1,7 @@
 from sentry_sdk import configure_scope
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration
+from sentry_sdk.utils import capture_internal_exceptions


 class SparkIntegration(Integration):
@@ -54,37 +55,39 @@ def _sentry_patched_spark_context_init(self, *args, **kwargs):
         _start_sentry_listener(self)
         _set_app_properties()

-        with configure_scope() as scope:
+        with capture_internal_exceptions():
+            with configure_scope() as scope:
+
+                @scope.add_event_processor
+                def process_event(event, hint):
+                    if Hub.current.get_integration(SparkIntegration) is None:
+                        return event
+
+                    event.setdefault("user", {}).setdefault("id", self.sparkUser())
+
+                    event.setdefault("tags", {}).setdefault(
+                        "executor.id", self._conf.get("spark.executor.id")
+                    )
+                    event["tags"].setdefault(
+                        "spark.submit.deployMode",
+                        self._conf.get("spark.submit.deployMode"),
+                    )
+                    event["tags"].setdefault(
+                        "driver.host", self._conf.get("spark.driver.host")
+                    )
+                    event["tags"].setdefault(
+                        "driver.port", self._conf.get("spark.driver.port")
+                    )
+                    event["tags"].setdefault("spark_version", self.version)
+                    event["tags"].setdefault("app_name", self.appName)
+                    event["tags"].setdefault("application_id", self.applicationId)
+                    event["tags"].setdefault("master", self.master)
+                    event["tags"].setdefault("spark.home", self.sparkHome)
+
+                    event.setdefault("extra", {}).setdefault("web_url", self.uiWebUrl)

-            @scope.add_event_processor
-            def process_event(event, hint):
-                if Hub.current.get_integration(SparkIntegration) is None:
                     return event

-                event.setdefault("user", {}).setdefault("id", self.sparkUser())
-
-                event.setdefault("tags", {}).setdefault(
-                    "executor.id", self._conf.get("spark.executor.id")
-                )
-                event["tags"].setdefault(
-                    "spark.submit.deployMode", self._conf.get("spark.submit.deployMode")
-                )
-                event["tags"].setdefault(
-                    "driver.host", self._conf.get("spark.driver.host")
-                )
-                event["tags"].setdefault(
-                    "driver.port", self._conf.get("spark.driver.port")
-                )
-                event["tags"].setdefault("spark_version", self.version)
-                event["tags"].setdefault("app_name", self.appName)
-                event["tags"].setdefault("application_id", self.applicationId)
-                event["tags"].setdefault("master", self.master)
-                event["tags"].setdefault("spark.home", self.sparkHome)
-
-                event.setdefault("extra", {}).setdefault("web_url", self.uiWebUrl)
-
-                return event
-
         return init

 SparkContext._do_init = _sentry_patched_spark_context_init
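The change wraps the scope setup in capture_internal_exceptions(), so an error while registering the event processor is logged by the SDK's internal error handling instead of breaking SparkContext initialization; the processor itself still enriches every event with the Spark user, executor and driver details, and application metadata. Below is a minimal, standalone sketch of that scope/event-processor pattern (the DSN, the "app_name" tag, and the capture_message call are illustrative placeholders, not part of the integration):

```python
# Minimal sketch of the scope/event-processor pattern used in the patch above.
# The DSN and the "app_name" value are placeholders for illustration only.
import sentry_sdk
from sentry_sdk import configure_scope
from sentry_sdk.utils import capture_internal_exceptions

sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")

with capture_internal_exceptions():
    # Any exception raised while setting up the scope is swallowed and logged
    # by the SDK instead of propagating to the caller, which is the guarantee
    # the patch adds around SparkContext initialization.
    with configure_scope() as scope:

        @scope.add_event_processor
        def process_event(event, hint):
            # setdefault() only fills values that are not already present,
            # so tags set explicitly by application code are never overwritten.
            event.setdefault("tags", {}).setdefault("app_name", "example-app")
            return event

# The processor stays registered on the current scope, so later events
# (for example this message) carry the "app_name" tag.
sentry_sdk.capture_message("spark context initialized")
```

Because every field is written with setdefault, values already present on the event, such as tags set by application code, take precedence over the integration's defaults.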