@@ -138,19 +138,16 @@ You can install the Datadog Agent using the [Datadog Operator][3] or [Helm][4].
138
138
139
139
When you run your Spark job, use the following configurations:
140
140
141
- ` spark.kubernetes.driver.label.admission.datadoghq.com/enabled` (Required)
141
+ `spark.kubernetes.{driver,executor}.label.admission.datadoghq.com/enabled` (Required)
142
142
: `true`
143
143
144
- ` spark.kubernetes.driver.annotation.admission.datadoghq.com/java-lib.version` (Required)
144
+ `spark.kubernetes.{driver,executor}.annotation.admission.datadoghq.com/java-lib.version` (Required)
145
145
: `latest`
146
146
147
- ` spark.driver.extraJavaOptions`
148
- : `-Ddd.integration.spark .enabled` (Required)
147
+ `spark.{driver,executor}.extraJavaOptions`
148
+ : `-Ddd.data.jobs.enabled=true` (Required)
149
149
: `true`
150
150
151
- ` -Ddd.integrations.enabled` (Required)
152
- : `false`
153
-
154
151
`-Ddd.service` (Optional)
155
152
: Your service name. Because this option sets the _job name_ in Datadog, it is recommended that you use a human-readable name.
156
153
@@ -163,9 +160,6 @@ When you run your Spark job, use the following configurations:
163
160
`-Ddd.tags` (Optional)
164
161
: Other tags you wish to add, in the format `<KEY_1>:<VALUE_1>,<KEY_2:VALUE_2>`.
165
162
166
- ` -Ddd.trace.experimental.long-running.enabled` (Optional)
167
- : `true` To view jobs while they are still running
168
-
169
163
170
164
#### Example: spark-submit
171
165
@@ -177,9 +171,13 @@ spark-submit \
177
171
--deploy-mode cluster \
178
172
--conf spark.kubernetes.namespace=<NAMESPACE> \
179
173
--conf spark.kubernetes.authenticate.driver.serviceAccountName=<SERVICE_ACCOUNT> \
174
+ --conf spark.kubernetes.authenticate.executor.serviceAccountName=<SERVICE_ACCOUNT> \
180
175
--conf spark.kubernetes.driver.label.admission.datadoghq.com/enabled=true \
176
+ --conf spark.kubernetes.executor.label.admission.datadoghq.com/enabled=true \
181
177
--conf spark.kubernetes.driver.annotation.admission.datadoghq.com/java-lib.version=latest \
182
- --conf spark.driver.extraJavaOptions="-Ddd.integration.spark.enabled=true -Ddd.integrations.enabled=false -Ddd.service=<JOB_NAME> -Ddd.env=<ENV> -Ddd.version=<VERSION> -Ddd.tags=<KEY_1>:<VALUE_1>,<KEY_2:VALUE_2> -Ddd.trace.experimental.long-running.enabled=true" \
178
+ --conf spark.kubernetes.executor.annotation.admission.datadoghq.com/java-lib.version=latest \
179
+ --conf spark.driver.extraJavaOptions="-Ddd.data.jobs.enabled=true -Ddd.service=<JOB_NAME> -Ddd.env=<ENV> -Ddd.version=<VERSION> -Ddd.tags=<KEY_1>:<VALUE_1>,<KEY_2:VALUE_2>" \
180
+ --conf spark.executor.extraJavaOptions="-Ddd.data.jobs.enabled=true -Ddd.service=<JOB_NAME> -Ddd.env=<ENV> -Ddd.version=<VERSION> -Ddd.tags=<KEY_1>:<VALUE_1>,<KEY_2:VALUE_2>" \
183
181
local:///usr/lib/spark/examples/jars/spark-examples.jar 20
184
182
```
185
183
@@ -194,7 +192,7 @@ aws emr-containers start-job-run \
194
192
--job-driver '{
195
193
"sparkSubmitJobDriver": {
196
194
"entryPoint": "s3://BUCKET/spark-examples.jar",
197
- "sparkSubmitParameters": "--class <MAIN_CLASS> --conf spark.kubernetes.driver.label.admission.datadoghq.com/enabled=true --conf spark.kubernetes.driver.annotation.admission.datadoghq.com/java-lib.version=latest --conf spark.driver.extraJavaOptions=\" -Ddd.integration.spark .enabled=true -Ddd.integrations.enabled=false -Ddd. service=<JOB_NAME> -Ddd.env=<ENV> -Ddd.version=<VERSION> -Ddd.tags=<KEY_1>:<VALUE_1>,<KEY_2:VALUE_2> -Ddd.trace.experimental.long-running. enabled=true\" "
195
+ "sparkSubmitParameters": "--class <MAIN_CLASS> --conf spark.kubernetes.driver.label.admission.datadoghq.com/enabled=true --conf spark.kubernetes.executor.label.admission.datadoghq.com/enabled=true --conf spark.kubernetes.driver.annotation.admission.datadoghq.com/java-lib.version=latest --conf spark.kubernetes.executor.annotation.admission.datadoghq.com/java-lib.version=latest --conf spark.driver.extraJavaOptions=\"-Ddd.data.jobs.enabled=true -Ddd.service=<JOB_NAME> -Ddd.env=<ENV> -Ddd.version=<VERSION> -Ddd.tags=<KEY_1>:<VALUE_1>,<KEY_2:VALUE_2>\" --conf spark.executor.extraJavaOptions=\"-Ddd.data.jobs.enabled=true -Ddd.service=<JOB_NAME> -Ddd.env=<ENV> -Ddd.version=<VERSION> -Ddd.tags=<KEY_1>:<VALUE_1>,<KEY_2:VALUE_2>\""
198
196
}
199
197
}
200
198
0 commit comments