-
Notifications
You must be signed in to change notification settings - Fork 279
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add instrumentation support for Spark built on Scala 2.11
- Loading branch information
1 parent
cdecb42
commit abcf9d7
Showing
12 changed files
with
201 additions
and
7 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
54 changes: 54 additions & 0 deletions
54
dd-java-agent/instrumentation/spark/spark_2.11/build.gradle
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,54 @@ | ||
plugins {
  // Exposes shared Spark test helpers from the parent module to sibling test suites.
  id 'java-test-fixtures'
}

// Baseline versions this module compiles against: Spark 2.4.0 on Scala 2.11.
def sparkVersion = '2.4.0'
def scalaVersion = '2.11'

muzzle {
  pass {
    group = "org.apache.spark"
    module = "spark-sql_$scalaVersion"
    // 2.4.x is the last Spark line published for Scala 2.11; assertInverse
    // verifies the instrumentation is rejected outside this range.
    versions = "[$sparkVersion,2.4.8]"
    assertInverse = true
  }
}
configurations.all {
  // NOTE(review): locking is disabled for all configurations here even though a
  // gradle.lockfile exists in this module — presumably because the '+' versions
  // used by latestDepTest cannot be locked; confirm before re-enabling.
  resolutionStrategy.deactivateDependencyLocking()
}
apply from: "$rootDir/gradle/java.gradle"

// 'latestDepTest' re-runs the default test sources against the newest Spark,
// 'test_spark24' pins an explicit 2.4.0 run.
addTestSuiteForDir('latestDepTest', 'test')
addTestSuite('test_spark24')

ext {
  // Hadoop does not behave correctly with OpenJ9 https://issues.apache.org/jira/browse/HADOOP-18174
  excludeJdk = ['SEMERU8', 'SEMERU11']

  // Spark does not support Java > 11 until 3.3.0 https://issues.apache.org/jira/browse/SPARK-33772
  maxJavaVersionForTests = JavaVersion.VERSION_11
}

dependencies {
  // Shared (scala-version-agnostic) Spark instrumentation code.
  implementation project(':dd-java-agent:instrumentation:spark')

  // compileOnly: the Spark jars are supplied by the traced application at runtime.
  compileOnly group: 'org.apache.spark', name: "spark-core_$scalaVersion", version: "$sparkVersion"
  compileOnly group: 'org.apache.spark', name: "spark-sql_$scalaVersion", version: "$sparkVersion"

  testImplementation(testFixtures(project(":dd-java-agent:instrumentation:spark")))
  testImplementation group: 'org.apache.spark', name: "spark-core_$scalaVersion", version: "$sparkVersion"
  testImplementation group: 'org.apache.spark', name: "spark-sql_$scalaVersion", version: "$sparkVersion"
  testImplementation group: 'org.apache.spark', name: "spark-yarn_$scalaVersion", version: "$sparkVersion"

  test_spark24Implementation group: 'org.apache.spark', name: "spark-core_$scalaVersion", version: "2.4.0"
  test_spark24Implementation group: 'org.apache.spark', name: "spark-sql_$scalaVersion", version: "2.4.0"
  test_spark24Implementation group: 'org.apache.spark', name: "spark-yarn_$scalaVersion", version: "2.4.0"

  // '+' resolves to the latest published spark_2.11 artifacts.
  latestDepTestImplementation group: 'org.apache.spark', name: "spark-core_$scalaVersion", version: '+'
  latestDepTestImplementation group: 'org.apache.spark', name: "spark-sql_$scalaVersion", version: '+'
  latestDepTestImplementation group: 'org.apache.spark', name: "spark-yarn_$scalaVersion", version: '+'
}

tasks.named("test").configure {
  // Make the default 'test' task also trigger the pinned Spark 2.4 suite.
  dependsOn "test_spark24"
}
4 changes: 4 additions & 0 deletions
4
dd-java-agent/instrumentation/spark/spark_2.11/gradle.lockfile
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
# This is a Gradle generated file for dependency locking. | ||
# Manual edits can break the build and are not advised. | ||
# This file is expected to be part of source control. | ||
empty=spotbugsPlugins,testFixturesAnnotationProcessor |
65 changes: 65 additions & 0 deletions
65
...spark_2.11/src/main/java/datadog/trace/instrumentation/spark/DatadogSpark211Listener.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,65 @@ | ||
package datadog.trace.instrumentation.spark; | ||
|
||
import java.util.ArrayList; | ||
import java.util.Collection; | ||
import java.util.List; | ||
import org.apache.spark.SparkConf; | ||
import org.apache.spark.scheduler.SparkListenerJobStart; | ||
import org.apache.spark.scheduler.StageInfo; | ||
import org.apache.spark.sql.execution.SparkPlanInfo; | ||
import org.apache.spark.sql.execution.metric.SQLMetricInfo; | ||
import scala.collection.JavaConverters; | ||
|
||
/** | ||
* DatadogSparkListener compiled for Scala 2.11 | ||
* | ||
* <p>The signature of scala.Seq change between 2.12 and 2.13. Methods using scala.Seq needs to be | ||
* compiled with the specific scala version | ||
*/ | ||
public class DatadogSpark211Listener extends AbstractDatadogSparkListener { | ||
public DatadogSpark211Listener(SparkConf sparkConf, String appId, String sparkVersion) { | ||
super(sparkConf, appId, sparkVersion); | ||
} | ||
|
||
@Override | ||
protected ArrayList<Integer> getSparkJobStageIds(SparkListenerJobStart jobStart) { | ||
ArrayList<Integer> javaIds = new ArrayList<>(jobStart.stageInfos().length()); | ||
JavaConverters.seqAsJavaListConverter(jobStart.stageInfos()).asJava().forEach(stage -> javaIds.add(stage.stageId())); | ||
return javaIds; | ||
} | ||
|
||
@Override | ||
protected String getSparkJobName(SparkListenerJobStart jobStart) { | ||
if (jobStart.stageInfos().nonEmpty()) { | ||
// In the spark UI, the name of a job is the name of its last stage | ||
return jobStart.stageInfos().last().name(); | ||
} | ||
|
||
return null; | ||
} | ||
|
||
@Override | ||
protected int getStageCount(SparkListenerJobStart jobStart) { | ||
return jobStart.stageInfos().length(); | ||
} | ||
|
||
@Override | ||
protected Collection<SparkPlanInfo> getPlanInfoChildren(SparkPlanInfo info) { | ||
return JavaConverters.asJavaCollectionConverter(info.children()).asJavaCollection(); | ||
} | ||
|
||
@Override | ||
protected List<SQLMetricInfo> getPlanInfoMetrics(SparkPlanInfo info) { | ||
return JavaConverters.seqAsJavaListConverter(info.metrics()).asJava(); | ||
} | ||
|
||
@Override | ||
protected int[] getStageParentIds(StageInfo info) { | ||
int[] parentIds = new int[info.parentIds().length()]; | ||
for (int i = 0; i < parentIds.length; i++) { | ||
parentIds[i] = (int) info.parentIds().apply(i); | ||
} | ||
|
||
return parentIds; | ||
} | ||
} |
49 changes: 49 additions & 0 deletions
49
...spark_2.11/src/main/java/datadog/trace/instrumentation/spark/Spark211Instrumentation.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
package datadog.trace.instrumentation.spark; | ||
|
||
import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; | ||
import static net.bytebuddy.matcher.ElementMatchers.*; | ||
|
||
import com.google.auto.service.AutoService; | ||
import datadog.trace.agent.tooling.InstrumenterModule; | ||
import net.bytebuddy.asm.Advice; | ||
import org.apache.spark.SparkContext; | ||
|
||
@AutoService(InstrumenterModule.class) | ||
public class Spark211Instrumentation extends AbstractSparkInstrumentation { | ||
@Override | ||
public String[] helperClassNames() { | ||
return new String[] { | ||
packageName + ".AbstractDatadogSparkListener", | ||
packageName + ".DatabricksParentContext", | ||
packageName + ".DatadogSpark211Listener", | ||
packageName + ".RemoveEldestHashMap", | ||
packageName + ".SparkAggregatedTaskMetrics", | ||
packageName + ".SparkConfAllowList", | ||
packageName + ".SparkSQLUtils", | ||
packageName + ".SparkSQLUtils$SparkPlanInfoForStage", | ||
packageName + ".SparkSQLUtils$AccumulatorWithStage", | ||
}; | ||
} | ||
|
||
@Override | ||
public void methodAdvice(MethodTransformer transformer) { | ||
super.methodAdvice(transformer); | ||
|
||
transformer.applyAdvice( | ||
isMethod() | ||
.and(named("setupAndStartListenerBus")) | ||
.and(isDeclaredBy(named("org.apache.spark.SparkContext"))) | ||
.and(takesNoArguments()), | ||
Spark211Instrumentation.class.getName() + "$InjectListener"); | ||
} | ||
|
||
public static class InjectListener { | ||
@Advice.OnMethodEnter(suppress = Throwable.class) | ||
public static void enter(@Advice.This SparkContext sparkContext) { | ||
AbstractDatadogSparkListener.listener = | ||
new DatadogSpark211Listener( | ||
sparkContext.getConf(), sparkContext.applicationId(), sparkContext.version()); | ||
sparkContext.listenerBus().addToSharedQueue(AbstractDatadogSparkListener.listener); | ||
} | ||
} | ||
} |
12 changes: 12 additions & 0 deletions
12
dd-java-agent/instrumentation/spark/spark_2.11/src/test/groovy/SparkListenerTest.groovy
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,12 @@ | ||
import datadog.trace.instrumentation.spark.AbstractSparkListenerTest | ||
import datadog.trace.instrumentation.spark.DatadogSpark211Listener | ||
import org.apache.spark.SparkConf | ||
import org.apache.spark.scheduler.SparkListener | ||
|
||
/** Runs the shared listener test suite against the Scala 2.11 listener implementation. */
class SparkListenerTest extends AbstractSparkListenerTest {
  @Override
  protected SparkListener getTestDatadogSparkListener() {
    return new DatadogSpark211Listener(new SparkConf(), "some_app_id", "some_version")
  }
}
3 changes: 3 additions & 0 deletions
3
...gent/instrumentation/spark/spark_2.11/src/test/groovy/SparkStructuredStreamingTest.groovy
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
import datadog.trace.instrumentation.spark.AbstractSparkStructuredStreamingTest | ||
|
||
// Runs the shared structured-streaming test suite against the Scala 2.11 build of the instrumentation.
class SparkStructuredStreamingTest extends AbstractSparkStructuredStreamingTest {}
3 changes: 3 additions & 0 deletions
3
dd-java-agent/instrumentation/spark/spark_2.11/src/test/groovy/SparkTest.groovy
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
import datadog.trace.instrumentation.spark.AbstractSparkTest | ||
|
||
// Runs the shared Spark test suite against the Scala 2.11 build of the instrumentation.
class SparkTest extends AbstractSparkTest {}
3 changes: 3 additions & 0 deletions
3
dd-java-agent/instrumentation/spark/spark_2.11/src/test_spark24/groovy/Spark24SqlTest.groovy
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
import datadog.trace.instrumentation.spark.AbstractSpark24SqlTest | ||
|
||
// Runs the shared Spark 2.4 SQL test suite against the Scala 2.11 build of the instrumentation.
class Spark24SqlTest extends AbstractSpark24SqlTest {}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters