Skip to content

Commit 52646b2

Browse files
committed
[DJM-974] Use reflection for constructor in Scala 2.12, lookup by parameter classes
1 parent 1934a3f commit 52646b2

File tree

2 files changed

+27
-15
lines changed

2 files changed

+27
-15
lines changed

dd-java-agent/instrumentation/spark/spark_2.12/src/main/java/datadog/trace/instrumentation/spark/Spark212Instrumentation.java

Lines changed: 22 additions & 8 deletions
```diff
@@ -14,6 +14,8 @@
 import org.slf4j.LoggerFactory;
 import scala.Predef;
 import scala.collection.JavaConverters;
+import scala.collection.Map;
+import scala.collection.Seq;

 @AutoService(InstrumenterModule.class)
 public class Spark212Instrumentation extends AbstractSparkInstrumentation {
@@ -101,14 +103,26 @@ public static void exit(
           && (Config.get().isDataJobsParseSparkPlanEnabled()
               || Config.get().isDataJobsExperimentalFeaturesEnabled())) {
         Spark212PlanSerializer planUtils = new Spark212PlanSerializer();
-        planInfo =
-            new SparkPlanInfo(
-                planInfo.nodeName(),
-                planInfo.simpleString(),
-                planInfo.children(),
-                JavaConverters.mapAsScalaMap(planUtils.extractFormattedProduct(plan))
-                    .toMap(Predef.$conforms()),
-                planInfo.metrics());
+        scala.collection.immutable.Map<String, String> meta =
+            JavaConverters.mapAsScalaMap(planUtils.extractFormattedProduct(plan))
+                .toMap(Predef.$conforms());
+        try {
+          Class<?> spiClass = Class.forName("org.apache.spark.sql.execution.SparkPlanInfo");
+          java.lang.reflect.Constructor<?> targetCtor =
+              spiClass.getConstructor(
+                  new Class[] {String.class, String.class, Seq.class, Map.class, Seq.class});
+          if (targetCtor != null) {
+            Object newInst =
+                targetCtor.newInstance(
+                    planInfo.nodeName(),
+                    planInfo.simpleString(),
+                    planInfo.children(),
+                    meta,
+                    planInfo.metrics());
+            planInfo = (SparkPlanInfo) newInst;
+          }
+        } catch (Throwable ignored) {
+        }
       }
     }
   }
```

dd-java-agent/instrumentation/spark/spark_2.13/src/main/java/datadog/trace/instrumentation/spark/Spark213Instrumentation.java

Lines changed: 5 additions & 7 deletions
```diff
@@ -14,6 +14,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import scala.collection.JavaConverters;
+import scala.collection.Map;
+import scala.collection.Seq;
 import scala.collection.immutable.HashMap;

 @AutoService(InstrumenterModule.class)
@@ -108,13 +110,9 @@ public static void exit(
             HashMap.from(JavaConverters.asScala(planUtils.extractFormattedProduct(plan)));
         try {
           Class<?> spiClass = Class.forName("org.apache.spark.sql.execution.SparkPlanInfo");
-          java.lang.reflect.Constructor<?> targetCtor = null;
-          for (java.lang.reflect.Constructor<?> c : spiClass.getConstructors()) {
-            if (c.getParameterCount() == 5) {
-              targetCtor = c;
-              break;
-            }
-          }
+          java.lang.reflect.Constructor<?> targetCtor =
+              spiClass.getConstructor(
+                  new Class[] {String.class, String.class, Seq.class, Map.class, Seq.class});
           if (targetCtor != null) {
             Object newInst =
                 targetCtor.newInstance(
```

0 commit comments

Comments
 (0)