Skip to content

Commit 750c68f

Browse files
committed
Use Abstract class for common functions
1 parent beb4d5f commit 750c68f

File tree

6 files changed: +25 additions, −20 deletions

dd-java-agent/instrumentation/spark/spark_2.12/src/main/java/datadog/trace/instrumentation/spark/Spark212Instrumentation.java

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ public class Spark212Instrumentation extends AbstractSparkInstrumentation {
2121
public String[] helperClassNames() {
2222
return new String[] {
2323
packageName + ".AbstractDatadogSparkListener",
24+
packageName + ".AbstractSparkPlanUtils",
2425
packageName + ".DatabricksParentContext",
2526
packageName + ".OpenlineageParentContext",
2627
packageName + ".DatadogSpark212Listener",
@@ -31,7 +32,7 @@ public String[] helperClassNames() {
3132
packageName + ".SparkSQLUtils",
3233
packageName + ".SparkSQLUtils$SparkPlanInfoForStage",
3334
packageName + ".SparkSQLUtils$AccumulatorWithStage",
34-
packageName + ".Spark212PlanUtils",
35+
packageName + ".Spark212PlanUtils"
3536
};
3637
}
3738

@@ -109,14 +110,14 @@ public static void exit(
109110
@Advice.Return(readOnly = false) SparkPlanInfo planInfo,
110111
@Advice.Argument(0) SparkPlan plan) {
111112
if (planInfo.metadata().size() == 0) {
113+
Spark212PlanUtils planUtils = new Spark212PlanUtils();
112114
HashMap<String, String> args = new HashMap<>();
113115
planInfo =
114116
new SparkPlanInfo(
115117
planInfo.nodeName(),
116118
planInfo.simpleString(),
117119
planInfo.children(),
118-
args.$plus$plus(
119-
JavaConverters.mapAsScalaMap(Spark212PlanUtils.extractPlanProduct(plan))),
120+
args.$plus$plus(JavaConverters.mapAsScalaMap(planUtils.extractPlanProduct(plan))),
120121
planInfo.metrics());
121122
}
122123
}

dd-java-agent/instrumentation/spark/spark_2.12/src/main/java/datadog/trace/instrumentation/spark/Spark212PlanUtils.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,13 @@
33
import java.util.HashMap;
44
import java.util.Iterator;
55
import java.util.Map;
6-
import org.apache.spark.sql.execution.SparkPlan;
6+
import org.apache.spark.sql.catalyst.trees.TreeNode;
77
import scala.collection.JavaConverters;
88

99
// An extension of how Spark translates `SparkPlan`s to `SparkPlanInfo`, see here:
1010
// https://github.com/apache/spark/blob/v3.5.0/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala#L54
11-
public class Spark212PlanUtils {
12-
public static Map<String, String> extractPlanProduct(SparkPlan plan) {
11+
public class Spark212PlanUtils extends AbstractSparkPlanUtils {
12+
public Map<String, String> extractPlanProduct(TreeNode plan) {
1313
HashMap<String, String> args = new HashMap<>();
1414
HashMap<String, String> unparsed = new HashMap<>();
1515

@@ -19,7 +19,7 @@ public static Map<String, String> extractPlanProduct(SparkPlan plan) {
1919
Object obj = it.next();
2020
String key = String.format("_dd.unknown_key.%d", i);
2121

22-
String val = CommonSparkPlanUtils.parsePlanProduct(obj);
22+
String val = parsePlanProduct(obj);
2323
if (val != null) {
2424
args.put(key, val);
2525
} else {

dd-java-agent/instrumentation/spark/spark_2.13/src/main/java/datadog/trace/instrumentation/spark/Spark213Instrumentation.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ public class Spark213Instrumentation extends AbstractSparkInstrumentation {
2121
public String[] helperClassNames() {
2222
return new String[] {
2323
packageName + ".AbstractDatadogSparkListener",
24+
packageName + ".AbstractSparkPlanUtils",
2425
packageName + ".DatabricksParentContext",
2526
packageName + ".OpenlineageParentContext",
2627
packageName + ".DatadogSpark213Listener",
@@ -31,8 +32,7 @@ public String[] helperClassNames() {
3132
packageName + ".SparkSQLUtils",
3233
packageName + ".SparkSQLUtils$SparkPlanInfoForStage",
3334
packageName + ".SparkSQLUtils$AccumulatorWithStage",
34-
packageName + ".Spark213PlanUtils",
35-
packageName + ".CommonSparkPlanUtils",
35+
packageName + ".Spark213PlanUtils"
3636
};
3737
}
3838

@@ -111,13 +111,13 @@ public static void exit(
111111
@Advice.Return(readOnly = false) SparkPlanInfo planInfo,
112112
@Advice.Argument(0) SparkPlan plan) {
113113
if (planInfo.metadata().size() == 0) {
114+
Spark213PlanUtils planUtils = new Spark213PlanUtils();
114115
planInfo =
115116
new SparkPlanInfo(
116117
planInfo.nodeName(),
117118
planInfo.simpleString(),
118119
planInfo.children(),
119-
HashMap.from(
120-
JavaConverters.asScala(Spark213PlanUtils.extractPlanProduct(plan)).toList()),
120+
HashMap.from(JavaConverters.asScala(planUtils.extractPlanProduct(plan)).toList()),
121121
planInfo.metrics());
122122
}
123123
}

dd-java-agent/instrumentation/spark/spark_2.13/src/main/java/datadog/trace/instrumentation/spark/Spark213PlanUtils.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,13 @@
33
import java.util.HashMap;
44
import java.util.Iterator;
55
import java.util.Map;
6-
import org.apache.spark.sql.execution.SparkPlan;
6+
import org.apache.spark.sql.catalyst.trees.TreeNode;
77
import scala.collection.JavaConverters;
88

99
// An extension of how Spark translates `SparkPlan`s to `SparkPlanInfo`, see here:
1010
// https://github.com/apache/spark/blob/v3.5.0/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala#L54
11-
public class Spark213PlanUtils {
12-
public static Map<String, String> extractPlanProduct(SparkPlan plan) {
11+
public class Spark213PlanUtils extends AbstractSparkPlanUtils {
12+
public Map<String, String> extractPlanProduct(TreeNode plan) {
1313
HashMap<String, String> args = new HashMap<>();
1414
HashMap<String, String> unparsed = new HashMap<>();
1515

@@ -19,7 +19,7 @@ public static Map<String, String> extractPlanProduct(SparkPlan plan) {
1919
Object obj = it.next();
2020
String key = plan.productElementName(i);
2121

22-
String val = CommonSparkPlanUtils.parsePlanProduct(obj);
22+
String val = parsePlanProduct(obj);
2323
if (val != null) {
2424
args.put(key, val);
2525
} else {

dd-java-agent/instrumentation/spark/src/main/java/datadog/trace/instrumentation/spark/CommonSparkPlanUtils.java renamed to dd-java-agent/instrumentation/spark/src/main/java/datadog/trace/instrumentation/spark/AbstractSparkPlanUtils.java

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,16 @@
11
package datadog.trace.instrumentation.spark;
22

33
import java.util.ArrayList;
4+
import java.util.Map;
45
import org.apache.spark.sql.catalyst.plans.QueryPlan;
6+
import org.apache.spark.sql.catalyst.trees.TreeNode;
57
import scala.Option;
68
import scala.collection.Iterable;
79

8-
public class CommonSparkPlanUtils {
9-
public static String parsePlanProduct(Object value) {
10+
public abstract class AbstractSparkPlanUtils {
11+
public abstract Map<String, String> extractPlanProduct(TreeNode node);
12+
13+
public String parsePlanProduct(Object value) {
1014
if (value == null) {
1115
return "null";
1216
} else if (value instanceof Iterable) {

dd-java-agent/instrumentation/spark/src/testFixtures/groovy/datadog/trace/instrumentation/spark/AbstractSpark24SqlTest.groovy

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -183,13 +183,13 @@ abstract class AbstractSpark24SqlTest extends InstrumentationSpecification {
183183
var simpleString = actual["nodeDetailString"]
184184
var values = []
185185
var child = "N/A"
186-
actual["meta"].each() { key, value ->
186+
actual["meta"].each { key, value ->
187187
if (key == "_dd.unparsed") {
188188
values.add("\"_dd.unparsed\": \"any\"")
189189
child = value
190190
} else if (value instanceof List) {
191191
var list = []
192-
value.each() { it ->
192+
value.each { it ->
193193
list.add("\"$it\"")
194194
}
195195
def prettyList = "[\n " + list.join(", \n ") + "\n ]"
@@ -201,7 +201,7 @@ abstract class AbstractSpark24SqlTest extends InstrumentationSpecification {
201201
values.add("\"$key\": \"$value\"")
202202
}
203203
}
204-
values.sort() { it }
204+
values.sort { it }
205205
def prettyValues = "\n\"meta\": {\n " + values.join(", \n ") + "\n},"
206206
if (values.size() == 1) {
207207
prettyValues = "\n\"meta\": {" + values.join(", ") + "},"

Comments (0)