
Commit 91f2995

[DJM-974] Use reflection for constructor in Scala 2.12, lookup by parameter classes (#9886)
1 parent 1934a3f commit 91f2995

File tree: 2 files changed, +32 -12 lines changed
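For orientation before the diffs: both files now locate the SparkPlanInfo constructor reflectively by parameter count instead of linking a fixed five-argument signature at compile time, which tolerates Spark releases that change the signature. A minimal standalone sketch of that lookup pattern (the class and helper name here are illustrative, not part of the commit):

import java.lang.reflect.Constructor;

// Illustrative sketch only: pick a public constructor by parameter count
// rather than hard-coding a signature that may differ across Spark versions.
public class CtorLookupSketch {
  static Constructor<?> findConstructor(Class<?> cls, int arity) {
    for (Constructor<?> c : cls.getConstructors()) {
      if (c.getParameterCount() == arity) {
        return c; // first match wins, as in the advice below
      }
    }
    return null; // no match: the advice leaves the original planInfo as-is
  }
}

In the advice itself the loop is inlined with arity 5, and any failure (no matching constructor, or newInstance throwing) is swallowed so the instrumented application call is never broken.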

dd-java-agent/instrumentation/spark/spark_2.12/src/main/java/datadog/trace/instrumentation/spark/Spark212Instrumentation.java

Lines changed: 27 additions & 8 deletions

@@ -6,6 +6,8 @@
 import com.google.auto.service.AutoService;
 import datadog.trace.agent.tooling.InstrumenterModule;
 import datadog.trace.api.Config;
+import de.thetaphi.forbiddenapis.SuppressForbidden;
+import java.lang.reflect.Constructor;
 import net.bytebuddy.asm.Advice;
 import org.apache.spark.SparkContext;
 import org.apache.spark.sql.execution.SparkPlan;
@@ -14,6 +16,7 @@
 import org.slf4j.LoggerFactory;
 import scala.Predef;
 import scala.collection.JavaConverters;
+import scala.collection.immutable.Map;
 
 @AutoService(InstrumenterModule.class)
 public class Spark212Instrumentation extends AbstractSparkInstrumentation {
@@ -94,21 +97,37 @@ public static void enter(@Advice.This SparkContext sparkContext) {
 
   public static class SparkPlanInfoAdvice {
     @Advice.OnMethodExit(suppress = Throwable.class, onThrowable = Throwable.class)
+    @SuppressForbidden
     public static void exit(
         @Advice.Return(readOnly = false) SparkPlanInfo planInfo,
         @Advice.Argument(0) SparkPlan plan) {
       if (planInfo.metadata().size() == 0
           && (Config.get().isDataJobsParseSparkPlanEnabled()
               || Config.get().isDataJobsExperimentalFeaturesEnabled())) {
         Spark212PlanSerializer planUtils = new Spark212PlanSerializer();
-        planInfo =
-            new SparkPlanInfo(
-                planInfo.nodeName(),
-                planInfo.simpleString(),
-                planInfo.children(),
-                JavaConverters.mapAsScalaMap(planUtils.extractFormattedProduct(plan))
-                    .toMap(Predef.$conforms()),
-                planInfo.metrics());
+        Map<String, String> meta =
+            JavaConverters.mapAsScalaMap(planUtils.extractFormattedProduct(plan))
+                .toMap(Predef.$conforms());
+        try {
+          Constructor<?> targetCtor = null;
+          for (Constructor<?> c : SparkPlanInfo.class.getConstructors()) {
+            if (c.getParameterCount() == 5) {
+              targetCtor = c;
+              break;
+            }
+          }
+          if (targetCtor != null) {
+            Object newInst =
+                targetCtor.newInstance(
+                    planInfo.nodeName(),
+                    planInfo.simpleString(),
+                    planInfo.children(),
+                    meta,
+                    planInfo.metrics());
+            planInfo = (SparkPlanInfo) newInst;
+          }
+        } catch (Throwable ignored) {
+        }
       }
     }
   }
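A side note on the `meta` construction above, which uses the Scala 2.12 flavor of Java-to-Scala map conversion. A standalone sketch of that conversion (illustrative class and method names, assuming scala-library 2.12 on the classpath):

import scala.Predef;
import scala.collection.JavaConverters;
import scala.collection.immutable.Map;

// Illustrative sketch only; compiles against scala-library 2.12.
public class MetaConversion212Sketch {
  static Map<String, String> toImmutableScalaMap(java.util.Map<String, String> javaMap) {
    // mapAsScalaMap wraps the Java map as a mutable Scala view; toMap copies
    // it into an immutable Map, with Predef.$conforms() supplying the
    // key/value tuple evidence that Scala code would get implicitly.
    return JavaConverters.mapAsScalaMap(javaMap).toMap(Predef.$conforms());
  }
}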

dd-java-agent/instrumentation/spark/spark_2.13/src/main/java/datadog/trace/instrumentation/spark/Spark213Instrumentation.java

Lines changed: 5 additions & 4 deletions

@@ -7,6 +7,7 @@
 import datadog.trace.agent.tooling.InstrumenterModule;
 import datadog.trace.api.Config;
 import de.thetaphi.forbiddenapis.SuppressForbidden;
+import java.lang.reflect.Constructor;
 import net.bytebuddy.asm.Advice;
 import org.apache.spark.SparkContext;
 import org.apache.spark.sql.execution.SparkPlan;
@@ -15,6 +16,7 @@
 import org.slf4j.LoggerFactory;
 import scala.collection.JavaConverters;
 import scala.collection.immutable.HashMap;
+import scala.collection.immutable.Map;
 
 @AutoService(InstrumenterModule.class)
 public class Spark213Instrumentation extends AbstractSparkInstrumentation {
@@ -104,12 +106,11 @@ public static void exit(
           && (Config.get().isDataJobsParseSparkPlanEnabled()
               || Config.get().isDataJobsExperimentalFeaturesEnabled())) {
         Spark213PlanSerializer planUtils = new Spark213PlanSerializer();
-        scala.collection.immutable.Map<String, String> meta =
+        Map<String, String> meta =
             HashMap.from(JavaConverters.asScala(planUtils.extractFormattedProduct(plan)));
         try {
-          Class<?> spiClass = Class.forName("org.apache.spark.sql.execution.SparkPlanInfo");
-          java.lang.reflect.Constructor<?> targetCtor = null;
-          for (java.lang.reflect.Constructor<?> c : spiClass.getConstructors()) {
+          Constructor<?> targetCtor = null;
+          for (Constructor<?> c : SparkPlanInfo.class.getConstructors()) {
             if (c.getParameterCount() == 5) {
               targetCtor = c;
               break;
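For comparison with the 2.12 file, the 2.13 variant builds the same immutable map via `HashMap.from`. A standalone sketch (illustrative names, assuming scala-library 2.13 on the classpath):

import scala.collection.JavaConverters;
import scala.collection.immutable.HashMap;
import scala.collection.immutable.Map;

// Illustrative sketch only; compiles against scala-library 2.13, where
// JavaConverters is deprecated but still present.
public class MetaConversion213Sketch {
  static Map<String, String> toImmutableScalaMap(java.util.Map<String, String> javaMap) {
    // asScala wraps the Java map as a mutable Scala view; HashMap.from
    // copies it into an immutable HashMap, replacing 2.12's
    // toMap(Predef.$conforms()) idiom.
    return HashMap.from(JavaConverters.asScala(javaMap));
  }
}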
