
Commit 61f7bdb: Databricks log debugging
Parent: 80b72c5

File tree: 5 files changed (+80, -11 lines)


dd-java-agent/instrumentation/spark/spark_2.12/src/main/java/datadog/trace/instrumentation/spark/Spark212PlanUtils.java

Lines changed: 31 additions & 5 deletions
@@ -2,6 +2,8 @@
 
 import datadog.trace.util.MethodHandles;
 import java.lang.invoke.MethodHandle;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import org.apache.spark.sql.execution.SparkPlanInfo;
@@ -39,6 +41,9 @@ public static SparkPlanInfo upsertSparkPlanInfoMetadata(
     // Attempt to create a new SparkPlanInfo with additional metadata replaced
     // Since the fields are immutable we must instantiate a new SparkPlanInfo to do this
 
+    log.warn("[CHARLES] control log");
+    System.out.println("[CHARLES] control log");
+
     Object[] standardArgs =
         new Object[] {
           planInfo.nodeName(),
@@ -49,19 +54,40 @@ public static SparkPlanInfo upsertSparkPlanInfoMetadata(
         };
 
     if (databricksConstructor != null) {
-      List<Object> databricksArgs = Arrays.asList(standardArgs);
+      List<Method> methods = new ArrayList<>();
+      try {
+        methods.add(SparkPlanInfo.class.getMethod("estRowCount"));
+        methods.add(SparkPlanInfo.class.getMethod("rddScopeId"));
+        methods.add(SparkPlanInfo.class.getMethod("explainId"));
+      } catch (Throwable t) {
+        log.warn("[CHARLES] Error reflecting Databricks args", t);
+        System.out.printf("[CHARLES] Error reflecting Databricks args: %s\n", t);
+        t.printStackTrace(System.out);
+      }
+
+      List<Object> databricksArgs = new ArrayList<>(Arrays.asList(standardArgs));
       try {
-        databricksArgs.add(SparkPlanInfo.class.getMethod("estRowCount").invoke(planInfo));
-        databricksArgs.add(SparkPlanInfo.class.getMethod("rddScopeId").invoke(planInfo));
-        databricksArgs.add(SparkPlanInfo.class.getMethod("explainId").invoke(planInfo));
+        if (methods.size() == 3) {
+          databricksArgs.add(methods.get(0).invoke(planInfo));
+          databricksArgs.add(methods.get(1).invoke(planInfo));
+          databricksArgs.add(methods.get(2).invoke(planInfo));
+        } else {
+          log.warn("[CHARLES] Something went wrong getting methods");
+          System.out.println("[CHARLES] Not enough methods, something went wrong reflecting them");
+        }
       } catch (Throwable t) {
-        log.error("Error reflecting Databricks args", t);
+        log.warn("[CHARLES] Error invoking Databricks args", t);
+        System.out.printf("[CHARLES] Error invoking Databricks args: %s\n", t);
+        t.printStackTrace(System.out);
      }
 
       SparkPlanInfo newPlan = methodLoader.invoke(databricksConstructor, databricksArgs.toArray());
       if (newPlan != null) {
         return newPlan;
       }
+    } else {
+      log.warn("[CHARLES] No Databricks constructor found");
+      System.out.println("[CHARLES] No Databricks constructor found");
     }
 
     if (constructor != null) {
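
Note: the change above splits the reflection into two phases, method lookup via getMethod and invocation via invoke, so the logs can distinguish which phase failed. Below is a minimal, self-contained sketch of that two-phase pattern (not part of this commit); PlanInfoStub and its estRowCount accessor are hypothetical stand-ins for the extra accessors the Databricks runtime is assumed to add to SparkPlanInfo.

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

public class TwoPhaseReflectionSketch {
  // Hypothetical stand-in for SparkPlanInfo with one Databricks-style accessor.
  public static class PlanInfoStub {
    public long estRowCount() {
      return 42L;
    }
  }

  public static void main(String[] args) {
    PlanInfoStub planInfo = new PlanInfoStub();

    // Phase 1: resolve methods by name. A failure here ("reflecting") means the
    // accessor does not exist on this runtime.
    List<Method> methods = new ArrayList<>();
    try {
      methods.add(PlanInfoStub.class.getMethod("estRowCount"));
    } catch (Throwable t) {
      System.out.println("Error reflecting args: " + t);
    }

    // Phase 2: invoke the resolved methods. A failure here ("invoking") means the
    // accessor exists but could not be called.
    List<Object> extraArgs = new ArrayList<>();
    try {
      for (Method m : methods) {
        extraArgs.add(m.invoke(planInfo));
      }
    } catch (Throwable t) {
      System.out.println("Error invoking args: " + t);
    }

    System.out.println(extraArgs); // prints [42]
  }
}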

dd-java-agent/instrumentation/spark/spark_2.13/src/main/java/datadog/trace/instrumentation/spark/Spark213PlanUtils.java

Lines changed: 31 additions & 5 deletions
@@ -2,6 +2,8 @@
 
 import datadog.trace.util.MethodHandles;
 import java.lang.invoke.MethodHandle;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import org.apache.spark.sql.execution.SparkPlanInfo;
@@ -39,6 +41,9 @@ public static SparkPlanInfo upsertSparkPlanInfoMetadata(
     // Attempt to create a new SparkPlanInfo with additional metadata replaced
     // Since the fields are immutable we must instantiate a new SparkPlanInfo to do this
 
+    log.warn("[CHARLES] control log");
+    System.out.println("[CHARLES] control log");
+
     Object[] standardArgs =
         new Object[] {
           planInfo.nodeName(),
@@ -49,19 +54,40 @@ public static SparkPlanInfo upsertSparkPlanInfoMetadata(
         };
 
     if (databricksConstructor != null) {
-      List<Object> databricksArgs = Arrays.asList(standardArgs);
+      List<Method> methods = new ArrayList<>();
+      try {
+        methods.add(SparkPlanInfo.class.getMethod("estRowCount"));
+        methods.add(SparkPlanInfo.class.getMethod("rddScopeId"));
+        methods.add(SparkPlanInfo.class.getMethod("explainId"));
+      } catch (Throwable t) {
+        log.warn("[CHARLES] Error reflecting Databricks args", t);
+        System.out.printf("[CHARLES] Error reflecting Databricks args: %s\n", t);
+        t.printStackTrace(System.out);
+      }
+
+      List<Object> databricksArgs = new ArrayList<>(Arrays.asList(standardArgs));
       try {
-        databricksArgs.add(SparkPlanInfo.class.getMethod("estRowCount").invoke(planInfo));
-        databricksArgs.add(SparkPlanInfo.class.getMethod("rddScopeId").invoke(planInfo));
-        databricksArgs.add(SparkPlanInfo.class.getMethod("explainId").invoke(planInfo));
+        if (methods.size() == 3) {
+          databricksArgs.add(methods.get(0).invoke(planInfo));
+          databricksArgs.add(methods.get(1).invoke(planInfo));
+          databricksArgs.add(methods.get(2).invoke(planInfo));
+        } else {
+          log.warn("[CHARLES] Something went wrong getting methods");
+          System.out.println("[CHARLES] Not enough methods, something went wrong reflecting them");
+        }
       } catch (Throwable t) {
-        log.error("Error reflecting Databricks args", t);
+        log.warn("[CHARLES] Error invoking Databricks args", t);
+        System.out.printf("[CHARLES] Error invoking Databricks args: %s\n", t);
+        t.printStackTrace(System.out);
      }
 
       SparkPlanInfo newPlan = methodLoader.invoke(databricksConstructor, databricksArgs.toArray());
       if (newPlan != null) {
         return newPlan;
       }
+    } else {
+      log.warn("[CHARLES] No Databricks constructor found");
+      System.out.println("[CHARLES] No Databricks constructor found");
     }
 
     if (constructor != null) {

dd-trace-api/src/main/java/datadog/trace/api/ConfigDefaults.java

Lines changed: 1 addition & 1 deletion
@@ -233,7 +233,7 @@ public final class ConfigDefaults {
   static final boolean DEFAULT_DATA_JOBS_OPENLINEAGE_ENABLED = false;
   static final boolean DEFAULT_DATA_JOBS_OPENLINEAGE_TIMEOUT_ENABLED = true;
   static final boolean DEFAULT_DATA_JOBS_PARSE_SPARK_PLAN_ENABLED = false;
-  static final boolean DEFAULT_DATA_JOBS_EXPERIMENTAL_FEATURES_ENABLED = false;
+  static final boolean DEFAULT_DATA_JOBS_EXPERIMENTAL_FEATURES_ENABLED = true;
 
   static final boolean DEFAULT_DATA_STREAMS_ENABLED = false;
   static final int DEFAULT_DATA_STREAMS_BUCKET_DURATION = 10; // seconds

dd-trace-core/src/main/java/datadog/trace/core/StatusLogger.java

Lines changed: 6 additions & 0 deletions
@@ -157,6 +157,12 @@ public void toJson(JsonWriter writer, Config config) throws IOException {
     writer.name("data_streams_enabled");
     writer.value(config.isDataStreamsEnabled());
     writer.endObject();
+    writer.name("data_jobs_experimental_features_enabled");
+    writer.value(config.isDataJobsExperimentalFeaturesEnabled());
+    writer.endObject();
+    writer.name("data_jobs_parse_spark_plan_enabled");
+    writer.value(config.isDataJobsParseSparkPlanEnabled());
+    writer.endObject();
   }
 
   private static boolean agentServiceCheck(Config config) {
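
Note: the toJson(JsonWriter, Config) signature matches a Moshi-style streaming writer. Below is a minimal sketch (not part of this commit) of the name/value pattern the two new entries use, assuming com.squareup.moshi.JsonWriter over an okio Buffer and with the flag values hard-coded for illustration.

import com.squareup.moshi.JsonWriter;
import okio.Buffer;

public class StatusJsonSketch {
  public static void main(String[] args) throws Exception {
    Buffer buffer = new Buffer();
    JsonWriter writer = JsonWriter.of(buffer);

    // Same name/value pairing used in StatusLogger; values hard-coded here.
    writer.beginObject();
    writer.name("data_jobs_experimental_features_enabled");
    writer.value(true);
    writer.name("data_jobs_parse_spark_plan_enabled");
    writer.value(false);
    writer.endObject();

    // Prints: {"data_jobs_experimental_features_enabled":true,"data_jobs_parse_spark_plan_enabled":false}
    System.out.println(buffer.readUtf8());
  }
}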

internal-api/src/main/java/datadog/trace/util/MethodHandles.java

Lines changed: 11 additions & 0 deletions
@@ -127,6 +127,17 @@ public MethodHandle constructor(Class<?> clazz, Class<?>... parameterTypes) {
           Arrays.toString(parameterTypes),
           clazz.getName(),
           t);
+      log.warn(
+          "[CHARLES] Could not get constructor accepting {} from class {}",
+          Arrays.toString(parameterTypes),
+          clazz.getName(),
+          t);
+      System.out.println(
+          "[CHARLES] Could not get constructor accepting "
+              + Arrays.toString(parameterTypes)
+              + " from class "
+              + clazz.getName());
+      System.out.println(t);
       return null;
     }
   });
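
Note: the instrumented branch is the failure path of MethodHandles#constructor, where a constructor handle cannot be resolved. Below is a minimal sketch (not part of this commit) of the underlying JDK lookup such a helper typically wraps, using java.lang.invoke directly; the helper and the StringBuilder example are illustrative only.

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodType;
import java.util.Arrays;

public class ConstructorLookupSketch {
  public static void main(String[] args) throws Throwable {
    // Happy path: resolve StringBuilder(int) and invoke it.
    MethodHandle ctor = constructor(StringBuilder.class, int.class);
    if (ctor != null) {
      StringBuilder sb = (StringBuilder) ctor.invoke(16);
      System.out.println("constructed with capacity " + sb.capacity());
    }
    // Failure path: no such constructor, so we land in the catch block,
    // which is where the commit adds the extra logging.
    constructor(StringBuilder.class, java.util.Map.class);
  }

  static MethodHandle constructor(Class<?> clazz, Class<?>... parameterTypes) {
    try {
      // Constructor handles are looked up with a void return type.
      return java.lang.invoke.MethodHandles.lookup()
          .findConstructor(clazz, MethodType.methodType(void.class, parameterTypes));
    } catch (Throwable t) {
      System.out.println(
          "Could not get constructor accepting "
              + Arrays.toString(parameterTypes)
              + " from class "
              + clazz.getName());
      return null;
    }
  }
}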
