4 | 4 | import datadog.trace.bootstrap.instrumentation.api.AgentTracer; |
5 | 5 | import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; |
6 | 6 | import datadog.trace.bootstrap.instrumentation.decorator.BaseDecorator; |
| 7 | +import datadog.trace.util.MethodHandles; |
7 | 8 | import java.lang.invoke.MethodHandle; |
8 | | -import java.lang.invoke.MethodHandles; |
9 | | -import java.lang.reflect.Field; |
10 | 9 | import java.util.Properties; |
11 | 10 | import org.apache.spark.executor.Executor; |
12 | 11 | import org.apache.spark.executor.TaskMetrics; |
13 | | -import org.slf4j.Logger; |
14 | | -import org.slf4j.LoggerFactory; |
15 | 12 |
16 | 13 | public class SparkExecutorDecorator extends BaseDecorator { |
17 | | - private static final Logger log = LoggerFactory.getLogger(SparkExecutorDecorator.class); |
18 | | - |
19 | 14 | public static final CharSequence SPARK_TASK = UTF8BytesString.create("spark.task"); |
20 | 15 | public static final CharSequence SPARK = UTF8BytesString.create("spark"); |
21 | 16 | public static SparkExecutorDecorator DECORATE = new SparkExecutorDecorator(); |
22 | 17 | private final String propSparkAppName = "spark.app.name"; |
23 | 18 | private static final String TASK_DESCRIPTION_CLASSNAME = |
24 | 19 | "org.apache.spark.scheduler.TaskDescription"; |
25 | | - private static final MethodHandles.Lookup lookup = MethodHandles.lookup(); |
26 | | - private static final MethodHandle propertiesField_mh; |
27 | | - |
28 | | - private static Class<?> initClass() { |
29 | | - try { |
30 | | - return Class.forName( |
31 | | - SparkExecutorDecorator.TASK_DESCRIPTION_CLASSNAME, |
32 | | - false, |
33 | | - SparkExecutorDecorator.class.getClassLoader()); |
34 | | - } catch (ClassNotFoundException e) { |
35 | | - log.debug("Can't find class '{}'", TASK_DESCRIPTION_CLASSNAME, e); |
36 | | - } |
37 | | - return null; |
38 | | - } |
| 20 | + private static final MethodHandle propertiesField_mh = getFieldGetter(); |
39 | 21 |
40 | 22 | private static MethodHandle getFieldGetter() { |
41 | | - Class<?> cls = initClass(); |
42 | | - |
43 | 23 | try { |
44 | | - if (cls != null) { |
45 | | - Field field = cls.getDeclaredField("properties"); |
46 | | - field.setAccessible(true); |
47 | | - |
48 | | - return lookup.unreflectGetter(field); |
49 | | - } |
50 | | - |
51 | | - } catch (NoSuchFieldException | IllegalAccessException e) { |
52 | | - log.debug("Can't find and unreflect declared field for '{}'", TASK_DESCRIPTION_CLASSNAME); |
| 24 | + return new MethodHandles(Executor.class.getClassLoader()) |
| 25 | + .privateFieldGetter(TASK_DESCRIPTION_CLASSNAME, "properties"); |
| 26 | + } catch (Throwable ignored) { |
| 27 | + // should be already logged |
53 | 28 | } |
54 | | - |
55 | 29 | return null; |
56 | 30 | } |
57 | 31 |
58 | | - static { |
59 | | - propertiesField_mh = getFieldGetter(); |
60 | | - } |
61 | | - |
62 | 32 | @Override |
63 | 33 | protected String[] instrumentationNames() { |
64 | 34 | return new String[] {"spark-executor"}; |
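For context on the change above: the new datadog.trace.util.MethodHandles helper, imported in place of java.lang.invoke.MethodHandles and java.lang.reflect.Field, is assumed to bundle the reflection steps the removed code performed by hand (resolve org.apache.spark.scheduler.TaskDescription on the supplied class loader, make its private "properties" field accessible, and unreflect a getter handle) and to log lookup failures itself, which is why the new catch block only notes that the error should be already logged. A minimal sketch of that assumed shape, not the actual utility:

import java.lang.invoke.MethodHandle;
import java.lang.reflect.Field;

// Sketch only: assumed behavior of the datadog.trace.util.MethodHandles helper
// used above; the real implementation may differ (caching, logging, overloads).
final class MethodHandlesSketch {
  private final ClassLoader classLoader;

  MethodHandlesSketch(ClassLoader classLoader) {
    this.classLoader = classLoader;
  }

  MethodHandle privateFieldGetter(String className, String fieldName) {
    try {
      Class<?> clazz = Class.forName(className, false, classLoader);
      Field field = clazz.getDeclaredField(fieldName);
      field.setAccessible(true);
      // An unreflected getter handle reads the private field without further access checks.
      return java.lang.invoke.MethodHandles.lookup().unreflectGetter(field);
    } catch (Throwable t) {
      // Assumed: the real helper logs this failure, hence "should be already logged" above.
      return null;
    }
  }
}

Moving this logic into a shared utility is what lets the decorator drop its own Logger and the explicit static initializer, so propertiesField_mh can be assigned directly in its field declaration.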