
Commit 56a0b54

Marcelo Vanzin authored and HyukjinKwon committed
[SPARK-29399][CORE] Remove old ExecutorPlugin interface
SPARK-29397 added new interfaces for creating driver and executor plugins. These were added in a new, more isolated package that does not pollute the main o.a.s package. The old interface is now redundant. Since it's a DeveloperApi and we're about to have a new major release, let's remove it instead of carrying more baggage forward.

Closes apache#26390 from vanzin/SPARK-29399.

Authored-by: Marcelo Vanzin <[email protected]>
Signed-off-by: HyukjinKwon <[email protected]>
1 parent 45e212e commit 56a0b54
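
For reference, a minimal sketch of a plugin written against the replacement API introduced by SPARK-29397 (org.apache.spark.api.plugin). The class MyMetricsPlugin is a hypothetical example, and the method signatures are assumptions based on the Spark 3.0 plugin API rather than part of this commit:

import java.util.{Map => JMap}

import org.apache.spark.api.plugin.{DriverPlugin, ExecutorPlugin, PluginContext, SparkPlugin}

// Hypothetical plugin; illustrates the two optional components described
// in the SparkPlugin javadoc touched by this commit.
class MyMetricsPlugin extends SparkPlugin {

  // Driver-side component: a single instance is created per application.
  override def driverPlugin(): DriverPlugin = new DriverPlugin {
    override def shutdown(): Unit = {
      // Release any driver-side resources here.
    }
  }

  // Executor-side component: one instance is created inside each executor.
  override def executorPlugin(): ExecutorPlugin = new ExecutorPlugin {
    override def init(ctx: PluginContext, extraConf: JMap[String, String]): Unit = {
      // The plugin context exposes a metric registry for plugin metrics
      // (an assumption based on the 3.0 API).
      ctx.metricRegistry().counter("inits").inc()
    }
  }
}

Such a plugin would be enabled through the new spark.plugins configuration rather than the removed spark.executor.plugins key (see the deprecation hunks below).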

File tree

11 files changed, +20 -379 lines changed


core/src/main/java/org/apache/spark/ExecutorPlugin.java

Lines changed: 0 additions & 60 deletions
This file was deleted.

core/src/main/java/org/apache/spark/ExecutorPluginContext.java

Lines changed: 0 additions & 50 deletions
This file was deleted.

core/src/main/java/org/apache/spark/api/plugin/SparkPlugin.java

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@
  * A plugin that can be dynamically loaded into a Spark application.
  * <p>
  * Plugins can be loaded by adding the plugin's class name to the appropriate Spark configuration.
- * Check the Spark configuration documentation for details.
+ * Check the Spark monitoring guide for details.
  * <p>
  * Plugins have two optional components: a driver-side component, of which a single instance is
  * created per application, inside the Spark driver. And an executor-side component, of which one

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 3 additions & 1 deletion
@@ -619,7 +619,9 @@ private[spark] object SparkConf extends Logging {
       "Not used anymore. Please use spark.shuffle.service.index.cache.size"),
     DeprecatedConfig("spark.yarn.credentials.file.retention.count", "2.4.0", "Not used anymore."),
     DeprecatedConfig("spark.yarn.credentials.file.retention.days", "2.4.0", "Not used anymore."),
-    DeprecatedConfig("spark.yarn.services", "3.0.0", "Feature no longer available.")
+    DeprecatedConfig("spark.yarn.services", "3.0.0", "Feature no longer available."),
+    DeprecatedConfig("spark.executor.plugins", "3.0.0",
+      "Feature replaced with new plugin API. See Monitoring documentation.")
   )

   Map(configs.map { cfg => (cfg.key -> cfg) } : _*)
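
As an illustration of the migration the deprecation message points at, a hedged before/after sketch; the class names are placeholders, and spark.plugins is the key read by the new plugin framework:

import org.apache.spark.SparkConf

// Before (deprecated in 3.0.0): executor-only plugins via the old interface.
val oldConf = new SparkConf()
  .set("spark.executor.plugins", "com.example.MyOldExecutorPlugin")

// After: the new plugin API, covering both driver and executor components.
val newConf = new SparkConf()
  .set("spark.plugins", "com.example.MyMetricsPlugin")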

core/src/main/scala/org/apache/spark/executor/Executor.scala

Lines changed: 0 additions & 37 deletions
@@ -137,35 +137,6 @@ private[spark] class Executor(
   // for fetching remote cached RDD blocks, so need to make sure it uses the right classloader too.
   env.serializerManager.setDefaultClassLoader(replClassLoader)

-  private val executorPlugins: Seq[ExecutorPlugin] = {
-    val pluginNames = conf.get(EXECUTOR_PLUGINS)
-    if (pluginNames.nonEmpty) {
-      logInfo(s"Initializing the following plugins: ${pluginNames.mkString(", ")}")
-
-      // Plugins need to load using a class loader that includes the executor's user classpath
-      val pluginList: Seq[ExecutorPlugin] =
-        Utils.withContextClassLoader(replClassLoader) {
-          val plugins = Utils.loadExtensions(classOf[ExecutorPlugin], pluginNames, conf)
-          plugins.foreach { plugin =>
-            val pluginSource = new ExecutorPluginSource(plugin.getClass().getSimpleName())
-            val pluginContext = new ExecutorPluginContext(pluginSource.metricRegistry, conf,
-              executorId, executorHostname, isLocal)
-            plugin.init(pluginContext)
-            logInfo("Successfully loaded plugin " + plugin.getClass().getCanonicalName())
-            if (pluginSource.metricRegistry.getNames.size() > 0) {
-              env.metricsSystem.registerSource(pluginSource)
-            }
-          }
-          plugins
-        }
-
-      logInfo("Finished initializing plugins")
-      pluginList
-    } else {
-      Nil
-    }
-  }
-
   // Plugins need to load using a class loader that includes the executor's user classpath
   private val plugins: Option[PluginContainer] = Utils.withContextClassLoader(replClassLoader) {
     PluginContainer(env)
@@ -295,14 +266,6 @@

     // Notify plugins that executor is shutting down so they can terminate cleanly
     Utils.withContextClassLoader(replClassLoader) {
-      executorPlugins.foreach { plugin =>
-        try {
-          plugin.shutdown()
-        } catch {
-          case e: Exception =>
-            logWarning("Plugin " + plugin.getClass().getCanonicalName() + " shutdown failed", e)
-        }
-      }
       plugins.foreach(_.shutdown())
     }
     if (!isLocal) {

core/src/main/scala/org/apache/spark/executor/ExecutorPluginSource.scala

Lines changed: 0 additions & 30 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/internal/config/package.scala

Lines changed: 0 additions & 10 deletions
@@ -1193,16 +1193,6 @@ package object config {
       .toSequence
       .createWithDefault(Nil)

-  private[spark] val EXECUTOR_PLUGINS =
-    ConfigBuilder("spark.executor.plugins")
-      .doc("Comma-separated list of class names for \"plugins\" implementing " +
-        "org.apache.spark.ExecutorPlugin. Plugins have the same privileges as any task " +
-        "in a Spark executor. They can also interfere with task execution and fail in " +
-        "unexpected ways. So be sure to only use this for trusted plugins.")
-      .stringConf
-      .toSequence
-      .createWithDefault(Nil)
-
   private[spark] val CLEANER_PERIODIC_GC_INTERVAL =
     ConfigBuilder("spark.cleaner.periodicGC.interval")
       .timeConf(TimeUnit.SECONDS)
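
The replacement config entry is not part of this diff; purely as an illustration, a definition for the new key could look roughly like the following (the actual doc text and location in the Spark source may differ):

private[spark] val PLUGINS =
  ConfigBuilder("spark.plugins")
    .doc("Comma-separated list of class names implementing " +
      "org.apache.spark.api.plugin.SparkPlugin to load into the application.")
    .stringConf
    .toSequence
    .createWithDefault(Nil)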
