
Commit 174531c

HeartSaVioR authored and srowen committed

[MINOR][CORE] Leverage modified Utils.classForName to reduce scalastyle off for Class.forName

## What changes were proposed in this pull request?

This patch modifies Utils.classForName to take optional parameters, initialize and noSparkClassLoader, so that callers that currently use Class.forName with the thread context classloader can call it instead. This helps reduce the number of scalastyle:off exceptions for Class.forName.

## How was this patch tested?

Existing UTs.

Closes apache#24148 from HeartSaVioR/MINOR-reduce-scalastyle-off-for-class-forname.

Authored-by: Jungtaek Lim (HeartSaVioR) <[email protected]>
Signed-off-by: Sean Owen <[email protected]>

1 parent a529be2, commit 174531c
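
To illustrate the intent, a typical call site changes roughly as follows (a representative sketch based on the description above, not an excerpt from the patch):

// Before: direct Class.forName against the thread context classloader,
// which required suppressing the scalastyle classforname check.
// scalastyle:off classforname
val clazz = Class.forName(className, true, Thread.currentThread().getContextClassLoader)
// scalastyle:on classforname

// After: the shared helper accepts the same options, so no exception is needed.
val clazz2 = Utils.classForName(className, noSparkClassLoader = true)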

File tree

6 files changed: +41 -54 lines

core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
core/src/main/scala/org/apache/spark/util/Utils.scala
core/src/test/scala/org/apache/spark/FileSuite.scala
core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
core/src/test/scala/org/apache/spark/util/MutableURLClassLoaderSuite.scala

core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala

Lines changed: 16 additions & 19 deletions

@@ -130,7 +130,6 @@ class KryoSerializer(conf: SparkConf)
     val kryo = instantiator.newKryo()
     kryo.setRegistrationRequired(registrationRequired)
 
-    val oldClassLoader = Thread.currentThread.getContextClassLoader
     val classLoader = defaultClassLoader.getOrElse(Thread.currentThread.getContextClassLoader)
 
     // Allow disabling Kryo reference tracking if user knows their object graphs don't have loops.
@@ -156,24 +155,22 @@ class KryoSerializer(conf: SparkConf)
     kryo.register(classOf[GenericRecord], new GenericAvroSerializer(avroSchemas))
     kryo.register(classOf[GenericData.Record], new GenericAvroSerializer(avroSchemas))
 
-    try {
-      // scalastyle:off classforname
-      // Use the default classloader when calling the user registrator.
-      Thread.currentThread.setContextClassLoader(classLoader)
-      // Register classes given through spark.kryo.classesToRegister.
-      classesToRegister
-        .foreach { className => kryo.register(Class.forName(className, true, classLoader)) }
-      // Allow the user to register their own classes by setting spark.kryo.registrator.
-      userRegistrators
-        .map(Class.forName(_, true, classLoader).getConstructor().
-          newInstance().asInstanceOf[KryoRegistrator])
-        .foreach { reg => reg.registerClasses(kryo) }
-      // scalastyle:on classforname
-    } catch {
-      case e: Exception =>
-        throw new SparkException(s"Failed to register classes with Kryo", e)
-    } finally {
-      Thread.currentThread.setContextClassLoader(oldClassLoader)
+    // Use the default classloader when calling the user registrator.
+    Utils.withContextClassLoader(classLoader) {
+      try {
+        // Register classes given through spark.kryo.classesToRegister.
+        classesToRegister.foreach { className =>
+          kryo.register(Utils.classForName(className, noSparkClassLoader = true))
+        }
+        // Allow the user to register their own classes by setting spark.kryo.registrator.
+        userRegistrators
+          .map(Utils.classForName(_, noSparkClassLoader = true).getConstructor().
+            newInstance().asInstanceOf[KryoRegistrator])
+          .foreach { reg => reg.registerClasses(kryo) }
+      } catch {
+        case e: Exception =>
+          throw new SparkException(s"Failed to register classes with Kryo", e)
+      }
     }
 
     // Register Chill's classes; we do this after our ranges and the user's own classes to let
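
Utils.withContextClassLoader, used above, is not part of this diff; judging from the set-and-restore code it replaces, it presumably behaves roughly like the sketch below (signature assumed from the call site):

// Hypothetical sketch of the helper invoked above; the real implementation
// lives in org.apache.spark.util.Utils and may differ in detail.
def withContextClassLoader[T](classLoader: ClassLoader)(fn: => T): T = {
  val oldClassLoader = Thread.currentThread().getContextClassLoader
  try {
    // Run the block with the supplied classloader installed on the thread.
    Thread.currentThread().setContextClassLoader(classLoader)
    fn
  } finally {
    // Always restore the previous loader, mirroring the removed try/finally.
    Thread.currentThread().setContextClassLoader(oldClassLoader)
  }
}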

core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala

Lines changed: 4 additions & 10 deletions

@@ -378,10 +378,8 @@ private[spark] object ClosureCleaner extends Logging {
     } else {
       logDebug(s"Cleaning lambda: ${lambdaFunc.get.getImplMethodName}")
 
-      // scalastyle:off classforname
-      val captClass = Class.forName(lambdaFunc.get.getCapturingClass.replace('/', '.'),
-        false, Thread.currentThread.getContextClassLoader)
-      // scalastyle:on classforname
+      val captClass = Utils.classForName(lambdaFunc.get.getCapturingClass.replace('/', '.'),
+        initialize = false, noSparkClassLoader = true)
       // Fail fast if we detect return statements in closures
       getClassReader(captClass)
         .accept(new ReturnStatementFinder(Some(lambdaFunc.get.getImplMethodName)), 0)
@@ -547,12 +545,8 @@ private class InnerClosureFinder(output: Set[Class[_]]) extends ClassVisitor(ASM
       if (op == INVOKESPECIAL && name == "<init>" && argTypes.length > 0
         && argTypes(0).toString.startsWith("L") // is it an object?
         && argTypes(0).getInternalName == myName) {
-        // scalastyle:off classforname
-        output += Class.forName(
-          owner.replace('/', '.'),
-          false,
-          Thread.currentThread.getContextClassLoader)
-        // scalastyle:on classforname
+        output += Utils.classForName(owner.replace('/', '.'),
+          initialize = false, noSparkClassLoader = true)
       }
     }
   }
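
The initialize = false argument matters in both hunks: ClosureCleaner only needs the Class object for bytecode inspection, so loading should not trigger the class's static initializers. A small illustration (the class name is a placeholder):

// Loads the class for inspection only: static initializers do not run.
val inspected = Utils.classForName(
  "com.example.SomeCapturedClass",  // placeholder, not from the patch
  initialize = false,
  noSparkClassLoader = true)

// The default initialize = true would also run static initializers,
// which inspection code like ClosureCleaner neither needs nor wants.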

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 14 additions & 6 deletions

@@ -189,15 +189,23 @@ private[spark] object Utils extends Logging {
 
   /** Determines whether the provided class is loadable in the current thread. */
   def classIsLoadable(clazz: String): Boolean = {
-    // scalastyle:off classforname
-    Try { Class.forName(clazz, false, getContextOrSparkClassLoader) }.isSuccess
-    // scalastyle:on classforname
+    Try { classForName(clazz, initialize = false) }.isSuccess
   }
 
   // scalastyle:off classforname
-  /** Preferred alternative to Class.forName(className) */
-  def classForName(className: String): Class[_] = {
-    Class.forName(className, true, getContextOrSparkClassLoader)
+  /**
+   * Preferred alternative to Class.forName(className), as well as
+   * Class.forName(className, initialize, loader) with current thread's ContextClassLoader.
+   */
+  def classForName(
+      className: String,
+      initialize: Boolean = true,
+      noSparkClassLoader: Boolean = false): Class[_] = {
+    if (!noSparkClassLoader) {
+      Class.forName(className, initialize, getContextOrSparkClassLoader)
+    } else {
+      Class.forName(className, initialize, Thread.currentThread().getContextClassLoader)
+    }
     // scalastyle:on classforname
   }
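
With the defaults above, existing callers of Utils.classForName keep their behavior, while the new flags cover the two patterns seen in this commit. A brief usage sketch (class names are placeholders, not from the patch):

// Default behavior: initialize the class and resolve it through
// getContextOrSparkClassLoader, exactly as before this change.
val registrator = Utils.classForName("com.example.MyRegistrator")

// Load for inspection only: skip static initializers.
val inspected = Utils.classForName("com.example.MyClosure", initialize = false)

// Resolve strictly via the current thread's context classloader, e.g. when
// the class lives in a user JAR installed on that loader.
val userClass = Utils.classForName("com.example.UserClass", noSparkClassLoader = true)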

core/src/test/scala/org/apache/spark/FileSuite.scala

Lines changed: 4 additions & 11 deletions

@@ -200,30 +200,23 @@ class FileSuite extends SparkFunSuite with LocalSparkContext {
   }
 
   test("object files of classes from a JAR") {
-    // scalastyle:off classforname
-    val original = Thread.currentThread().getContextClassLoader
     val className = "FileSuiteObjectFileTest"
     val jar = TestUtils.createJarWithClasses(Seq(className))
     val loader = new java.net.URLClassLoader(Array(jar), Utils.getContextOrSparkClassLoader)
-    Thread.currentThread().setContextClassLoader(loader)
-    try {
+
+    Utils.withContextClassLoader(loader) {
       sc = new SparkContext("local", "test")
       val objs = sc.makeRDD(1 to 3).map { x =>
-        val loader = Thread.currentThread().getContextClassLoader
-        Class.forName(className, true, loader).getConstructor().newInstance()
+        Utils.classForName(className, noSparkClassLoader = true).getConstructor().newInstance()
      }
       val outputDir = new File(tempDir, "output").getAbsolutePath
       objs.saveAsObjectFile(outputDir)
       // Try reading the output back as an object file
-      val ct = reflect.ClassTag[Any](Class.forName(className, true, loader))
+      val ct = reflect.ClassTag[Any](Utils.classForName(className, noSparkClassLoader = true))
       val output = sc.objectFile[Any](outputDir)
       assert(output.collect().size === 3)
       assert(output.collect().head.getClass.getName === className)
     }
-    finally {
-      Thread.currentThread().setContextClassLoader(original)
-    }
-    // scalastyle:on classforname
   }
 
   test("write SequenceFile using new Hadoop API") {

core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala

Lines changed: 2 additions & 4 deletions

@@ -57,10 +57,8 @@ object KryoDistributedTest {
 
   class AppJarRegistrator extends KryoRegistrator {
    override def registerClasses(k: Kryo) {
-      val classLoader = Thread.currentThread.getContextClassLoader
-      // scalastyle:off classforname
-      k.register(Class.forName(AppJarRegistrator.customClassName, true, classLoader))
-      // scalastyle:on classforname
+      k.register(Utils.classForName(AppJarRegistrator.customClassName,
+        noSparkClassLoader = true))
    }
  }

core/src/test/scala/org/apache/spark/util/MutableURLClassLoaderSuite.scala

Lines changed: 1 addition & 4 deletions

@@ -134,10 +134,7 @@ class MutableURLClassLoaderSuite extends SparkFunSuite with Matchers {
 
     try {
      sc.makeRDD(1 to 5, 2).mapPartitions { x =>
-        val loader = Thread.currentThread().getContextClassLoader
-        // scalastyle:off classforname
-        Class.forName(className, true, loader).getConstructor().newInstance()
-        // scalastyle:on classforname
+        Utils.classForName(className, noSparkClassLoader = true).getConstructor().newInstance()
        Seq().iterator
      }.count()
    }
