Skip to content

Commit 9f58d3b

Browse files
maryannxue and gatorsmile
authored and committed
[SPARK-27236][TEST] Refactor log-appender pattern in tests
## What changes were proposed in this pull request?

Refactored code in tests regarding the "withLogAppender()" pattern by creating a general helper method in SparkFunSuite.

## How was this patch tested?

Passed existing tests.

Closes apache#24172 from maryannxue/log-appender.

Authored-by: maryannxue <[email protected]>
Signed-off-by: gatorsmile <[email protected]>
1 parent 80565ce commit 9f58d3b

File tree

4 files changed

+30
-30
lines changed

4 files changed

+30
-30
lines changed

core/src/test/scala/org/apache/spark/SparkFunSuite.scala

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ package org.apache.spark
2020
// scalastyle:off
2121
import java.io.File
2222

23+
import org.apache.log4j.{Appender, Level, Logger}
2324
import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}
2425

2526
import org.apache.spark.internal.Logging
@@ -117,4 +118,28 @@ abstract class SparkFunSuite
117118
Utils.deleteRecursively(dir)
118119
}
119120
}
/**
 * Runs `f` with `appender` temporarily attached to a log4j logger, optionally overriding
 * the logger's level for the duration of the call.
 *
 * The target logger is the one named `loggerName` when given, otherwise the root logger.
 * On exit (normal or exceptional) the appender is detached, and the original level is
 * restored whenever a temporary level had been applied.
 *
 * @param appender   the log4j appender to attach for the duration of `f`
 * @param loggerName name of the logger to attach to; `None` means the root logger
 * @param level      optional level to set while `f` runs; restored afterwards
 * @param f          the code to execute while the appender is attached
 */
protected def withLogAppender(
    appender: Appender,
    loggerName: Option[String] = None,
    level: Option[Level] = None)(
    f: => Unit): Unit = {
  // Resolve the target logger: named logger if requested, root logger otherwise.
  val logger = loggerName.fold(Logger.getRootLogger)(Logger.getLogger)
  // Capture the current level up front so it can be restored after `f` completes.
  val savedLevel = logger.getLevel
  logger.addAppender(appender)
  level.foreach(logger.setLevel)
  try {
    f
  } finally {
    logger.removeAppender(appender)
    // Only restore when we actually overrode the level above.
    level.foreach(_ => logger.setLevel(savedLevel))
  }
}
120145
}

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala

Lines changed: 1 addition & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -529,7 +529,7 @@ class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper {
529529
}
530530

531531
val appender = new MockAppender()
532-
withLogAppender(appender) {
532+
withLogAppender(appender, loggerName = Some(classOf[CodeGenerator[_, _]].getName)) {
533533
val x = 42
534534
val expr = HugeCodeIntExpression(x)
535535
val proj = GenerateUnsafeProjection.generate(Seq(expr))
@@ -538,15 +538,6 @@ class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper {
538538
}
539539
assert(appender.seenMessage)
540540
}
541-
542-
private def withLogAppender(appender: Appender)(f: => Unit): Unit = {
543-
val logger =
544-
Logger.getLogger(classOf[CodeGenerator[_, _]].getName)
545-
logger.addAppender(appender)
546-
try f finally {
547-
logger.removeAppender(appender)
548-
}
549-
}
550541
}
551542

552543
case class HugeCodeIntExpression(value: Int) extends Expression {

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerLoggingSuite.scala

Lines changed: 2 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -51,20 +51,10 @@ class OptimizerLoggingSuite extends PlanTest {
5151
override def requiresLayout(): Boolean = false
5252
}
5353

54-
private def withLogLevelAndAppender(level: Level, appender: Appender)(f: => Unit): Unit = {
55-
val logger = Logger.getLogger(Optimize.getClass.getName.dropRight(1))
56-
val restoreLevel = logger.getLevel
57-
logger.setLevel(level)
58-
logger.addAppender(appender)
59-
try f finally {
60-
logger.setLevel(restoreLevel)
61-
logger.removeAppender(appender)
62-
}
63-
}
64-
6554
private def verifyLog(expectedLevel: Level, expectedRules: Seq[String]): Unit = {
6655
val logAppender = new MockAppender()
67-
withLogLevelAndAppender(Level.TRACE, logAppender) {
56+
withLogAppender(logAppender,
57+
loggerName = Some(Optimize.getClass.getName.dropRight(1)), level = Some(Level.TRACE)) {
6858
val input = LocalRelation('a.int, 'b.string, 'c.double)
6959
val query = input.select('a, 'b).select('a).where('a > 1).analyze
7060
val expected = input.where('a > 1).select('a).analyze

sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1697,21 +1697,17 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils with Te
16971697
}
16981698

16991699
val testAppender1 = new TestAppender
1700-
LogManager.getRootLogger.addAppender(testAppender1)
1701-
try {
1700+
withLogAppender(testAppender1) {
17021701
val ds = Seq("columnA,columnB", "1.0,1000.0").toDS()
17031702
val ischema = new StructType().add("columnB", DoubleType).add("columnA", DoubleType)
17041703

17051704
spark.read.schema(ischema).option("header", true).option("enforceSchema", true).csv(ds)
1706-
} finally {
1707-
LogManager.getRootLogger.removeAppender(testAppender1)
17081705
}
17091706
assert(testAppender1.events.asScala
17101707
.exists(msg => msg.getRenderedMessage.contains("CSV header does not conform to the schema")))
17111708

17121709
val testAppender2 = new TestAppender
1713-
LogManager.getRootLogger.addAppender(testAppender2)
1714-
try {
1710+
withLogAppender(testAppender2) {
17151711
withTempPath { path =>
17161712
val oschema = new StructType().add("f1", DoubleType).add("f2", DoubleType)
17171713
val odf = spark.createDataFrame(List(Row(1.0, 1234.5)).asJava, oschema)
@@ -1724,8 +1720,6 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils with Te
17241720
.csv(path.getCanonicalPath)
17251721
.collect()
17261722
}
1727-
} finally {
1728-
LogManager.getRootLogger.removeAppender(testAppender2)
17291723
}
17301724
assert(testAppender2.events.asScala
17311725
.exists(msg => msg.getRenderedMessage.contains("CSV header does not conform to the schema")))

0 commit comments

Comments (0)