Skip to content

Commit ab9bf52

Browse files
Feature/253 remove log4j2 (#256)
* Remove log4j2
* Log4j is not configured initially, only after some time
* Use Logging trait to ensure initialization of log config
1 parent c7cad81 commit ab9bf52

File tree

36 files changed

+87
-311
lines changed

36 files changed

+87
-311
lines changed

component-archetype/src/main/resources/archetype-resources/src/main/scala/reader/mycomponent/MyStreamReaderImpl.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
package ${package}.reader.mycomponent
1919

2020
import org.apache.commons.configuration2.Configuration
21-
import org.apache.logging.log4j.LogManager
21+
import org.slf4j.LoggerFactory
2222
import org.apache.spark.sql.{DataFrame, SparkSession}
2323
import za.co.absa.hyperdrive.ingestor.api.reader.{StreamReader, StreamReaderFactory, StreamReaderFactoryProvider}
2424
import za.co.absa.hyperdrive.ingestor.api.{HasComponentAttributes, PropertyMetadata}
@@ -33,7 +33,7 @@ private[reader] class MyStreamReaderImpl() extends StreamReader {
3333
}
3434

3535
object MyStreamReaderImpl extends StreamReaderFactory with MyStreamReaderImplAttributes {
36-
private val logger = LogManager.getLogger
36+
private val logger = LoggerFactory.getLogger(this.getClass)
3737

3838
override def apply(conf: Configuration): StreamReader = {
3939
logger.info("Building MyStreamReaderImpl")

component-archetype/src/main/resources/archetype-resources/src/main/scala/transformer/mycomponent/MyStreamTransformerImpl.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
package ${package}.transformer.mycomponent
1919

2020
import org.apache.commons.configuration2.Configuration
21-
import org.apache.logging.log4j.LogManager
21+
import org.slf4j.LoggerFactory
2222
import org.apache.spark.sql.DataFrame
2323
import za.co.absa.hyperdrive.ingestor.api.transformer.{StreamTransformer, StreamTransformerFactory, StreamTransformerFactoryProvider}
2424
import za.co.absa.hyperdrive.ingestor.api.{HasComponentAttributes, PropertyMetadata}
@@ -33,7 +33,7 @@ private[transformer] class MyStreamTransformerImpl() extends StreamTransformer {
3333
}
3434

3535
object MyStreamTransformerImpl extends StreamTransformerFactory with MyStreamTransformerImplAttributes {
36-
private val logger = LogManager.getLogger
36+
private val logger = LoggerFactory.getLogger(this.getClass)
3737

3838
override def apply(conf: Configuration): StreamTransformer = {
3939
logger.info("Building MyStreamTransformerImpl")

component-archetype/src/main/resources/archetype-resources/src/main/scala/writer/mycomponent/MyStreamWriterImpl.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
package ${package}.writer.mycomponent
1919

2020
import org.apache.commons.configuration2.Configuration
21-
import org.apache.logging.log4j.LogManager
21+
import org.slf4j.LoggerFactory
2222
import org.apache.spark.sql.DataFrame
2323
import org.apache.spark.sql.streaming.StreamingQuery
2424
import za.co.absa.hyperdrive.ingestor.api.writer.{StreamWriter, StreamWriterFactory, StreamWriterFactoryProvider}
@@ -34,7 +34,7 @@ private[writer] class MyStreamWriterImpl(val destination: String) extends Stream
3434
}
3535

3636
object MyStreamWriterImpl extends StreamWriterFactory with MyStreamWriterImplAttributes {
37-
private val logger = LogManager.getLogger
37+
private val logger = LoggerFactory.getLogger(this.getClass)
3838

3939
override def apply(conf: Configuration): StreamWriter = {
4040
logger.info("Building MyStreamWriterImpl")

component-scanner/src/main/resources/log4j2.xml

Lines changed: 0 additions & 28 deletions
This file was deleted.

component-scanner/src/main/scala/za/co/absa/hyperdrive/scanner/ComponentScanner.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ import java.net.URLClassLoader
1919
import java.nio.file.{Files, Path}
2020
import java.util.ServiceLoader
2121

22-
import org.apache.logging.log4j.LogManager
22+
import org.slf4j.LoggerFactory
2323
import za.co.absa.hyperdrive.ingestor.api.reader.{StreamReaderFactory, StreamReaderFactoryProvider}
2424
import za.co.absa.hyperdrive.ingestor.api.transformer.{StreamTransformerFactory, StreamTransformerFactoryProvider}
2525
import za.co.absa.hyperdrive.ingestor.api.writer.{StreamWriterFactory, StreamWriterFactoryProvider}
@@ -37,7 +37,7 @@ case class ComponentDescriptor(attributes: HasComponentAttributes,
3737
jarPath: Path)
3838

3939
object ComponentScanner {
40-
private val logger = LogManager.getLogger
40+
private val logger = LoggerFactory.getLogger(this.getClass)
4141
private val jarSuffix = ".jar"
4242

4343
def getComponents(baseDirectory: Path): Try[ComponentDescriptors] = getComponents(List(baseDirectory))

driver/src/main/resources/log4j2.xml

Lines changed: 0 additions & 28 deletions
This file was deleted.

driver/src/main/scala/za/co/absa/hyperdrive/driver/IngestionDriver.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
package za.co.absa.hyperdrive.driver
1717

1818
import org.apache.commons.configuration2.Configuration
19-
import org.apache.logging.log4j.LogManager
19+
import org.slf4j.LoggerFactory
2020
import za.co.absa.hyperdrive.ingestor.api.reader.StreamReader
2121
import za.co.absa.hyperdrive.ingestor.api.transformer.StreamTransformer
2222
import za.co.absa.hyperdrive.ingestor.api.writer.StreamWriter
@@ -25,7 +25,7 @@ import za.co.absa.hyperdrive.ingestor.implementation.transformer.factories.Strea
2525
import za.co.absa.hyperdrive.ingestor.implementation.writer.factories.StreamWriterAbstractFactory
2626

2727
private[driver] class IngestionDriver {
28-
private val logger = LogManager.getLogger
28+
private val logger = LoggerFactory.getLogger(this.getClass)
2929
val ListDelimiter = ','
3030

3131
def ingest(configuration: Configuration): Unit = {

driver/src/main/scala/za/co/absa/hyperdrive/driver/SparkIngestor.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ package za.co.absa.hyperdrive.driver
1818
import java.util.UUID
1919

2020
import org.apache.commons.configuration2.Configuration
21-
import org.apache.logging.log4j.LogManager
21+
import org.slf4j.LoggerFactory
2222
import org.apache.spark.sql.SparkSession
2323
import za.co.absa.hyperdrive.driver.TerminationMethodEnum.{AwaitTermination, ProcessAllAvailable, TerminationMethod}
2424
import za.co.absa.hyperdrive.ingestor.api.reader.StreamReader
@@ -39,7 +39,7 @@ class SparkIngestor(val spark: SparkSession,
3939
val awaitTerminationTimeout: Option[Long],
4040
val conf: Configuration) {
4141

42-
private val logger = LogManager.getLogger
42+
private val logger = LoggerFactory.getLogger(this.getClass)
4343

4444
/**
4545
* This method performs the ingestion according to the components it receives.
@@ -100,7 +100,7 @@ class SparkIngestor(val spark: SparkSession,
100100

101101
object SparkIngestor extends SparkIngestorAttributes {
102102

103-
private val logger = LogManager.getLogger
103+
private val logger = LoggerFactory.getLogger(this.getClass)
104104

105105
def apply(conf: Configuration): SparkIngestor = {
106106
ComponentFactoryUtil.validateConfiguration(conf, getProperties)

driver/src/main/scala/za/co/absa/hyperdrive/driver/drivers/CommandLineIngestionDriver.scala

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,25 +19,24 @@ import org.apache.commons.configuration2.builder.BasicConfigurationBuilder
1919
import org.apache.commons.configuration2.builder.fluent.Parameters
2020
import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler
2121
import org.apache.commons.configuration2.{BaseConfiguration, Configuration}
22-
import org.apache.logging.log4j.LogManager
22+
import org.apache.spark.internal.Logging
2323
import za.co.absa.hyperdrive.driver.IngestionDriver
2424
import za.co.absa.hyperdrive.driver.utils.DriverUtil
2525

26-
object CommandLineIngestionDriver extends IngestionDriver {
26+
object CommandLineIngestionDriver extends IngestionDriver with Logging {
2727

28-
private val logger = LogManager.getLogger
2928
private val PropertyDelimiter = "="
3029

3130
def main(args: Array[String]): Unit = {
3231
if (args.isEmpty) {
3332
throw new IllegalArgumentException("No configuration provided.")
3433
}
3534

36-
logger.info(s"Starting Hyperdrive ${DriverUtil.getVersionString}")
35+
logInfo(s"Starting Hyperdrive ${DriverUtil.getVersionString}")
3736

38-
logger.info(s"Going to load ${args.length} configurations from command line.")
37+
logInfo(s"Going to load ${args.length} configurations from command line.")
3938
val configuration = parseConfiguration(args)
40-
logger.info("Configuration loaded. Going to invoke ingestion.")
39+
logInfo("Configuration loaded. Going to invoke ingestion.")
4140
ingest(configuration)
4241
}
4342

driver/src/main/scala/za/co/absa/hyperdrive/driver/drivers/PropertiesIngestionDriver.scala

Lines changed: 6 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -16,36 +16,33 @@
1616
package za.co.absa.hyperdrive.driver.drivers
1717

1818
import java.nio.file.{Files, Paths}
19-
2019
import org.apache.commons.configuration2.builder.FileBasedConfigurationBuilder
2120
import org.apache.commons.configuration2.builder.fluent.Parameters
2221
import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler
2322
import org.apache.commons.configuration2.{Configuration, PropertiesConfiguration}
24-
import org.apache.logging.log4j.LogManager
23+
import org.apache.spark.internal.Logging
2524
import za.co.absa.hyperdrive.driver.IngestionDriver
2625
import za.co.absa.hyperdrive.driver.utils.DriverUtil
2726

2827
/**
2928
* This driver launches ingestion by loading the configurations from a properties file.
3029
*/
31-
object PropertiesIngestionDriver extends IngestionDriver {
32-
33-
private val logger = LogManager.getLogger
30+
object PropertiesIngestionDriver extends IngestionDriver with Logging {
3431

3532
def main(args: Array[String]): Unit = {
3633
val propertiesFile = getPropertiesFilePath(args)
3734
if (propertiesFile.isEmpty) {
3835
throw new IllegalArgumentException("No properties file supplied.")
3936
}
40-
logger.info(s"Starting Hyperdrive ${DriverUtil.getVersionString}")
37+
logInfo(s"Starting Hyperdrive ${DriverUtil.getVersionString}")
4138

4239
if (isInvalid(propertiesFile.get)) {
4340
throw new IllegalArgumentException(s"Invalid properties file: '${propertiesFile.get}'.")
4441
}
4542

46-
logger.info(s"Going to load ingestion configurations from '${propertiesFile.get}'.")
43+
logInfo(s"Going to load ingestion configurations from '${propertiesFile.get}'.")
4744
val configurations = loadConfiguration(propertiesFile.get)
48-
logger.info(s"Configurations loaded. Going to invoke ingestion: [$configurations]")
45+
logInfo(s"Configurations loaded. Going to invoke ingestion: [$configurations]")
4946
ingest(configurations)
5047
}
5148

@@ -63,7 +60,7 @@ object PropertiesIngestionDriver extends IngestionDriver {
6360
case v if v == 0 => None
6461
case v =>
6562
if (v > 1) {
66-
logger.warn(s"Expected only properties file path, but got extra parameters. Returning first as the path. All parameters = [${args.mkString(",")}]")
63+
logWarning(s"Expected only properties file path, but got extra parameters. Returning first as the path. All parameters = [${args.mkString(",")}]")
6764
}
6865
Some(args(0))
6966
}

0 commit comments

Comments (0)