@@ -19,14 +19,17 @@ package za.co.absa.cobrix.spark.cobol.utils
 import org.apache.commons.io.IOUtils
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileSystem, Path}
+import org.slf4j.{Logger, LoggerFactory}
 
 import java.nio.charset.StandardCharsets
 import scala.collection.JavaConverters._
+import scala.util.control.NonFatal
 
 /**
   * This object provides utility methods for interacting with HDFS internals.
   */
 object HDFSUtils {
+  private val log: Logger = LoggerFactory.getLogger(this.getClass)
   final val bytesInMegabyte = 1048576
 
   /**
@@ -72,16 +75,22 @@ object HDFSUtils {
     */
   def getHDFSDefaultBlockSizeMB(fileSystem: FileSystem, path: Option[String] = None): Option[Int] = {
     val hdfsPath = new Path(path.getOrElse("/"))
-    val blockSizeInBytes = fileSystem.getDefaultBlockSize(hdfsPath)
-    if (blockSizeInBytes > 0) {
-      val blockSizeInBM = (blockSizeInBytes / bytesInMegabyte).toInt
-      if (blockSizeInBM > 0) {
-        Some(blockSizeInBM)
+    try {
+      val blockSizeInBytes = fileSystem.getDefaultBlockSize(hdfsPath)
+      if (blockSizeInBytes > 0) {
+        val blockSizeInMB = (blockSizeInBytes / bytesInMegabyte).toInt
+        if (blockSizeInMB > 0) {
+          Some(blockSizeInMB)
+        } else {
+          None
+        }
       } else {
         None
       }
-    } else {
-      None
+    } catch {
+      case NonFatal(ex) =>
+        log.debug(s"Failed to get HDFS default block size for path: $hdfsPath..", ex)
+        None
     }
   }
 
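For context, here is a minimal caller-side sketch (not part of this commit) of how the patched helper might be exercised. The object name BlockSizeExample and the plain default Configuration are assumptions used only for illustration:

    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.fs.FileSystem
    import za.co.absa.cobrix.spark.cobol.utils.HDFSUtils

    object BlockSizeExample {
      def main(args: Array[String]): Unit = {
        // Assumption: the default Configuration resolves to whatever filesystem
        // is configured locally (file:// on a workstation, hdfs:// on a cluster).
        val fs = FileSystem.get(new Configuration())

        // With this commit, a failing getDefaultBlockSize call is caught via
        // NonFatal, logged at DEBUG level, and surfaces as None instead of
        // throwing, so callers only ever deal with Option[Int].
        val blockSizeMb: Option[Int] = HDFSUtils.getHDFSDefaultBlockSizeMB(fs)
        println(s"Default block size: ${blockSizeMb.getOrElse(-1)} MB")
      }
    }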