Skip to content

Commit 4f3f20d

Browse files
committed
#811 Fix PR suggestions (Thanks @coderabbitai).
1 parent 02705a9 commit 4f3f20d

File tree

3 files changed

+6
-5
lines changed

3 files changed

+6
-5
lines changed

cobol-parser/src/main/scala/za/co/absa/cobrix/cobol/reader/VarLenNestedReader.scala

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ class VarLenNestedReader[T: ClassTag](copybookContents: Seq[String],
                          handler: RecordHandler[T]) extends VarLenReader with Logging with Serializable {
   private val DEFAULT_INDEX_SIZE_COMPRESSED_FILES_MB = 1024
   private val DEFAULT_FS_INDEX_SIZE_MULTIPLIER = 4
+  private val DEFAULT_MAX_FS_BASED_SPLIT_SIZE_MB = 256

   protected val cobolSchema: CobolSchema = loadCopyBook(copybookContents)

@@ -221,10 +222,10 @@ class VarLenNestedReader[T: ClassTag](copybookContents: Seq[String],
     if (isCompressed) {
       readerProperties.inputSplitSizeCompressedMB.orElse(Some(DEFAULT_INDEX_SIZE_COMPRESSED_FILES_MB))
     } else {
-      val defaultIndexSizeBofFsBlock = readerProperties.fsDefaultBlockSize.map { size =>
-        Math.min(size * DEFAULT_FS_INDEX_SIZE_MULTIPLIER, 256)
+      val defaultIndexSizeBasedOnFsBlock = readerProperties.fsDefaultBlockSize.map { size =>
+        Math.min(size * DEFAULT_FS_INDEX_SIZE_MULTIPLIER, DEFAULT_MAX_FS_BASED_SPLIT_SIZE_MB)
       }
-      readerProperties.inputSplitSizeMB.orElse(defaultIndexSizeBofFsBlock)
+      readerProperties.inputSplitSizeMB.orElse(defaultIndexSizeBasedOnFsBlock)
     }
   }

spark-cobol/src/main/scala/za/co/absa/cobrix/spark/cobol/utils/LRUCache.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package za.co.absa.cobrix.spark.cobol.utils
 import scala.collection.JavaConverters._

 class LRUCache[K,V](maxSize: Int, loadFactor: Float = 0.75f) {
-  private val cache = new java.util.LinkedHashMap[K, V](16, loadFactor, true) {
+  private val cache = new java.util.LinkedHashMap[K, V](Math.min(maxSize, 128), loadFactor, true) {
     override def removeEldestEntry(eldest: java.util.Map.Entry[K, V]): Boolean = size() > maxSize
   }

spark-cobol/src/test/scala/za/co/absa/cobrix/spark/cobol/source/integration/Test37RecordLengthMappingSpec.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -201,7 +201,7 @@ class Test37RecordLengthMappingSpec extends AnyWordSpec with SparkTestBase with
       case (k, v) => (k.fileName, v)
     }

-    assert(indexCacheSimplified.get(pathNameAsCached) != null)
+    assert(indexCacheSimplified.contains(pathNameAsCached))
     assert(indexCacheSimplified(pathNameAsCached).length == 2)

     assert(actualInitial == expected)

0 commit comments

Comments (0)