Commit 19f8600

#780 Ensure streams are closed after index generation.

1 parent d19209a commit 19f8600

File tree

2 files changed: +9 -3 lines changed

cobol-parser/src/main/scala/za/co/absa/cobrix/cobol/processor/CobolProcessor.scala

Lines changed: 3 additions & 2 deletions

@@ -50,12 +50,13 @@ object CobolProcessor {
   private val caseInsensitiveOptions = new mutable.HashMap[String, String]()
 
   def build(): CobolProcessor = {
+    val readerParameters = getReaderParameters
+    val cobolSchema = getCobolSchema(readerParameters)
+
     new CobolProcessor {
       override def process(inputStream: SimpleStream,
                            outputStream: OutputStream)
                           (rawRecordProcessor: RawRecordProcessor): Unit = {
-        val readerParameters = getReaderParameters
-        val cobolSchema = getCobolSchema(readerParameters)
         val recordExtractor = getRecordExtractor(readerParameters, inputStream)
 
         val dataStream = inputStream.copyStream()
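
This change hoists getReaderParameters and getCobolSchema out of process() and into build(), so the reader configuration and the parsed copybook schema are computed once when the processor is built rather than on every process() call. A minimal, self-contained sketch of that pattern follows; all names in it are illustrative stand-ins, not Cobrix APIs:

// Sketch of the change's intent: expensive setup moves from the per-call
// method into the builder, so it runs once per built instance.
trait Processor { def process(record: String): String }

class ProcessorBuilder(copybook: String) {
  // Stand-in for real schema parsing.
  private def parseSchema(cb: String): Int = cb.length

  def build(): Processor = {
    val schema = parseSchema(copybook) // computed once, at build time
    new Processor {
      // Each call reuses the captured schema instead of re-deriving it.
      override def process(record: String): String =
        s"schema=$schema, record=$record"
    }
  }
}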

spark-cobol/src/main/scala/za/co/absa/cobrix/spark/cobol/source/index/IndexBuilder.scala

Lines changed: 6 additions & 1 deletion

@@ -163,7 +163,12 @@ private[source] object IndexBuilder extends Logging {
     logger.info(s"Going to generate index for the file: $filePath")
 
     val (inputStream, headerStream, maximumBytes) = getStreams(filePath, startOffset, endOffset, config)
-    val index = reader.generateIndex(inputStream, headerStream, fileOrder, reader.isRdwBigEndian)
+    val index = try {
+      reader.generateIndex(inputStream, headerStream, fileOrder, reader.isRdwBigEndian)
+    } finally {
+      inputStream.close()
+      headerStream.close()
+    }
 
     val indexWithEndOffset = if (maximumBytes > 0 ){
       index.map(entry => if (entry.offsetTo == -1) entry.copy(offsetTo = startOffset + maximumBytes) else entry)
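
The second change guarantees that both streams opened by getStreams() are closed whether generateIndex() completes or throws. A self-contained sketch of the same try/finally pattern, with illustrative names rather than Cobrix APIs:

// Minimal sketch of the close-in-finally pattern applied by this commit:
// the resource is closed on both the success and the exception path.
def withClosing[S <: AutoCloseable, T](resource: S)(body: S => T): T =
  try body(resource) finally resource.close()

// Usage: the body's result is returned after the stream is closed reliably.
val firstByte = withClosing(new java.io.ByteArrayInputStream(Array[Byte](1, 2, 3))) { in =>
  in.read()
}

One caveat of closing two resources in a single finally block, as the commit does: if the first close() throws, the second is never reached. Nesting the closes, or scala.util.Using on Scala 2.13+, avoids that at the cost of extra ceremony.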
