From be9ee23dd39d353ee9b12cadfddc92a9f02a1d54 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Thu, 10 Apr 2025 15:10:03 +0530 Subject: [PATCH 01/27] Initial commit --- data-loader/build.gradle | 1 + .../dataimport/log/AbstractImportLogger.java | 170 +++++++++++ .../dataimport/log/ImportLoggerConfig.java | 13 + .../dataimport/log/LogStorageLocation.java | 7 + .../log/SingleFileImportLogger.java | 140 +++++++++ .../log/SplitByDataChunkImportLogger.java | 185 ++++++++++++ .../dataimport/log/writer/AwsS3LogWriter.java | 30 ++ .../log/writer/DefaultLogWriterFactory.java | 36 +++ .../log/writer/LocalFileLogWriter.java | 64 +++++ .../dataimport/log/writer/LogFileType.java | 8 + .../core/dataimport/log/writer/LogWriter.java | 15 + .../log/writer/LogWriterFactory.java | 8 + .../log/writer/LogWriterFactoryConfig.java | 15 + .../log/SingleFileImportLoggerTest.java | 271 ++++++++++++++++++ .../log/SplitByDataChunkImportLoggerTest.java | 244 ++++++++++++++++ .../writer/DefaultLogWriterFactoryTest.java | 67 +++++ 16 files changed, 1274 insertions(+) create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogStorageLocation.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogFileType.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactory.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactoryConfig.java create mode 100644 data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java create mode 100644 data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java create mode 100644 data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java diff --git a/data-loader/build.gradle b/data-loader/build.gradle index 87a057933b..5e9c2a4ba0 100644 --- a/data-loader/build.gradle +++ b/data-loader/build.gradle @@ -17,6 +17,7 @@ subprojects { implementation("org.apache.commons:commons-lang3:${commonsLangVersion}") implementation("commons-io:commons-io:${commonsIoVersion}") implementation("org.slf4j:slf4j-simple:${slf4jVersion}") + implementation("software.amazon.awssdk:s3:${awssdkVersion}") // Mockito testImplementation "org.mockito:mockito-core:${mockitoVersion}" diff --git 
a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java new file mode 100644 index 0000000000..eac36c802c --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java @@ -0,0 +1,170 @@ +package com.scalar.db.dataloader.core.dataimport.log; + +import com.fasterxml.jackson.databind.JsonNode; +import com.scalar.db.dataloader.core.Constants; +import com.scalar.db.dataloader.core.DataLoaderObjectMapper; +import com.scalar.db.dataloader.core.dataimport.ImportEventListener; +import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; +import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriter; +import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactory; +import com.scalar.db.dataloader.core.dataimport.task.result.ImportTargetResult; +import com.scalar.db.dataloader.core.dataimport.task.result.ImportTargetResultStatus; +import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; +import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; +import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchStatus; +import lombok.RequiredArgsConstructor; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +@RequiredArgsConstructor +public abstract class AbstractImportLogger implements ImportEventListener { + + protected static final DataLoaderObjectMapper OBJECT_MAPPER = new DataLoaderObjectMapper(); + + protected final ImportLoggerConfig config; + protected final LogWriterFactory logWriterFactory; + protected final List listeners = new ArrayList<>(); + + public void addListener(ImportEventListener listener) { + listeners.add(listener); + } + + public void removeListener(ImportEventListener listener) { + listeners.remove(listener); + } + + @Override + public void onDataChunkStarted(ImportDataChunkStatus importDataChunkStatus) { + // Currently we are not logging the start of a data chunk + } + + @Override + public void onTransactionBatchStarted(ImportTransactionBatchStatus batchStatus) { + // Currently we are not logging the start of a transaction batch + notifyTransactionBatchStarted(batchStatus); + } + + @Override + public void onTransactionBatchCompleted(ImportTransactionBatchResult batchResult) { + // Skip logging success records if the configuration is set to skip + if (shouldSkipLoggingSuccess(batchResult)) { + return; + } + + logTransactionBatch(batchResult); + notifyTransactionBatchCompleted(batchResult); + } + + @Override + public void onTaskComplete(ImportTaskResult taskResult) { + // TODO: we can remove this event if it's currently not being used in the import Manager as well + } + + protected abstract void logTransactionBatch(ImportTransactionBatchResult batchResult); + + protected boolean shouldSkipLoggingSuccess(ImportTransactionBatchResult batchResult) { + return batchResult.isSuccess() && !config.isLogSuccessRecords(); + } + + protected JsonNode createFilteredTransactionBatchLogJsonNode( + ImportTransactionBatchResult batchResult) { + + // If the batch result does not contain any records, return the batch result as is + if (batchResult.getRecords() == null) { + return OBJECT_MAPPER.valueToTree(batchResult); + } + + // Create a new list to store the modified import task results + List
modifiedRecords = new ArrayList<>(); + + // Loop over the records in the batchResult + for (ImportTaskResult taskResult : batchResult.getRecords()) { + // Create a new ImportTaskResult and not add the raw record yet + List targetResults = + batchResult.isSuccess() + ? taskResult.getTargets() + : updateTargetStatusForAbortedTransactionBatch(taskResult.getTargets()); + ImportTaskResult.ImportTaskResultBuilder builder = + ImportTaskResult.builder() + .rowNumber(taskResult.getRowNumber()) + .targets(targetResults) + .dataChunkId(taskResult.getDataChunkId()) + .rowNumber(taskResult.getRowNumber()); + + // Only add the raw record if the configuration is set to log raw source data + if (config.isLogRawSourceRecords()) { + builder.rawRecord(taskResult.getRawRecord()); + } + ImportTaskResult modifiedTaskResult = builder.build(); + + // Add the modified task result to the list + modifiedRecords.add(modifiedTaskResult); + } + + // Create a new transaction batch result with the modified import task results + ImportTransactionBatchResult modifiedBatchResult = + ImportTransactionBatchResult.builder() + .dataChunkId(batchResult.getDataChunkId()) + .transactionBatchId(batchResult.getTransactionBatchId()) + .transactionId(batchResult.getTransactionId()) + .records(modifiedRecords) + .errors(batchResult.getErrors()) + .success(batchResult.isSuccess()) + .build(); + + // Convert the modified batch result to a JsonNode + return OBJECT_MAPPER.valueToTree(modifiedBatchResult); + } + + protected void closeLogWriter(LogWriter logWriter) { + if (logWriter != null) { + try { + logWriter.close(); + } catch (IOException e) { + logError("Failed to close a log writer", e); + } + } + } + + protected abstract void logError(String errorMessage, Exception e); + + protected LogWriter createLogWriter(String logFilePath) throws IOException { + return logWriterFactory.createLogWriter(logFilePath); + } + + private void notifyTransactionBatchStarted(ImportTransactionBatchStatus status) { + for (ImportEventListener listener : listeners) { + listener.onTransactionBatchStarted(status); + } + } + + private void notifyTransactionBatchCompleted(ImportTransactionBatchResult batchResult) { + for (ImportEventListener listener : listeners) { + listener.onTransactionBatchCompleted(batchResult); + } + } + + private List updateTargetStatusForAbortedTransactionBatch( + List targetResults) { + for (int i = 0; i < targetResults.size(); i++) { + ImportTargetResult target = targetResults.get(i); + if (target.getStatus().equals(ImportTargetResultStatus.SAVED)) { + ImportTargetResult newTarget = + ImportTargetResult.builder() + .importAction(target.getImportAction()) + .status(ImportTargetResultStatus.ABORTED) + .importedRecord(target.getImportedRecord()) + .namespace(target.getNamespace()) + .tableName(target.getTableName()) + .dataMapped(target.isDataMapped()) + .errors(Collections.singletonList(Constants.ABORT_TRANSACTION_STATUS)) + .build(); + targetResults.set(i, newTarget); + } + } + return targetResults; + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java new file mode 100644 index 0000000000..fc0039bf90 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java @@ -0,0 +1,13 @@ +package com.scalar.db.dataloader.core.dataimport.log; + +import lombok.Builder; +import lombok.Value; + +@Value +@Builder +public class 
ImportLoggerConfig { + String logDirectoryPath; + boolean logSuccessRecords; + boolean logRawSourceRecords; + boolean prettyPrint; +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogStorageLocation.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogStorageLocation.java new file mode 100644 index 0000000000..396cb3d8e4 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogStorageLocation.java @@ -0,0 +1,7 @@ +package com.scalar.db.dataloader.core.dataimport.log; + +/** The location where the logs are stored. */ +public enum LogStorageLocation { + LOCAL_FILE_STORAGE, + AWS_S3 +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java new file mode 100644 index 0000000000..e851631468 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java @@ -0,0 +1,140 @@ +package com.scalar.db.dataloader.core.dataimport.log; + +import com.fasterxml.jackson.databind.JsonNode; +import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; +import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriter; +import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactory; +import com.scalar.db.dataloader.core.dataimport.task.result.ImportTargetResult; +import com.scalar.db.dataloader.core.dataimport.task.result.ImportTargetResultStatus; +import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; +import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; + +public class SingleFileImportLogger extends AbstractImportLogger { + + protected static final String SUMMARY_LOG_FILE_NAME = "summary.log"; + protected static final String SUCCESS_LOG_FILE_NAME = "success.json"; + protected static final String FAILURE_LOG_FILE_NAME = "failure.json"; + private static final Logger LOGGER = LoggerFactory.getLogger(SingleFileImportLogger.class); + private LogWriter summaryLogWriter; + private LogWriter successLogWriter; + private LogWriter failureLogWriter; + + public SingleFileImportLogger(ImportLoggerConfig config, LogWriterFactory logWriterFactory) + throws IOException { + super(config, logWriterFactory); + successLogWriter = createLogWriter(config.getLogDirectoryPath() + SUCCESS_LOG_FILE_NAME); + failureLogWriter = createLogWriter(config.getLogDirectoryPath() + FAILURE_LOG_FILE_NAME); + } + + @Override + public void onTaskComplete(ImportTaskResult taskResult) { + if (!config.isLogSuccessRecords() && !config.isLogRawSourceRecords()) return; + try { + writeImportTaskResultDetailToLogs(taskResult); + } catch (Exception e) { + logError("Failed to write success/failure logs", e); + } + } + + @Override + public void addOrUpdateDataChunkStatus(ImportDataChunkStatus status) {} + + @Override + public void onDataChunkCompleted(ImportDataChunkStatus dataChunkStatus) { + try { + logDataChunkSummary(dataChunkStatus); + } catch (IOException e) { + logError("Failed to log the data chunk summary", e); + } + } + + @Override + public void onAllDataChunksCompleted() { + closeAllLogWriters(); + } + + @Override + protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { + try { + LogWriter 
logWriter = getLogWriterForTransactionBatch(batchResult); + JsonNode jsonNode = createFilteredTransactionBatchLogJsonNode(batchResult); + writeToLogWriter(logWriter, jsonNode); + } catch (IOException e) { + logError("Failed to write a transaction batch record to the log file", e); + } + } + + @Override + protected void logError(String errorMessage, Exception exception) { + LOGGER.error(errorMessage, exception); + } + + private void logDataChunkSummary(ImportDataChunkStatus dataChunkStatus) throws IOException { + if (summaryLogWriter == null) { + summaryLogWriter = createLogWriter(config.getLogDirectoryPath() + SUMMARY_LOG_FILE_NAME); + } + writeImportDataChunkSummary(dataChunkStatus, summaryLogWriter); + } + + private void writeImportDataChunkSummary( + ImportDataChunkStatus dataChunkStatus, LogWriter logWriter) throws IOException { + JsonNode jsonNode = OBJECT_MAPPER.valueToTree(dataChunkStatus); + writeToLogWriter(logWriter, jsonNode); + } + + private LogWriter getLogWriterForTransactionBatch(ImportTransactionBatchResult batchResult) + throws IOException { + String logFileName = batchResult.isSuccess() ? SUCCESS_LOG_FILE_NAME : FAILURE_LOG_FILE_NAME; + LogWriter logWriter = batchResult.isSuccess() ? successLogWriter : failureLogWriter; + if (logWriter == null) { + logWriter = createLogWriter(config.getLogDirectoryPath() + logFileName); + if (batchResult.isSuccess()) { + successLogWriter = logWriter; + } else { + failureLogWriter = logWriter; + } + } + return logWriter; + } + + private void writeImportTaskResultDetailToLogs(ImportTaskResult importTaskResult) + throws IOException { + JsonNode jsonNode; + for (ImportTargetResult target : importTaskResult.getTargets()) { + if (config.isLogSuccessRecords() + && target.getStatus().equals(ImportTargetResultStatus.SAVED)) { + synchronized (successLogWriter) { + jsonNode = OBJECT_MAPPER.valueToTree(target); + successLogWriter.write(jsonNode); + successLogWriter.flush(); + } + } + if (config.isLogRawSourceRecords() + && !target.getStatus().equals(ImportTargetResultStatus.SAVED)) { + synchronized (failureLogWriter) { + jsonNode = OBJECT_MAPPER.valueToTree(target); + failureLogWriter.write(jsonNode); + failureLogWriter.flush(); + } + } + } + } + + private void writeToLogWriter(LogWriter logWriter, JsonNode jsonNode) throws IOException { + logWriter.write(jsonNode); + logWriter.flush(); + } + + private void closeAllLogWriters() { + closeLogWriter(summaryLogWriter); + closeLogWriter(successLogWriter); + closeLogWriter(failureLogWriter); + summaryLogWriter = null; + successLogWriter = null; + failureLogWriter = null; + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java new file mode 100644 index 0000000000..f12e6e5f73 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java @@ -0,0 +1,185 @@ +package com.scalar.db.dataloader.core.dataimport.log; + +import com.fasterxml.jackson.databind.JsonNode; +import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; +import com.scalar.db.dataloader.core.dataimport.log.writer.LogFileType; +import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriter; +import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactory; +import com.scalar.db.dataloader.core.dataimport.task.result.ImportTargetResult; +import 
com.scalar.db.dataloader.core.dataimport.task.result.ImportTargetResultStatus; +import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; +import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class SplitByDataChunkImportLogger extends AbstractImportLogger { + + protected static final String SUMMARY_LOG_FILE_NAME_FORMAT = "data_chunk_%s_summary.json"; + protected static final String FAILURE_LOG_FILE_NAME_FORMAT = "data_chunk_%s_failure.json"; + protected static final String SUCCESS_LOG_FILE_NAME_FORMAT = "data_chunk_%s_success.json"; + + private static final Logger LOGGER = LoggerFactory.getLogger(SplitByDataChunkImportLogger.class); + private final Map summaryLogWriters = new HashMap<>(); + private final Map successLogWriters = new HashMap<>(); + private final Map failureLogWriters = new HashMap<>(); + + public SplitByDataChunkImportLogger( + ImportLoggerConfig config, LogWriterFactory logWriterFactory) { + super(config, logWriterFactory); + } + + @Override + public void onTaskComplete(ImportTaskResult taskResult) { + if (!config.isLogSuccessRecords() && !config.isLogRawSourceRecords()) return; + try { + writeImportTaskResultDetailToLogs(taskResult); + } catch (IOException e) { + LOGGER.error("Failed to write success/failure logs", e); + } + } + + private void writeImportTaskResultDetailToLogs(ImportTaskResult importTaskResult) + throws IOException { + JsonNode jsonNode; + for (ImportTargetResult target : importTaskResult.getTargets()) { + if (config.isLogSuccessRecords() + && target.getStatus().equals(ImportTargetResultStatus.SAVED)) { + jsonNode = OBJECT_MAPPER.valueToTree(target); + synchronized (successLogWriters) { + LogWriter successLogWriter = + initializeLogWriterIfNeeded(LogFileType.SUCCESS, importTaskResult.getDataChunkId()); + successLogWriter.write(jsonNode); + successLogWriter.flush(); + } + } + if (config.isLogRawSourceRecords() + && !target.getStatus().equals(ImportTargetResultStatus.SAVED)) { + jsonNode = OBJECT_MAPPER.valueToTree(target); + synchronized (failureLogWriters) { + LogWriter failureLogWriter = + initializeLogWriterIfNeeded(LogFileType.FAILURE, importTaskResult.getDataChunkId()); + failureLogWriter.write(jsonNode); + failureLogWriter.flush(); + } + } + } + } + + @Override + public void addOrUpdateDataChunkStatus(ImportDataChunkStatus status) {} + + @Override + public void onDataChunkCompleted(ImportDataChunkStatus dataChunkStatus) { + try { + logDataChunkSummary(dataChunkStatus); + // Close the split log writers per data chunk if they exist for this data chunk id + closeLogWritersForDataChunk(dataChunkStatus.getDataChunkId()); + } catch (IOException e) { + LOGGER.error("Failed to log the data chunk summary", e); + } + } + + @Override + public void onAllDataChunksCompleted() { + closeAllDataChunkLogWriters(); + } + + @Override + protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { + LogFileType logFileType = batchResult.isSuccess() ?
LogFileType.SUCCESS : LogFileType.FAILURE; + try (LogWriter logWriter = + initializeLogWriterIfNeeded(logFileType, batchResult.getDataChunkId())) { + JsonNode jsonNode = createFilteredTransactionBatchLogJsonNode(batchResult); + synchronized (logWriter) { + logWriter.write(jsonNode); + logWriter.flush(); + } + } catch (IOException e) { + LOGGER.error("Failed to write a transaction batch record to a split mode log file", e); + } + } + + @Override + protected void logError(String errorMessage, Exception exception) { + LOGGER.error(errorMessage, exception); + } + + private void logDataChunkSummary(ImportDataChunkStatus dataChunkStatus) throws IOException { + try (LogWriter logWriter = + initializeLogWriterIfNeeded(LogFileType.SUMMARY, dataChunkStatus.getDataChunkId())) { + logWriter.write(OBJECT_MAPPER.valueToTree(dataChunkStatus)); + logWriter.flush(); + } + } + + private void closeLogWritersForDataChunk(int dataChunkId) { + closeLogWriter(successLogWriters.remove(dataChunkId)); + closeLogWriter(failureLogWriters.remove(dataChunkId)); + closeLogWriter(summaryLogWriters.remove(dataChunkId)); + } + + private void closeAllDataChunkLogWriters() { + summaryLogWriters.values().forEach(this::closeLogWriter); + successLogWriters.values().forEach(this::closeLogWriter); + failureLogWriters.values().forEach(this::closeLogWriter); + summaryLogWriters.clear(); + successLogWriters.clear(); + failureLogWriters.clear(); + } + + private String getLogFilePath(long batchId, LogFileType logFileType) { + String logfilePath; + switch (logFileType) { + case SUCCESS: + logfilePath = + config.getLogDirectoryPath() + String.format(SUCCESS_LOG_FILE_NAME_FORMAT, batchId); + break; + case FAILURE: + logfilePath = + config.getLogDirectoryPath() + String.format(FAILURE_LOG_FILE_NAME_FORMAT, batchId); + break; + case SUMMARY: + logfilePath = + config.getLogDirectoryPath() + String.format(SUMMARY_LOG_FILE_NAME_FORMAT, batchId); + break; + default: + logfilePath = ""; + } + return logfilePath; + } + + private LogWriter initializeLogWriterIfNeeded(LogFileType logFileType, int dataChunkId) + throws IOException { + Map logWriters = getLogWriters(logFileType); + if (!logWriters.containsKey(dataChunkId)) { + LogWriter logWriter = createLogWriter(logFileType, dataChunkId); + logWriters.put(dataChunkId, logWriter); + } + return logWriters.get(dataChunkId); + } + + private LogWriter createLogWriter(LogFileType logFileType, int dataChunkId) throws IOException { + String logFilePath = getLogFilePath(dataChunkId, logFileType); + return createLogWriter(logFilePath); + } + + private Map getLogWriters(LogFileType logFileType) { + Map logWriterMap = null; + switch (logFileType) { + case SUCCESS: + logWriterMap = successLogWriters; + break; + case FAILURE: + logWriterMap = failureLogWriters; + break; + case SUMMARY: + logWriterMap = summaryLogWriters; + break; + } + return logWriterMap; + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java new file mode 100644 index 0000000000..857fc47a69 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java @@ -0,0 +1,30 @@ +package com.scalar.db.dataloader.core.dataimport.log.writer; + +import com.fasterxml.jackson.databind.JsonNode; +import lombok.AllArgsConstructor; +import software.amazon.awssdk.services.s3.S3AsyncClient; + +import java.io.IOException; + 
+@AllArgsConstructor +public class AwsS3LogWriter implements LogWriter { + + private final S3AsyncClient s3AsyncClient; + private final String bucketName; + private final String objectKey; + + @Override + public void write(JsonNode sourceRecord) throws IOException { + // Implementation to write content to cloud storage + } + + @Override + public void flush() throws IOException { + // Implementation to flush content to cloud storage + } + + @Override + public void close() throws IOException { + // Implementation to close the cloud storage connection + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java new file mode 100644 index 0000000000..6940f8d16a --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java @@ -0,0 +1,36 @@ +package com.scalar.db.dataloader.core.dataimport.log.writer; + +import com.scalar.db.dataloader.core.dataimport.log.ImportLoggerConfig; +import lombok.AllArgsConstructor; + +import java.io.IOException; + +/** A factory class to create log writers. */ +@AllArgsConstructor +public class DefaultLogWriterFactory implements LogWriterFactory { + + private final LogWriterFactoryConfig config; + private final ImportLoggerConfig importLoggerConfig; + + /** + * Creates a log writer based on the configuration. + * + * @param logFilePath the path of the log file + * @return the log writer + */ + @Override + public LogWriter createLogWriter(String logFilePath) throws IOException { + LogWriter logWriter = null; + switch (config.getLogStorageLocation()) { + case LOCAL_FILE_STORAGE: + logWriter = new LocalFileLogWriter(logFilePath, importLoggerConfig); + break; + case AWS_S3: + logWriter = + new AwsS3LogWriter( + config.getS3AsyncClient(), config.getBucketName(), config.getObjectKey()); + break; + } + return logWriter; + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java new file mode 100644 index 0000000000..eb152b6e1d --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java @@ -0,0 +1,64 @@ +package com.scalar.db.dataloader.core.dataimport.log.writer; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonNode; +import com.scalar.db.dataloader.core.DataLoaderObjectMapper; +import com.scalar.db.dataloader.core.dataimport.log.ImportLoggerConfig; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; + +public class LocalFileLogWriter implements LogWriter { + private final JsonGenerator logWriter; + private final DataLoaderObjectMapper objectMapper; + + /** + * Creates an instance of LocalFileLogWriter with the specified file path and import logger configuration.
+ * + * @param filePath the file path + * @throws IOException if an I/O error occurs + */ + public LocalFileLogWriter(String filePath, ImportLoggerConfig importLoggerConfig) + throws IOException { + Path path = Paths.get(filePath); + this.objectMapper = new DataLoaderObjectMapper(); + this.logWriter = + objectMapper + .getFactory() + .createGenerator( + Files.newBufferedWriter( + path, StandardOpenOption.CREATE, StandardOpenOption.APPEND)); + // Start the JSON array + if (importLoggerConfig.isPrettyPrint()) this.logWriter.useDefaultPrettyPrinter(); + this.logWriter.writeStartArray(); + this.logWriter.flush(); + } + + @Override + public void write(JsonNode sourceRecord) throws IOException { + if (sourceRecord == null) { + return; + } + synchronized (logWriter) { + objectMapper.writeValue(logWriter, sourceRecord); + } + } + + @Override + public void flush() throws IOException { + logWriter.flush(); + } + + @Override + public void close() throws IOException { + if (logWriter.isClosed()) { + return; + } + logWriter.writeEndArray(); + logWriter.flush(); + logWriter.close(); + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogFileType.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogFileType.java new file mode 100644 index 0000000000..5483aefc91 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogFileType.java @@ -0,0 +1,8 @@ +package com.scalar.db.dataloader.core.dataimport.log.writer; + +/** The type of the log writer. */ +public enum LogFileType { + SUCCESS, + FAILURE, + SUMMARY +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java new file mode 100644 index 0000000000..cfd713acc3 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java @@ -0,0 +1,15 @@ +package com.scalar.db.dataloader.core.dataimport.log.writer; + +import com.fasterxml.jackson.databind.JsonNode; + +import java.io.IOException; + +public interface LogWriter extends AutoCloseable { + + void write(JsonNode sourceRecord) throws IOException; + + void flush() throws IOException; + + @Override + void close() throws IOException; +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactory.java new file mode 100644 index 0000000000..b3c4dfc080 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactory.java @@ -0,0 +1,8 @@ +package com.scalar.db.dataloader.core.dataimport.log.writer; + +import java.io.IOException; + +public interface LogWriterFactory { + + LogWriter createLogWriter(String logFilePath) throws IOException; +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactoryConfig.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactoryConfig.java new file mode 100644 index 0000000000..901d0aae6f --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactoryConfig.java @@ -0,0 +1,15 @@ +package com.scalar.db.dataloader.core.dataimport.log.writer; + +import 
com.scalar.db.dataloader.core.dataimport.log.LogStorageLocation; +import lombok.Builder; +import lombok.Value; +import software.amazon.awssdk.services.s3.S3AsyncClient; + +@Builder +@Value +public class LogWriterFactoryConfig { + LogStorageLocation logStorageLocation; + S3AsyncClient s3AsyncClient; + String bucketName; + String objectKey; +} diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java new file mode 100644 index 0000000000..cd80cf61d1 --- /dev/null +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java @@ -0,0 +1,271 @@ +package com.scalar.db.dataloader.core.dataimport.log; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.scalar.db.dataloader.core.DataLoaderObjectMapper; +import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; +import com.scalar.db.dataloader.core.dataimport.log.writer.DefaultLogWriterFactory; +import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactory; +import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactoryConfig; +import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; +import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class SingleFileImportLoggerTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(SingleFileImportLoggerTest.class); + private static final DataLoaderObjectMapper OBJECT_MAPPER = new DataLoaderObjectMapper(); + + @TempDir Path tempDir; + + private LogWriterFactory logWriterFactory; + + @BeforeEach + void setUp() { + LogWriterFactoryConfig logWriterFactoryConfig = + LogWriterFactoryConfig.builder() + .logStorageLocation(LogStorageLocation.LOCAL_FILE_STORAGE) + .build(); + ImportLoggerConfig importLoggerConfig = + ImportLoggerConfig.builder() + .prettyPrint(false) + .logSuccessRecords(false) + .logRawSourceRecords(false) + .logDirectoryPath("path") + .build(); + logWriterFactory = new DefaultLogWriterFactory(logWriterFactoryConfig, importLoggerConfig); + } + + @AfterEach + void tearDown() throws IOException { + cleanUpTempDir(); + } + + private void cleanUpTempDir() throws IOException { + try (Stream paths = Files.list(tempDir)) { + paths.forEach(this::deleteFile); + } + } + + private void deleteFile(Path file) { + try { + Files.deleteIfExists(file); + } catch (IOException e) { + LOGGER.error("Failed to delete file: {}", file, e); + } + } + + @Test + void onTransactionBatchCompleted_NoErrors_ShouldWriteToSuccessLogFile() throws IOException { + testTransactionBatchCompleted(true, true); + } + + @Test + void 
onTransactionBatchCompleted_HasErrors_ShouldWriteToFailureLogFile() throws IOException { + testTransactionBatchCompleted(false, true); + } + + private void testTransactionBatchCompleted(boolean success, boolean logSuccessRecords) + throws IOException { + // Arrange + ImportLoggerConfig config = + ImportLoggerConfig.builder() + .logDirectoryPath(tempDir.toString() + "/") + .logRawSourceRecords(true) + .logSuccessRecords(logSuccessRecords) + .build(); + SingleFileImportLogger importLogger = new SingleFileImportLogger(config, logWriterFactory); + + List batchResults = createBatchResults(1, success); + + // Act + for (ImportTransactionBatchResult batchResult : batchResults) { + importLogger.onTransactionBatchCompleted(batchResult); + importLogger.onDataChunkCompleted( + ImportDataChunkStatus.builder().dataChunkId(batchResult.getDataChunkId()).build()); + } + importLogger.onAllDataChunksCompleted(); + + // Assert + assertTransactionBatchResults(batchResults, success, logSuccessRecords); + } + + private List createBatchResults(int count, boolean success) { + List batchResults = new ArrayList<>(); + + for (int i = 1; i <= count; i++) { + List records = + Collections.singletonList( + ImportTaskResult.builder() + .rowNumber(i) + .rawRecord(OBJECT_MAPPER.createObjectNode()) + .targets(Collections.EMPTY_LIST) + .build()); + ImportTransactionBatchResult result = + ImportTransactionBatchResult.builder() + .dataChunkId(i) + .transactionBatchId(1) + .records(records) + .success(success) + .build(); + batchResults.add(result); + } + + return batchResults; + } + + private void assertTransactionBatchResults( + List batchResults, boolean success, boolean logSuccessRecords) + throws IOException { + DataLoaderObjectMapper objectMapper = new DataLoaderObjectMapper(); + + // Single file log mode + Path logFileName = + tempDir.resolve( + success + ? 
SingleFileImportLogger.SUCCESS_LOG_FILE_NAME + : SingleFileImportLogger.FAILURE_LOG_FILE_NAME); + if (logSuccessRecords || !success) { + assertTrue(Files.exists(logFileName), "Log file should exist"); + + String logContent = new String(Files.readAllBytes(logFileName), StandardCharsets.UTF_8); + + List logEntries = + objectMapper.readValue( + logContent, new TypeReference>() {}); + + assertEquals( + batchResults.size(), + logEntries.size(), + "Number of log entries should match the number of batch results"); + + for (int i = 0; i < batchResults.size(); i++) { + assertTransactionBatchResult(batchResults.get(i), logEntries.get(i)); + } + } else { + assertFalse(Files.exists(logFileName), "Log file should not exist"); + } + } + + private void assertTransactionBatchResult( + ImportTransactionBatchResult expected, ImportTransactionBatchResult actual) { + assertEquals(expected.getDataChunkId(), actual.getDataChunkId(), "Data chunk ID should match"); + assertEquals( + expected.getTransactionBatchId(), + actual.getTransactionBatchId(), + "Transaction batch ID should match"); + assertEquals( + expected.getTransactionId(), actual.getTransactionId(), "Transaction ID should match"); + assertEquals(expected.isSuccess(), actual.isSuccess(), "Success status should match"); + + List expectedRecords = expected.getRecords(); + List actualRecords = actual.getRecords(); + assertEquals(expectedRecords.size(), actualRecords.size(), "Number of records should match"); + for (int j = 0; j < expectedRecords.size(); j++) { + ImportTaskResult expectedRecord = expectedRecords.get(j); + ImportTaskResult actualRecord = actualRecords.get(j); + assertEquals( + expectedRecord.getRowNumber(), actualRecord.getRowNumber(), "Row number should match"); + assertEquals( + expectedRecord.getRawRecord(), actualRecord.getRawRecord(), "Raw record should match"); + assertEquals(expectedRecord.getTargets(), actualRecord.getTargets(), "Targets should match"); + } + } + + @Test + void onDataChunkCompleted_NoErrors_ShouldWriteToSummaryLogFile() throws IOException { + testDataChunkCompleted(false); + } + + @Test + void onDataChunkCompleted_HasErrors_ShouldWriteToSummaryLogFile() throws IOException { + testDataChunkCompleted(true); + } + + private void testDataChunkCompleted(boolean hasErrors) throws IOException { + ImportLoggerConfig config = + ImportLoggerConfig.builder() + .logDirectoryPath(tempDir.toString() + "/") + .logRawSourceRecords(true) + .logSuccessRecords(true) + .build(); + SingleFileImportLogger importLogger = new SingleFileImportLogger(config, logWriterFactory); + + List dataChunkStatuses = + Stream.of(1, 2) + .map(id -> createDataChunkStatus(id, hasErrors)) + .collect(Collectors.toList()); + + dataChunkStatuses.forEach(importLogger::onDataChunkCompleted); + importLogger.onAllDataChunksCompleted(); + + assertDataChunkStatusLog(SingleFileImportLogger.SUMMARY_LOG_FILE_NAME, dataChunkStatuses); + } + + private ImportDataChunkStatus createDataChunkStatus(int dataChunkId, boolean hasErrors) { + return ImportDataChunkStatus.builder() + .dataChunkId(dataChunkId) + .startTime(Instant.now()) + .endTime(Instant.now()) + .totalRecords(100) + .successCount(hasErrors ? 90 : 100) + .failureCount(hasErrors ? 
10 : 0) + .batchCount(5) + .totalDurationInMilliSeconds(1000) + .build(); + } + + private void assertDataChunkStatusLog( + String logFilePattern, List dataChunkStatuses) throws IOException { + assertSingleFileLog(tempDir, logFilePattern, dataChunkStatuses); + } + + private void assertSingleFileLog( + Path tempDir, String logFileName, List dataChunkStatuses) + throws IOException { + Path summaryLogFile = tempDir.resolve(logFileName); + assertTrue(Files.exists(summaryLogFile)); + + String logContent = new String(Files.readAllBytes(summaryLogFile), StandardCharsets.UTF_8); + DataLoaderObjectMapper objectMapper = new DataLoaderObjectMapper(); + List logEntries = + objectMapper.readValue(logContent, new TypeReference>() {}); + + assertEquals(dataChunkStatuses.size(), logEntries.size()); + for (int i = 0; i < dataChunkStatuses.size(); i++) { + assertDataChunkStatusEquals(dataChunkStatuses.get(i), logEntries.get(i)); + } + } + + private void assertDataChunkStatusEquals( + ImportDataChunkStatus expected, ImportDataChunkStatus actual) { + assertEquals(expected.getDataChunkId(), actual.getDataChunkId()); + assertEquals(expected.getStartTime(), actual.getStartTime()); + assertEquals(expected.getEndTime(), actual.getEndTime()); + assertEquals(expected.getTotalRecords(), actual.getTotalRecords()); + assertEquals(expected.getSuccessCount(), actual.getSuccessCount()); + assertEquals(expected.getFailureCount(), actual.getFailureCount()); + assertEquals(expected.getBatchCount(), actual.getBatchCount()); + assertEquals( + expected.getTotalDurationInMilliSeconds(), actual.getTotalDurationInMilliSeconds()); + } +} diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java new file mode 100644 index 0000000000..e1f397caf5 --- /dev/null +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java @@ -0,0 +1,244 @@ +package com.scalar.db.dataloader.core.dataimport.log; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.scalar.db.dataloader.core.DataLoaderObjectMapper; +import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; +import com.scalar.db.dataloader.core.dataimport.log.writer.DefaultLogWriterFactory; +import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactory; +import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactoryConfig; +import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; +import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class SplitByDataChunkImportLoggerTest { + + private static final DataLoaderObjectMapper OBJECT_MAPPER = new DataLoaderObjectMapper(); + + @TempDir Path tempDir; + + private 
LogWriterFactory logWriterFactory; + + @BeforeEach + void setUp() { + LogWriterFactoryConfig logWriterFactoryConfig = + LogWriterFactoryConfig.builder() + .logStorageLocation(LogStorageLocation.LOCAL_FILE_STORAGE) + .build(); + ImportLoggerConfig importLoggerConfig = + ImportLoggerConfig.builder() + .prettyPrint(false) + .logSuccessRecords(false) + .logRawSourceRecords(false) + .logDirectoryPath("path") + .build(); + logWriterFactory = new DefaultLogWriterFactory(logWriterFactoryConfig, importLoggerConfig); + } + + @Test + void onTransactionBatchCompleted_NoErrors_ShouldWriteToDataChunkSuccessFiles() + throws IOException { + testTransactionBatchCompleted(true, true); + } + + @Test + void onTransactionBatchCompleted_HasErrors_ShouldWriteToDataChunkFailureFiles() + throws IOException { + testTransactionBatchCompleted(false, true); + } + + @Test + void onTransactionBatchCompleted_NoErrorsAndNoSuccessFileLogging_ShouldNotWriteToSuccessFiles() + throws IOException { + testTransactionBatchCompleted(true, false); + } + + private void testTransactionBatchCompleted(boolean success, boolean logSuccessRecords) + throws IOException { + // Arrange + ImportLoggerConfig config = + ImportLoggerConfig.builder() + .logDirectoryPath(tempDir.toString() + "/") + .logRawSourceRecords(true) + .logSuccessRecords(logSuccessRecords) + .build(); + SplitByDataChunkImportLogger importLogger = + new SplitByDataChunkImportLogger(config, logWriterFactory); + + List batchResults = new ArrayList<>(); + + for (int i = 1; i <= 3; i++) { + List records = + Collections.singletonList( + ImportTaskResult.builder() + .rowNumber(i) + .targets(Collections.EMPTY_LIST) + .rawRecord(OBJECT_MAPPER.createObjectNode()) + .build()); + ImportTransactionBatchResult result = + ImportTransactionBatchResult.builder() + .dataChunkId(i) + .transactionBatchId(1) + .records(records) + .success(success) + .build(); + batchResults.add(result); + } + + // Act + for (ImportTransactionBatchResult batchResult : batchResults) { + importLogger.onTransactionBatchCompleted(batchResult); + importLogger.onDataChunkCompleted( + ImportDataChunkStatus.builder().dataChunkId(batchResult.getDataChunkId()).build()); + } + importLogger.onAllDataChunksCompleted(); + + // Assert + for (int i = 0; i < batchResults.size(); i++) { + ImportTransactionBatchResult batchResult = batchResults.get(i); + String logFileNameFormat = + success + ? 
SplitByDataChunkImportLogger.SUCCESS_LOG_FILE_NAME_FORMAT + : SplitByDataChunkImportLogger.FAILURE_LOG_FILE_NAME_FORMAT; + Path dataChunkLogFileName = tempDir.resolve(String.format(logFileNameFormat, i + 1)); + + if (success && logSuccessRecords) { + assertTrue(Files.exists(dataChunkLogFileName), "Data chunk success log file should exist"); + assertTransactionBatchResult(batchResult, dataChunkLogFileName); + } else if (!success) { + assertTrue(Files.exists(dataChunkLogFileName), "Data chunk failure log file should exist"); + assertTransactionBatchResult(batchResult, dataChunkLogFileName); + } else { + assertFalse( + Files.exists(dataChunkLogFileName), "Data chunk success log file should not exist"); + } + } + } + + private void assertTransactionBatchResult( + ImportTransactionBatchResult expected, Path dataChunkLogFileName) throws IOException { + // String logContent = Files.readString(dataChunkLogFileName); + String logContent = + new String(Files.readAllBytes(dataChunkLogFileName), StandardCharsets.UTF_8); + DataLoaderObjectMapper objectMapper = new DataLoaderObjectMapper(); + List logEntries = + objectMapper.readValue( + logContent, new TypeReference>() {}); + ImportTransactionBatchResult actual = logEntries.get(0); + + assertEquals(expected.getDataChunkId(), actual.getDataChunkId(), "Data chunk ID should match"); + assertEquals( + expected.getTransactionBatchId(), + actual.getTransactionBatchId(), + "Transaction batch ID should match"); + assertEquals( + expected.getTransactionId(), actual.getTransactionId(), "Transaction ID should match"); + assertEquals(expected.isSuccess(), actual.isSuccess(), "Success status should match"); + + List expectedRecords = expected.getRecords(); + List actualRecords = actual.getRecords(); + assertEquals(expectedRecords.size(), actualRecords.size(), "Number of records should match"); + for (int j = 0; j < expectedRecords.size(); j++) { + ImportTaskResult expectedRecord = expectedRecords.get(j); + ImportTaskResult actualRecord = actualRecords.get(j); + assertEquals( + expectedRecord.getRowNumber(), actualRecord.getRowNumber(), "Row number should match"); + assertEquals( + expectedRecord.getRawRecord(), actualRecord.getRawRecord(), "Raw record should match"); + assertEquals(expectedRecord.getTargets(), actualRecord.getTargets(), "Targets should match"); + } + } + + @Test + void onDataChunkCompleted_NoErrors_ShouldWriteToSummaryLogFile() throws IOException { + testDataChunkCompleted( + String.format(SplitByDataChunkImportLogger.SUMMARY_LOG_FILE_NAME_FORMAT, "%d"), false); + } + + @Test + void onDataChunkCompleted_HasErrors_ShouldWriteToSummaryLogFile() throws IOException { + testDataChunkCompleted( + String.format(SplitByDataChunkImportLogger.SUMMARY_LOG_FILE_NAME_FORMAT, "%d"), true); + } + + private void testDataChunkCompleted(String logFilePattern, boolean hasErrors) throws IOException { + ImportLoggerConfig config = + ImportLoggerConfig.builder() + .logDirectoryPath(tempDir.toString() + "/") + .logRawSourceRecords(true) + .logSuccessRecords(true) + .build(); + SplitByDataChunkImportLogger importLogger = + new SplitByDataChunkImportLogger(config, logWriterFactory); + + List dataChunkStatuses = + IntStream.rangeClosed(1, 2) + .mapToObj(id -> createDataChunkStatus(id, hasErrors)) + .collect(Collectors.toList()); + + dataChunkStatuses.forEach(importLogger::onDataChunkCompleted); + importLogger.onAllDataChunksCompleted(); + + assertDataChunkStatusLog(logFilePattern, dataChunkStatuses); + } + + private ImportDataChunkStatus createDataChunkStatus(int 
dataChunkId, boolean hasErrors) { + return ImportDataChunkStatus.builder() + .dataChunkId(dataChunkId) + .startTime(Instant.now()) + .endTime(Instant.now()) + .totalRecords(100) + .successCount(hasErrors ? 90 : 100) + .failureCount(hasErrors ? 10 : 0) + .batchCount(5) + .totalDurationInMilliSeconds(1000) + .build(); + } + + private void assertDataChunkStatusLog( + String logFilePattern, List dataChunkStatuses) throws IOException { + for (ImportDataChunkStatus dataChunkStatus : dataChunkStatuses) { + String logFileName = String.format(logFilePattern, dataChunkStatus.getDataChunkId()); + Path dataChunkLogFile = tempDir.resolve(logFileName); + assertTrue(Files.exists(dataChunkLogFile), "Data chunk summary log file should exist"); + + // String logContent = Files.readString(dataChunkLogFile); + String logContent = new String(Files.readAllBytes(dataChunkLogFile), StandardCharsets.UTF_8); + DataLoaderObjectMapper objectMapper = new DataLoaderObjectMapper(); + List logEntries = + objectMapper.readValue(logContent, new TypeReference>() {}); + + assertEquals(1, logEntries.size()); + assertDataChunkStatusEquals(dataChunkStatus, logEntries.get(0)); + } + } + + private void assertDataChunkStatusEquals( + ImportDataChunkStatus expected, ImportDataChunkStatus actual) { + assertEquals(expected.getDataChunkId(), actual.getDataChunkId()); + assertEquals(expected.getStartTime(), actual.getStartTime()); + assertEquals(expected.getEndTime(), actual.getEndTime()); + assertEquals(expected.getTotalRecords(), actual.getTotalRecords()); + assertEquals(expected.getSuccessCount(), actual.getSuccessCount()); + assertEquals(expected.getFailureCount(), actual.getFailureCount()); + assertEquals(expected.getBatchCount(), actual.getBatchCount()); + assertEquals( + expected.getTotalDurationInMilliSeconds(), actual.getTotalDurationInMilliSeconds()); + } +} diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java new file mode 100644 index 0000000000..5182b97cb8 --- /dev/null +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java @@ -0,0 +1,67 @@ +package com.scalar.db.dataloader.core.dataimport.log.writer; + +import com.scalar.db.dataloader.core.dataimport.log.ImportLoggerConfig; +import com.scalar.db.dataloader.core.dataimport.log.LogStorageLocation; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import software.amazon.awssdk.services.s3.S3AsyncClient; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Paths; + +class DefaultLogWriterFactoryTest { + + String filePath = Paths.get("").toAbsolutePath() + "/sample.log"; + DefaultLogWriterFactory defaultLogWriterFactory; + + @AfterEach + void removeFileIfCreated() { + File file = new File(filePath); + if (file.exists()) { + file.deleteOnExit(); + } + } + + @Test + void createLogWriter_withValidLocalLogFilePath_shouldReturnLocalFileLogWriterObject() + throws IOException { + defaultLogWriterFactory = + new DefaultLogWriterFactory( + LogWriterFactoryConfig.builder() + .logStorageLocation(LogStorageLocation.LOCAL_FILE_STORAGE) + .build(), + ImportLoggerConfig.builder() + .prettyPrint(false) + .logSuccessRecords(false) + .logRawSourceRecords(false) + .logDirectoryPath("path") + .build()); + LogWriter 
logWriter = defaultLogWriterFactory.createLogWriter(filePath); + Assertions.assertEquals(LocalFileLogWriter.class, logWriter.getClass()); + logWriter.close(); + } + + @Test + void createLogWriter_withValidFilePath_shouldReturnLogWriterObject() throws IOException { + defaultLogWriterFactory = + new DefaultLogWriterFactory( + LogWriterFactoryConfig.builder() + .logStorageLocation(LogStorageLocation.AWS_S3) + .bucketName("bucket") + .objectKey("ObjectKay") + .s3AsyncClient(Mockito.mock(S3AsyncClient.class)) + .build(), + ImportLoggerConfig.builder() + .prettyPrint(false) + .logSuccessRecords(false) + .logRawSourceRecords(false) + .logDirectoryPath("path") + .build()); + LogWriter logWriter = defaultLogWriterFactory.createLogWriter(filePath); + Assertions.assertEquals(AwsS3LogWriter.class, logWriter.getClass()); + logWriter.close(); + } +} From 89b9f058fccb145350bfe6fd9ee91a99a513fe62 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Fri, 11 Apr 2025 09:27:47 +0530 Subject: [PATCH 02/27] Spotless applied again --- .../dataimport/log/AbstractImportLogger.java | 3 +-- .../log/SingleFileImportLogger.java | 3 +-- .../log/SplitByDataChunkImportLogger.java | 5 ++--- .../dataimport/log/writer/AwsS3LogWriter.java | 3 +-- .../log/writer/DefaultLogWriterFactory.java | 3 +-- .../log/writer/LocalFileLogWriter.java | 1 - .../core/dataimport/log/writer/LogWriter.java | 1 - .../log/SingleFileImportLoggerTest.java | 21 +++++++++---------- .../log/SplitByDataChunkImportLoggerTest.java | 15 +++++++------ .../writer/DefaultLogWriterFactoryTest.java | 7 +++---- 10 files changed, 26 insertions(+), 36 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java index eac36c802c..11a7493ca9 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java @@ -12,12 +12,11 @@ import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchStatus; -import lombok.RequiredArgsConstructor; - import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import lombok.RequiredArgsConstructor; @RequiredArgsConstructor public abstract class AbstractImportLogger implements ImportEventListener { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java index e851631468..fc70770761 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java @@ -8,11 +8,10 @@ import com.scalar.db.dataloader.core.dataimport.task.result.ImportTargetResultStatus; import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; +import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; - public class 
SingleFileImportLogger extends AbstractImportLogger { protected static final String SUMMARY_LOG_FILE_NAME = "summary.log"; diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java index f12e6e5f73..bec306ef9b 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java @@ -9,12 +9,11 @@ import com.scalar.db.dataloader.core.dataimport.task.result.ImportTargetResultStatus; import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.HashMap; import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SplitByDataChunkImportLogger extends AbstractImportLogger { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java index 857fc47a69..c11fab0b23 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java @@ -1,11 +1,10 @@ package com.scalar.db.dataloader.core.dataimport.log.writer; import com.fasterxml.jackson.databind.JsonNode; +import java.io.IOException; import lombok.AllArgsConstructor; import software.amazon.awssdk.services.s3.S3AsyncClient; -import java.io.IOException; - @AllArgsConstructor public class AwsS3LogWriter implements LogWriter { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java index 6940f8d16a..27c1eb6c5f 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java @@ -1,9 +1,8 @@ package com.scalar.db.dataloader.core.dataimport.log.writer; import com.scalar.db.dataloader.core.dataimport.log.ImportLoggerConfig; -import lombok.AllArgsConstructor; - import java.io.IOException; +import lombok.AllArgsConstructor; /** A factory class to create log writers. 
*/ @AllArgsConstructor diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java index eb152b6e1d..b29395e8ec 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java @@ -4,7 +4,6 @@ import com.fasterxml.jackson.databind.JsonNode; import com.scalar.db.dataloader.core.DataLoaderObjectMapper; import com.scalar.db.dataloader.core.dataimport.log.ImportLoggerConfig; - import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java index cfd713acc3..f10917901f 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java @@ -1,7 +1,6 @@ package com.scalar.db.dataloader.core.dataimport.log.writer; import com.fasterxml.jackson.databind.JsonNode; - import java.io.IOException; public interface LogWriter extends AutoCloseable { diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java index cd80cf61d1..98e58109e7 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java @@ -1,5 +1,9 @@ package com.scalar.db.dataloader.core.dataimport.log; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + import com.fasterxml.jackson.core.type.TypeReference; import com.scalar.db.dataloader.core.DataLoaderObjectMapper; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; @@ -8,13 +12,6 @@ import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactoryConfig; import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.io.TempDir; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -25,10 +22,12 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class SingleFileImportLoggerTest { diff 
--git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java index e1f397caf5..800ae4e97a 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java @@ -1,5 +1,9 @@ package com.scalar.db.dataloader.core.dataimport.log; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + import com.fasterxml.jackson.core.type.TypeReference; import com.scalar.db.dataloader.core.DataLoaderObjectMapper; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; @@ -8,10 +12,6 @@ import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactoryConfig; import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.io.TempDir; - import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -22,10 +22,9 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; class SplitByDataChunkImportLoggerTest { diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java index 5182b97cb8..e9102bca61 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java @@ -2,16 +2,15 @@ import com.scalar.db.dataloader.core.dataimport.log.ImportLoggerConfig; import com.scalar.db.dataloader.core.dataimport.log.LogStorageLocation; +import java.io.File; +import java.io.IOException; +import java.nio.file.Paths; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.mockito.Mockito; import software.amazon.awssdk.services.s3.S3AsyncClient; -import java.io.File; -import java.io.IOException; -import java.nio.file.Paths; - class DefaultLogWriterFactoryTest { String filePath = Paths.get("").toAbsolutePath() + "/sample.log"; From 49c83b69ed58b70003098bbf149cea3af1accba7 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Fri, 11 Apr 2025 11:08:01 +0530 Subject: [PATCH 03/27] Removed unused code --- .../core/dataimport/log/AbstractImportLogger.java | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java 
b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java index 11a7493ca9..5faf8419dd 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java @@ -27,14 +27,6 @@ public abstract class AbstractImportLogger implements ImportEventListener { protected final LogWriterFactory logWriterFactory; protected final List listeners = new ArrayList<>(); - public void addListener(ImportEventListener listener) { - listeners.add(listener); - } - - public void removeListener(ImportEventListener listener) { - listeners.remove(listener); - } - @Override public void onDataChunkStarted(ImportDataChunkStatus importDataChunkStatus) { // Currently we are not logging the start of a data chunk @@ -57,11 +49,6 @@ public void onTransactionBatchCompleted(ImportTransactionBatchResult batchResult notifyTransactionBatchCompleted(batchResult); } - @Override - public void onTaskComplete(ImportTaskResult taskResult) { - // TODO: we can remove this event if it's current not being used in the import Manager as well - } - protected abstract void logTransactionBatch(ImportTransactionBatchResult batchResult); protected boolean shouldSkipLoggingSuccess(ImportTransactionBatchResult batchResult) { From c5c9c0aad87ad67543ab999dfd4d5540f621e8e6 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Tue, 15 Apr 2025 11:45:17 +0530 Subject: [PATCH 04/27] Removed unused classes and references --- data-loader/build.gradle | 1 - .../dataimport/log/LogStorageLocation.java | 7 ----- .../dataimport/log/writer/AwsS3LogWriter.java | 29 ------------------- .../log/writer/DefaultLogWriterFactory.java | 16 ++-------- .../log/writer/LogWriterFactoryConfig.java | 15 ---------- .../log/SingleFileImportLoggerTest.java | 7 +---- .../log/SplitByDataChunkImportLoggerTest.java | 7 +---- .../writer/DefaultLogWriterFactoryTest.java | 27 ----------------- 8 files changed, 4 insertions(+), 105 deletions(-) delete mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogStorageLocation.java delete mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java delete mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactoryConfig.java diff --git a/data-loader/build.gradle b/data-loader/build.gradle index 5e9c2a4ba0..87a057933b 100644 --- a/data-loader/build.gradle +++ b/data-loader/build.gradle @@ -17,7 +17,6 @@ subprojects { implementation("org.apache.commons:commons-lang3:${commonsLangVersion}") implementation("commons-io:commons-io:${commonsIoVersion}") implementation("org.slf4j:slf4j-simple:${slf4jVersion}") - implementation("software.amazon.awssdk:s3:${awssdkVersion}") // Mockito testImplementation "org.mockito:mockito-core:${mockitoVersion}" diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogStorageLocation.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogStorageLocation.java deleted file mode 100644 index 396cb3d8e4..0000000000 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogStorageLocation.java +++ /dev/null @@ -1,7 +0,0 @@ -package com.scalar.db.dataloader.core.dataimport.log; - -/** The location where the logs are stored. 
*/ -public enum LogStorageLocation { - LOCAL_FILE_STORAGE, - AWS_S3 -} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java deleted file mode 100644 index c11fab0b23..0000000000 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/AwsS3LogWriter.java +++ /dev/null @@ -1,29 +0,0 @@ -package com.scalar.db.dataloader.core.dataimport.log.writer; - -import com.fasterxml.jackson.databind.JsonNode; -import java.io.IOException; -import lombok.AllArgsConstructor; -import software.amazon.awssdk.services.s3.S3AsyncClient; - -@AllArgsConstructor -public class AwsS3LogWriter implements LogWriter { - - private final S3AsyncClient s3AsyncClient; - private final String bucketName; - private final String objectKey; - - @Override - public void write(JsonNode sourceRecord) throws IOException { - // Implementation to write content to cloud storage - } - - @Override - public void flush() throws IOException { - // Implementation to flush content to cloud storage - } - - @Override - public void close() throws IOException { - // Implementation to close the cloud storage connection - } -} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java index 27c1eb6c5f..5b2dc77497 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java @@ -8,28 +8,16 @@ @AllArgsConstructor public class DefaultLogWriterFactory implements LogWriterFactory { - private final LogWriterFactoryConfig config; private final ImportLoggerConfig importLoggerConfig; /** - * Creates a log writer based on the configuration. 
+ * Creates a log writer object * * @param logFilePath the path of the log file * @return the log writer */ @Override public LogWriter createLogWriter(String logFilePath) throws IOException { - LogWriter logWriter = null; - switch (config.getLogStorageLocation()) { - case LOCAL_FILE_STORAGE: - logWriter = new LocalFileLogWriter(logFilePath, importLoggerConfig); - break; - case AWS_S3: - logWriter = - new AwsS3LogWriter( - config.getS3AsyncClient(), config.getBucketName(), config.getObjectKey()); - break; - } - return logWriter; + return new LocalFileLogWriter(logFilePath, importLoggerConfig); } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactoryConfig.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactoryConfig.java deleted file mode 100644 index 901d0aae6f..0000000000 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactoryConfig.java +++ /dev/null @@ -1,15 +0,0 @@ -package com.scalar.db.dataloader.core.dataimport.log.writer; - -import com.scalar.db.dataloader.core.dataimport.log.LogStorageLocation; -import lombok.Builder; -import lombok.Value; -import software.amazon.awssdk.services.s3.S3AsyncClient; - -@Builder -@Value -public class LogWriterFactoryConfig { - LogStorageLocation logStorageLocation; - S3AsyncClient s3AsyncClient; - String bucketName; - String objectKey; -} diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java index 98e58109e7..e2d17dfa4f 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java @@ -9,7 +9,6 @@ import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; import com.scalar.db.dataloader.core.dataimport.log.writer.DefaultLogWriterFactory; import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactory; -import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactoryConfig; import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; import java.io.IOException; @@ -40,10 +39,6 @@ class SingleFileImportLoggerTest { @BeforeEach void setUp() { - LogWriterFactoryConfig logWriterFactoryConfig = - LogWriterFactoryConfig.builder() - .logStorageLocation(LogStorageLocation.LOCAL_FILE_STORAGE) - .build(); ImportLoggerConfig importLoggerConfig = ImportLoggerConfig.builder() .prettyPrint(false) @@ -51,7 +46,7 @@ void setUp() { .logRawSourceRecords(false) .logDirectoryPath("path") .build(); - logWriterFactory = new DefaultLogWriterFactory(logWriterFactoryConfig, importLoggerConfig); + logWriterFactory = new DefaultLogWriterFactory(importLoggerConfig); } @AfterEach diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java index 800ae4e97a..04d9906641 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java +++ 
b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java @@ -9,7 +9,6 @@ import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; import com.scalar.db.dataloader.core.dataimport.log.writer.DefaultLogWriterFactory; import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactory; -import com.scalar.db.dataloader.core.dataimport.log.writer.LogWriterFactoryConfig; import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; import java.io.IOException; @@ -36,10 +35,6 @@ class SplitByDataChunkImportLoggerTest { @BeforeEach void setUp() { - LogWriterFactoryConfig logWriterFactoryConfig = - LogWriterFactoryConfig.builder() - .logStorageLocation(LogStorageLocation.LOCAL_FILE_STORAGE) - .build(); ImportLoggerConfig importLoggerConfig = ImportLoggerConfig.builder() .prettyPrint(false) @@ -47,7 +42,7 @@ void setUp() { .logRawSourceRecords(false) .logDirectoryPath("path") .build(); - logWriterFactory = new DefaultLogWriterFactory(logWriterFactoryConfig, importLoggerConfig); + logWriterFactory = new DefaultLogWriterFactory(importLoggerConfig); } @Test diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java index e9102bca61..3b99510a7e 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java @@ -1,15 +1,12 @@ package com.scalar.db.dataloader.core.dataimport.log.writer; import com.scalar.db.dataloader.core.dataimport.log.ImportLoggerConfig; -import com.scalar.db.dataloader.core.dataimport.log.LogStorageLocation; import java.io.File; import java.io.IOException; import java.nio.file.Paths; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.Mockito; -import software.amazon.awssdk.services.s3.S3AsyncClient; class DefaultLogWriterFactoryTest { @@ -29,9 +26,6 @@ void createLogWriter_withValidLocalLogFilePath_shouldReturnLocalFileLogWriterObj throws IOException { defaultLogWriterFactory = new DefaultLogWriterFactory( - LogWriterFactoryConfig.builder() - .logStorageLocation(LogStorageLocation.LOCAL_FILE_STORAGE) - .build(), ImportLoggerConfig.builder() .prettyPrint(false) .logSuccessRecords(false) @@ -42,25 +36,4 @@ void createLogWriter_withValidLocalLogFilePath_shouldReturnLocalFileLogWriterObj Assertions.assertEquals(LocalFileLogWriter.class, logWriter.getClass()); logWriter.close(); } - - @Test - void createLogWriter_withValidFilePath_shouldReturnLogWriterObject() throws IOException { - defaultLogWriterFactory = - new DefaultLogWriterFactory( - LogWriterFactoryConfig.builder() - .logStorageLocation(LogStorageLocation.AWS_S3) - .bucketName("bucket") - .objectKey("ObjectKay") - .s3AsyncClient(Mockito.mock(S3AsyncClient.class)) - .build(), - ImportLoggerConfig.builder() - .prettyPrint(false) - .logSuccessRecords(false) - .logRawSourceRecords(false) - .logDirectoryPath("path") - .build()); - LogWriter logWriter = defaultLogWriterFactory.createLogWriter(filePath); - Assertions.assertEquals(AwsS3LogWriter.class, logWriter.getClass()); - 
logWriter.close(); - } } From 3934c2a1a6a4ccdc3e6ffbb5e2ecb01bf859381f Mon Sep 17 00:00:00 2001 From: Peckstadt Yves Date: Wed, 16 Apr 2025 16:30:47 +0900 Subject: [PATCH 05/27] Improve Javadocs --- .../dataimport/log/AbstractImportLogger.java | 87 +++++++++++++++ .../dataimport/log/ImportLoggerConfig.java | 24 ++++ .../log/SingleFileImportLogger.java | 94 ++++++++++++++++ .../log/SplitByDataChunkImportLogger.java | 105 ++++++++++++++++++ .../log/writer/DefaultLogWriterFactory.java | 16 ++- .../log/writer/LocalFileLogWriter.java | 30 ++++- .../dataimport/log/writer/LogFileType.java | 19 +++- .../core/dataimport/log/writer/LogWriter.java | 23 ++++ .../log/writer/LogWriterFactory.java | 12 ++ 9 files changed, 402 insertions(+), 8 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java index 5faf8419dd..4a6121a97b 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java @@ -18,26 +18,56 @@ import java.util.List; import lombok.RequiredArgsConstructor; +/** + * An abstract base class for logging import events during data loading operations. This class + * implements the {@link ImportEventListener} interface and provides common functionality for + * logging transaction batch results and managing event listeners. Concrete implementations should + * define how to log transaction batches and handle errors. + */ @RequiredArgsConstructor public abstract class AbstractImportLogger implements ImportEventListener { + /** Object mapper used for JSON serialization/deserialization. */ protected static final DataLoaderObjectMapper OBJECT_MAPPER = new DataLoaderObjectMapper(); + /** Configuration for the import logger. */ protected final ImportLoggerConfig config; + + /** Factory for creating log writers. */ protected final LogWriterFactory logWriterFactory; + + /** List of event listeners to be notified of import events. */ protected final List listeners = new ArrayList<>(); + /** + * Called when a data chunk import is started. Currently, this implementation does not log the + * start of a data chunk. + * + * @param importDataChunkStatus the status of the data chunk being imported + */ @Override public void onDataChunkStarted(ImportDataChunkStatus importDataChunkStatus) { // Currently we are not logging the start of a data chunk } + /** + * Called when a transaction batch is started. Currently, this implementation does not log the + * start of a transaction batch, but it notifies all registered listeners. + * + * @param batchStatus the status of the transaction batch being started + */ @Override public void onTransactionBatchStarted(ImportTransactionBatchStatus batchStatus) { // Currently we are not logging the start of a transaction batch notifyTransactionBatchStarted(batchStatus); } + /** + * Called when a transaction batch is completed. This method logs the transaction batch result if + * it should be logged based on the configuration, and notifies all registered listeners. 
+ * + * @param batchResult the result of the completed transaction batch + */ @Override public void onTransactionBatchCompleted(ImportTransactionBatchResult batchResult) { // skip logging success records if the configuration is set to skip @@ -49,12 +79,32 @@ public void onTransactionBatchCompleted(ImportTransactionBatchResult batchResult notifyTransactionBatchCompleted(batchResult); } + /** + * Logs a transaction batch result. This method should be implemented by concrete subclasses to + * define how to log transaction batch results. + * + * @param batchResult the transaction batch result to log + */ protected abstract void logTransactionBatch(ImportTransactionBatchResult batchResult); + /** + * Determines whether logging of a successful transaction batch should be skipped. Logging is + * skipped if the batch was successful and the configuration specifies not to log success records. + * + * @param batchResult the transaction batch result to check + * @return true if logging should be skipped, false otherwise + */ protected boolean shouldSkipLoggingSuccess(ImportTransactionBatchResult batchResult) { return batchResult.isSuccess() && !config.isLogSuccessRecords(); } + /** + * Creates a filtered JSON representation of a transaction batch result. This method filters out + * raw record data if the configuration specifies not to log raw source records. + * + * @param batchResult the transaction batch result to convert to JSON + * @return a JsonNode representing the filtered transaction batch result + */ protected JsonNode createFilteredTransactionBatchLogJsonNode( ImportTransactionBatchResult batchResult) { @@ -105,6 +155,12 @@ protected JsonNode createFilteredTransactionBatchLogJsonNode( return OBJECT_MAPPER.valueToTree(modifiedBatchResult); } + /** + * Safely closes a log writer. If an IOException occurs during closing, it logs the error using + * the {@link #logError} method. + * + * @param logWriter the log writer to close, may be null + */ protected void closeLogWriter(LogWriter logWriter) { if (logWriter != null) { try { @@ -115,24 +171,55 @@ protected void closeLogWriter(LogWriter logWriter) { } } + /** + * Logs an error message and exception. This method should be implemented by concrete subclasses + * to define how to log errors. + * + * @param errorMessage the error message to log + * @param e the exception that caused the error + */ protected abstract void logError(String errorMessage, Exception e); + /** + * Creates a log writer for the specified log file path. + * + * @param logFilePath the path to the log file + * @return a new log writer + * @throws IOException if an I/O error occurs while creating the log writer + */ protected LogWriter createLogWriter(String logFilePath) throws IOException { return logWriterFactory.createLogWriter(logFilePath); } + /** + * Notifies all registered listeners that a transaction batch has started. + * + * @param status the status of the transaction batch that has started + */ private void notifyTransactionBatchStarted(ImportTransactionBatchStatus status) { for (ImportEventListener listener : listeners) { listener.onTransactionBatchStarted(status); } } + /** + * Notifies all registered listeners that a transaction batch has completed. 
+ * + * @param batchResult the result of the completed transaction batch + */ private void notifyTransactionBatchCompleted(ImportTransactionBatchResult batchResult) { for (ImportEventListener listener : listeners) { listener.onTransactionBatchCompleted(batchResult); } } + /** + * Updates the status of target results for an aborted transaction batch. For each target with a + * status of SAVED, changes the status to ABORTED and adds an error message. + * + * @param targetResults the list of target results to update + * @return the updated list of target results + */ private List updateTargetStatusForAbortedTransactionBatch( List targetResults) { for (int i = 0; i < targetResults.size(); i++) { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java index fc0039bf90..9e0033fb26 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java @@ -3,11 +3,35 @@ import lombok.Builder; import lombok.Value; +/** + * Configuration class for import loggers. This class uses Lombok's {@code @Value} annotation to + * create an immutable class and {@code @Builder} annotation to provide a builder pattern for + * creating instances. + */ @Value @Builder public class ImportLoggerConfig { + /** + * The directory path where log files will be stored. This path should end with a directory + * separator (e.g., "/"). + */ String logDirectoryPath; + + /** + * Whether to log records that were successfully imported. If true, successful import operations + * will be logged to success log files. + */ boolean logSuccessRecords; + + /** + * Whether to log raw source records that failed to be imported. If true, failed import operations + * will be logged to failure log files. + */ boolean logRawSourceRecords; + + /** + * Whether to format the logs with pretty printing. If true, the JSON logs will be formatted with + * indentation for better readability. + */ boolean prettyPrint; } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java index fc70770761..f5a0a133e9 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java @@ -12,6 +12,20 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * An implementation of {@link AbstractImportLogger} that uses a single file for each log type. + * Unlike {@link SplitByDataChunkImportLogger}, this logger creates only three log files: one for + * successful operations, one for failed operations, and one for summary information, regardless of + * the number of data chunks processed. + * + *

+ * <p>The log files are named as follows:
+ *
+ * <ul>
+ *   <li>success.json - Records of successful import operations
+ *   <li>failure.json - Records of failed import operations
+ *   <li>summary.log - Summary information for all data chunks
+ * </ul>
+ */ public class SingleFileImportLogger extends AbstractImportLogger { protected static final String SUMMARY_LOG_FILE_NAME = "summary.log"; @@ -22,6 +36,15 @@ public class SingleFileImportLogger extends AbstractImportLogger { private LogWriter successLogWriter; private LogWriter failureLogWriter; + /** + * Creates a new instance of SingleFileImportLogger. Initializes the success and failure log + * writers immediately. The summary log writer is created on demand when the first data chunk is + * completed. + * + * @param config the configuration for the logger + * @param logWriterFactory the factory to create log writers + * @throws IOException if an I/O error occurs while creating the log writers + */ public SingleFileImportLogger(ImportLoggerConfig config, LogWriterFactory logWriterFactory) throws IOException { super(config, logWriterFactory); @@ -29,6 +52,12 @@ public SingleFileImportLogger(ImportLoggerConfig config, LogWriterFactory logWri failureLogWriter = createLogWriter(config.getLogDirectoryPath() + FAILURE_LOG_FILE_NAME); } + /** + * Called when an import task is completed. Writes the task result details to the appropriate log + * files based on the configuration. + * + * @param taskResult the result of the completed import task + */ @Override public void onTaskComplete(ImportTaskResult taskResult) { if (!config.isLogSuccessRecords() && !config.isLogRawSourceRecords()) return; @@ -39,9 +68,21 @@ public void onTaskComplete(ImportTaskResult taskResult) { } } + /** + * Called to add or update the status of a data chunk. This implementation does nothing as the + * status is only logged when the data chunk is completed. + * + * @param status the status of the data chunk + */ @Override public void addOrUpdateDataChunkStatus(ImportDataChunkStatus status) {} + /** + * Called when a data chunk is completed. Logs the summary of the data chunk to the summary log + * file. + * + * @param dataChunkStatus the status of the completed data chunk + */ @Override public void onDataChunkCompleted(ImportDataChunkStatus dataChunkStatus) { try { @@ -51,11 +92,17 @@ public void onDataChunkCompleted(ImportDataChunkStatus dataChunkStatus) { } } + /** Called when all data chunks are completed. Closes all log writers. */ @Override public void onAllDataChunksCompleted() { closeAllLogWriters(); } + /** + * Logs a transaction batch result to the appropriate log file based on its success status. + * + * @param batchResult the transaction batch result to log + */ @Override protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { try { @@ -67,11 +114,24 @@ protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { } } + /** + * Logs an error message with an exception to the logger. + * + * @param errorMessage the error message to log + * @param exception the exception associated with the error + */ @Override protected void logError(String errorMessage, Exception exception) { LOGGER.error(errorMessage, exception); } + /** + * Logs the summary of a data chunk to the summary log file. Creates the summary log writer if it + * doesn't exist yet. 
+ * + * @param dataChunkStatus the status of the data chunk to log + * @throws IOException if an I/O error occurs while writing to the log + */ private void logDataChunkSummary(ImportDataChunkStatus dataChunkStatus) throws IOException { if (summaryLogWriter == null) { summaryLogWriter = createLogWriter(config.getLogDirectoryPath() + SUMMARY_LOG_FILE_NAME); @@ -79,12 +139,27 @@ private void logDataChunkSummary(ImportDataChunkStatus dataChunkStatus) throws I writeImportDataChunkSummary(dataChunkStatus, summaryLogWriter); } + /** + * Writes the summary of a data chunk to the specified log writer. + * + * @param dataChunkStatus the status of the data chunk to log + * @param logWriter the log writer to write to + * @throws IOException if an I/O error occurs while writing to the log + */ private void writeImportDataChunkSummary( ImportDataChunkStatus dataChunkStatus, LogWriter logWriter) throws IOException { JsonNode jsonNode = OBJECT_MAPPER.valueToTree(dataChunkStatus); writeToLogWriter(logWriter, jsonNode); } + /** + * Gets the appropriate log writer for a transaction batch based on its success status. If the log + * writer doesn't exist yet, it will be created. + * + * @param batchResult the transaction batch result + * @return the log writer for the batch + * @throws IOException if an I/O error occurs while creating a new log writer + */ private LogWriter getLogWriterForTransactionBatch(ImportTransactionBatchResult batchResult) throws IOException { String logFileName = batchResult.isSuccess() ? SUCCESS_LOG_FILE_NAME : FAILURE_LOG_FILE_NAME; @@ -100,6 +175,14 @@ private LogWriter getLogWriterForTransactionBatch(ImportTransactionBatchResult b return logWriter; } + /** + * Writes the details of an import task result to the appropriate log files. Successful targets + * are written to success logs and failed targets to failure logs. The method is synchronized on + * the respective log writers to ensure thread safety. + * + * @param importTaskResult the result of the import task to log + * @throws IOException if an I/O error occurs while writing to the logs + */ private void writeImportTaskResultDetailToLogs(ImportTaskResult importTaskResult) throws IOException { JsonNode jsonNode; @@ -123,11 +206,22 @@ private void writeImportTaskResultDetailToLogs(ImportTaskResult importTaskResult } } + /** + * Writes a JSON node to a log writer and flushes the writer. + * + * @param logWriter the log writer to write to + * @param jsonNode the JSON node to write + * @throws IOException if an I/O error occurs while writing + */ private void writeToLogWriter(LogWriter logWriter, JsonNode jsonNode) throws IOException { logWriter.write(jsonNode); logWriter.flush(); } + /** + * Closes all log writers and sets them to null. This method is called when all data chunks have + * been completed. 
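For orientation, a minimal sketch of how the classes touched in this patch series fit together, using only the constructors and builder shown above and in the tests; the method name, directory path, and flag values below are illustrative, not part of the patch:

  void setUpImportLogging() throws IOException { // hypothetical call site
    ImportLoggerConfig config =
        ImportLoggerConfig.builder()
            .logDirectoryPath("logs/") // per the Javadoc, should end with a directory separator
            .logSuccessRecords(true)
            .logRawSourceRecords(true)
            .prettyPrint(false)
            .build();
    LogWriterFactory logWriterFactory = new DefaultLogWriterFactory(config);
    SingleFileImportLogger importLogger = new SingleFileImportLogger(config, logWriterFactory);
    // The logger is an ImportEventListener, so the import process can register it to receive
    // onTaskComplete, onDataChunkCompleted, and onAllDataChunksCompleted callbacks.
  }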
+ */ private void closeAllLogWriters() { closeLogWriter(summaryLogWriter); closeLogWriter(successLogWriter); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java index bec306ef9b..69deaf9445 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java @@ -15,6 +15,22 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * An implementation of {@link AbstractImportLogger} that creates separate log files for each data + * chunk. This logger maintains separate log writers for success, failure, and summary logs for each + * data chunk, allowing for better organization and easier tracking of import operations by data + * chunk. + * + *

+ * <p>The log files are named using the following formats:
+ *
+ * <ul>
+ *   <li>Success logs: data_chunk_[id]_success.json
+ *   <li>Failure logs: data_chunk_[id]_failure.json
+ *   <li>Summary logs: data_chunk_[id]_summary.json
+ * </ul>
+ *
+ * <p>
Log writers are created on demand and closed when their corresponding data chunk is completed. + */ public class SplitByDataChunkImportLogger extends AbstractImportLogger { protected static final String SUMMARY_LOG_FILE_NAME_FORMAT = "data_chunk_%s_summary.json"; @@ -26,11 +42,23 @@ public class SplitByDataChunkImportLogger extends AbstractImportLogger { private final Map successLogWriters = new HashMap<>(); private final Map failureLogWriters = new HashMap<>(); + /** + * Creates a new instance of SplitByDataChunkImportLogger. + * + * @param config the configuration for the logger + * @param logWriterFactory the factory to create log writers + */ public SplitByDataChunkImportLogger( ImportLoggerConfig config, LogWriterFactory logWriterFactory) { super(config, logWriterFactory); } + /** + * Called when an import task is completed. Writes the task result details to the appropriate log + * files based on the configuration. + * + * @param taskResult the result of the completed import task + */ @Override public void onTaskComplete(ImportTaskResult taskResult) { if (!config.isLogSuccessRecords() && !config.isLogRawSourceRecords()) return; @@ -41,6 +69,13 @@ public void onTaskComplete(ImportTaskResult taskResult) { } } + /** + * Writes the details of an import task result to the appropriate log files. Successful targets + * are written to success logs and failed targets to failure logs. + * + * @param importTaskResult the result of the import task to log + * @throws IOException if an I/O error occurs while writing to the logs + */ private void writeImportTaskResultDetailToLogs(ImportTaskResult importTaskResult) throws IOException { JsonNode jsonNode; @@ -68,9 +103,21 @@ private void writeImportTaskResultDetailToLogs(ImportTaskResult importTaskResult } } + /** + * Called to add or update the status of a data chunk. This implementation does nothing as the + * status is only logged when the data chunk is completed. + * + * @param status the status of the data chunk + */ @Override public void addOrUpdateDataChunkStatus(ImportDataChunkStatus status) {} + /** + * Called when a data chunk is completed. Logs the summary of the data chunk and closes the log + * writers for that data chunk. + * + * @param dataChunkStatus the status of the completed data chunk + */ @Override public void onDataChunkCompleted(ImportDataChunkStatus dataChunkStatus) { try { @@ -82,11 +129,18 @@ public void onDataChunkCompleted(ImportDataChunkStatus dataChunkStatus) { } } + /** Called when all data chunks are completed. Closes all remaining log writers. */ @Override public void onAllDataChunksCompleted() { closeAllDataChunkLogWriters(); } + /** + * Logs a transaction batch result to the appropriate log file based on its success status. The + * log file is determined by the data chunk ID and whether the batch was successful. + * + * @param batchResult the transaction batch result to log + */ @Override protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { LogFileType logFileType = batchResult.isSuccess() ? LogFileType.SUCCESS : LogFileType.FAILURE; @@ -102,11 +156,23 @@ protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { } } + /** + * Logs an error message with an exception to the logger. 
+ * + * @param errorMessage the error message to log + * @param exception the exception associated with the error + */ @Override protected void logError(String errorMessage, Exception exception) { LOGGER.error(errorMessage, exception); } + /** + * Logs the summary of a data chunk to a summary log file. + * + * @param dataChunkStatus the status of the data chunk to log + * @throws IOException if an I/O error occurs while writing to the log + */ private void logDataChunkSummary(ImportDataChunkStatus dataChunkStatus) throws IOException { try (LogWriter logWriter = initializeLogWriterIfNeeded(LogFileType.SUMMARY, dataChunkStatus.getDataChunkId())) { @@ -115,12 +181,21 @@ private void logDataChunkSummary(ImportDataChunkStatus dataChunkStatus) throws I } } + /** + * Closes and removes the log writers for a specific data chunk. + * + * @param dataChunkId the ID of the data chunk whose log writers should be closed + */ private void closeLogWritersForDataChunk(int dataChunkId) { closeLogWriter(successLogWriters.remove(dataChunkId)); closeLogWriter(failureLogWriters.remove(dataChunkId)); closeLogWriter(summaryLogWriters.remove(dataChunkId)); } + /** + * Closes all log writers for all data chunks and clears the writer maps. This method is called + * when all data chunks have been completed. + */ private void closeAllDataChunkLogWriters() { summaryLogWriters.values().forEach(this::closeLogWriter); successLogWriters.values().forEach(this::closeLogWriter); @@ -130,6 +205,13 @@ private void closeAllDataChunkLogWriters() { failureLogWriters.clear(); } + /** + * Constructs the log file path based on the batch ID and log file type. + * + * @param batchId the ID of the batch (data chunk) + * @param logFileType the type of log file (SUCCESS, FAILURE, or SUMMARY) + * @return the full path to the log file + */ private String getLogFilePath(long batchId, LogFileType logFileType) { String logfilePath; switch (logFileType) { @@ -151,6 +233,15 @@ private String getLogFilePath(long batchId, LogFileType logFileType) { return logfilePath; } + /** + * Gets or creates a log writer for the specified log file type and data chunk ID. If a log writer + * for the specified type and data chunk doesn't exist, it will be created. + * + * @param logFileType the type of log file + * @param dataChunkId the ID of the data chunk + * @return the log writer for the specified type and data chunk + * @throws IOException if an I/O error occurs while creating a new log writer + */ private LogWriter initializeLogWriterIfNeeded(LogFileType logFileType, int dataChunkId) throws IOException { Map logWriters = getLogWriters(logFileType); @@ -161,11 +252,25 @@ private LogWriter initializeLogWriterIfNeeded(LogFileType logFileType, int dataC return logWriters.get(dataChunkId); } + /** + * Creates a new log writer for the specified log file type and data chunk ID. + * + * @param logFileType the type of log file + * @param dataChunkId the ID of the data chunk + * @return a new log writer + * @throws IOException if an I/O error occurs while creating the log writer + */ private LogWriter createLogWriter(LogFileType logFileType, int dataChunkId) throws IOException { String logFilePath = getLogFilePath(dataChunkId, logFileType); return createLogWriter(logFilePath); } + /** + * Gets the appropriate map of log writers for the specified log file type. 
+ * + * @param logFileType the type of log file + * @return the map of log writers for the specified type + */ private Map getLogWriters(LogFileType logFileType) { Map logWriterMap = null; switch (logFileType) { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java index 5b2dc77497..b85ee8a33b 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java @@ -4,17 +4,25 @@ import java.io.IOException; import lombok.AllArgsConstructor; -/** A factory class to create log writers. */ +/** + * The default implementation of {@link LogWriterFactory} that creates {@link LocalFileLogWriter} + * instances. This factory uses the provided {@link ImportLoggerConfig} to configure the log writers + * it creates. It's annotated with Lombok's {@code @AllArgsConstructor} to automatically generate a + * constructor that initializes the configuration field. + */ @AllArgsConstructor public class DefaultLogWriterFactory implements LogWriterFactory { private final ImportLoggerConfig importLoggerConfig; /** - * Creates a log writer object + * Creates a {@link LocalFileLogWriter} for the specified log file path. The created log writer + * will be configured using the {@link ImportLoggerConfig} that was provided to this factory + * during construction. * - * @param logFilePath the path of the log file - * @return the log writer + * @param logFilePath the path where the log file will be created or appended to + * @return a new {@link LogWriter} instance that writes to the specified file + * @throws IOException if an I/O error occurs while creating the log writer */ @Override public LogWriter createLogWriter(String logFilePath) throws IOException { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java index b29395e8ec..323c02661b 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java @@ -10,15 +10,21 @@ import java.nio.file.Paths; import java.nio.file.StandardOpenOption; +/** + * An implementation of {@link LogWriter} that writes log entries to a local file. This class writes + * JSON records to a file as a JSON array, with each record being an element in the array. It + * handles file creation, appending, and proper JSON formatting. + */ public class LocalFileLogWriter implements LogWriter { private final JsonGenerator logWriter; private final DataLoaderObjectMapper objectMapper; /** - * Creates an instance of LocalFileLogWriter with the specified file path and log file type. + * Creates an instance of LocalFileLogWriter with the specified file path and configuration. 
* - * @param filePath the file path - * @throws IOException if an I/O error occurs + * @param filePath the path where the log file will be created or appended to + * @param importLoggerConfig the configuration for the logger, including formatting options + * @throws IOException if an I/O error occurs while creating or opening the file */ public LocalFileLogWriter(String filePath, ImportLoggerConfig importLoggerConfig) throws IOException { @@ -36,6 +42,13 @@ public LocalFileLogWriter(String filePath, ImportLoggerConfig importLoggerConfig this.logWriter.flush(); } + /** + * Writes a JSON record to the log file. If the source record is null, this method does nothing. + * The method is synchronized to ensure thread safety when writing to the file. + * + * @param sourceRecord the JSON record to write + * @throws IOException if an I/O error occurs while writing the record + */ @Override public void write(JsonNode sourceRecord) throws IOException { if (sourceRecord == null) { @@ -46,11 +59,22 @@ public void write(JsonNode sourceRecord) throws IOException { } } + /** + * Flushes any buffered data to the log file. + * + * @throws IOException if an I/O error occurs while flushing + */ @Override public void flush() throws IOException { logWriter.flush(); } + /** + * Closes the log writer, properly ending the JSON array and releasing resources. If the writer is + * already closed, this method does nothing. + * + * @throws IOException if an I/O error occurs while closing the writer + */ @Override public void close() throws IOException { if (logWriter.isClosed()) { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogFileType.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogFileType.java index 5483aefc91..e56a949dad 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogFileType.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogFileType.java @@ -1,8 +1,25 @@ package com.scalar.db.dataloader.core.dataimport.log.writer; -/** The type of the log writer. */ +/** + * Represents the different types of log files used in the data import process. Each type serves a + * specific purpose in tracking the import operation's results. + */ public enum LogFileType { + /** + * Represents a log file that records successful import operations. These logs contain records + * that were successfully processed and imported. + */ SUCCESS, + + /** + * Represents a log file that records failed import operations. These logs contain records that + * failed to be processed or imported, along with information about the failure. + */ FAILURE, + + /** + * Represents a log file that provides a summary of the import operation. These logs contain + * aggregated statistics and overall results of the import process. + */ SUMMARY } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java index f10917901f..32838f3215 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriter.java @@ -3,12 +3,35 @@ import com.fasterxml.jackson.databind.JsonNode; import java.io.IOException; +/** + * An interface for writing log entries to a destination. 
This interface extends {@link + * AutoCloseable} to ensure proper resource cleanup. Implementations of this interface handle the + * details of writing log entries to various destinations such as files, databases, or other storage + * systems. + */ public interface LogWriter extends AutoCloseable { + /** + * Writes a JSON record to the log. + * + * @param sourceRecord the JSON record to write + * @throws IOException if an I/O error occurs while writing the record + */ void write(JsonNode sourceRecord) throws IOException; + /** + * Flushes any buffered data to the underlying storage. + * + * @throws IOException if an I/O error occurs while flushing + */ void flush() throws IOException; + /** + * Closes this log writer and releases any system resources associated with it. This method should + * be called when the log writer is no longer needed to ensure proper cleanup of resources. + * + * @throws IOException if an I/O error occurs while closing the log writer + */ @Override void close() throws IOException; } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactory.java index b3c4dfc080..3854d728c9 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactory.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LogWriterFactory.java @@ -2,7 +2,19 @@ import java.io.IOException; +/** + * A factory interface for creating {@link LogWriter} instances. This interface abstracts the + * creation of log writers, allowing different implementations to create different types of log + * writers based on the application's needs. + */ public interface LogWriterFactory { + /** + * Creates a new log writer for the specified log file path. + * + * @param logFilePath the path where the log file will be created or appended to + * @return a new {@link LogWriter} instance + * @throws IOException if an I/O error occurs while creating the log writer + */ LogWriter createLogWriter(String logFilePath) throws IOException; } From 9958f95ab6d49616ea897356d5c506ecfa53348d Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Mon, 21 Apr 2025 17:19:17 +0530 Subject: [PATCH 06/27] Changes --- .../log/SingleFileImportLogger.java | 51 +++++++----- .../log/SplitByDataChunkImportLogger.java | 81 ++++++++++++------- .../log/writer/LocalFileLogWriter.java | 7 +- 3 files changed, 83 insertions(+), 56 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java index f5a0a133e9..ca869d9d9d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java @@ -9,6 +9,7 @@ import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; import java.io.IOException; +import javax.annotation.concurrent.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -26,15 +27,16 @@ *

  • summary.log - Summary information for all data chunks * */ +@ThreadSafe public class SingleFileImportLogger extends AbstractImportLogger { protected static final String SUMMARY_LOG_FILE_NAME = "summary.log"; protected static final String SUCCESS_LOG_FILE_NAME = "success.json"; protected static final String FAILURE_LOG_FILE_NAME = "failure.json"; private static final Logger LOGGER = LoggerFactory.getLogger(SingleFileImportLogger.class); - private LogWriter summaryLogWriter; - private LogWriter successLogWriter; - private LogWriter failureLogWriter; + private volatile LogWriter summaryLogWriter; + private final LogWriter successLogWriter; + private final LogWriter failureLogWriter; /** * Creates a new instance of SingleFileImportLogger. Initializes the success and failure log @@ -133,10 +135,25 @@ protected void logError(String errorMessage, Exception exception) { * @throws IOException if an I/O error occurs while writing to the log */ private void logDataChunkSummary(ImportDataChunkStatus dataChunkStatus) throws IOException { + ensureSummaryLogWriterInitialized(); + synchronized (summaryLogWriter) { + writeImportDataChunkSummary(dataChunkStatus, summaryLogWriter); + } + } + + /** + * Ensures that the summary log writer is initialized in a thread-safe manner. + * + * @throws IOException if an error occurs while creating the log writer + */ + private void ensureSummaryLogWriterInitialized() throws IOException { if (summaryLogWriter == null) { - summaryLogWriter = createLogWriter(config.getLogDirectoryPath() + SUMMARY_LOG_FILE_NAME); + synchronized (this) { + if (summaryLogWriter == null) { + summaryLogWriter = createLogWriter(config.getLogDirectoryPath() + SUMMARY_LOG_FILE_NAME); + } + } } - writeImportDataChunkSummary(dataChunkStatus, summaryLogWriter); } /** @@ -162,17 +179,7 @@ private void writeImportDataChunkSummary( */ private LogWriter getLogWriterForTransactionBatch(ImportTransactionBatchResult batchResult) throws IOException { - String logFileName = batchResult.isSuccess() ? SUCCESS_LOG_FILE_NAME : FAILURE_LOG_FILE_NAME; - LogWriter logWriter = batchResult.isSuccess() ? successLogWriter : failureLogWriter; - if (logWriter == null) { - logWriter = createLogWriter(config.getLogDirectoryPath() + logFileName); - if (batchResult.isSuccess()) { - successLogWriter = logWriter; - } else { - failureLogWriter = logWriter; - } - } - return logWriter; + return batchResult.isSuccess() ? successLogWriter : failureLogWriter; } /** @@ -223,11 +230,11 @@ private void writeToLogWriter(LogWriter logWriter, JsonNode jsonNode) throws IOE * been completed. 
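The lazy creation of the summary writer above follows the double-checked locking idiom. A generic sketch of its shape, for reference only (the holder class and openWriter method are hypothetical; only the structure matters):

  class LazyWriterHolder {
    // volatile is what makes the idiom safe: without it, another thread could observe a
    // non-null reference to a writer whose construction is not yet fully visible.
    private volatile LogWriter writer;

    LogWriter get() throws IOException {
      if (writer == null) {          // first check, without locking
        synchronized (this) {
          if (writer == null) {      // second check, under the lock
            writer = openWriter();   // hypothetical creation call
          }
        }
      }
      return writer;
    }

    private LogWriter openWriter() throws IOException {
      throw new UnsupportedOperationException("illustrative only");
    }
  }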
*/ private void closeAllLogWriters() { - closeLogWriter(summaryLogWriter); - closeLogWriter(successLogWriter); - closeLogWriter(failureLogWriter); - summaryLogWriter = null; - successLogWriter = null; - failureLogWriter = null; + synchronized (this) { + closeLogWriter(summaryLogWriter); + closeLogWriter(successLogWriter); + closeLogWriter(failureLogWriter); + summaryLogWriter = null; + } } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java index 69deaf9445..6e64a269b6 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java @@ -10,8 +10,10 @@ import com.scalar.db.dataloader.core.dataimport.task.result.ImportTaskResult; import com.scalar.db.dataloader.core.dataimport.transactionbatch.ImportTransactionBatchResult; import java.io.IOException; -import java.util.HashMap; +import java.io.UncheckedIOException; import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import javax.annotation.concurrent.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,6 +33,7 @@ * *

    Log writers are created on demand and closed when their corresponding data chunk is completed. */ +@ThreadSafe public class SplitByDataChunkImportLogger extends AbstractImportLogger { protected static final String SUMMARY_LOG_FILE_NAME_FORMAT = "data_chunk_%s_summary.json"; @@ -38,9 +41,9 @@ public class SplitByDataChunkImportLogger extends AbstractImportLogger { protected static final String SUCCESS_LOG_FILE_NAME_FORMAT = "data_chunk_%s_success.json"; private static final Logger LOGGER = LoggerFactory.getLogger(SplitByDataChunkImportLogger.class); - private final Map summaryLogWriters = new HashMap<>(); - private final Map successLogWriters = new HashMap<>(); - private final Map failureLogWriters = new HashMap<>(); + private final Map summaryLogWriters = new ConcurrentHashMap<>(); + private final Map successLogWriters = new ConcurrentHashMap<>(); + private final Map failureLogWriters = new ConcurrentHashMap<>(); /** * Creates a new instance of SplitByDataChunkImportLogger. @@ -65,7 +68,7 @@ public void onTaskComplete(ImportTaskResult taskResult) { try { writeImportTaskResultDetailToLogs(taskResult); } catch (IOException e) { - LOGGER.error("Failed to write success/failure logs"); + LOGGER.error("Failed to write success/failure logs", e); } } @@ -78,31 +81,39 @@ public void onTaskComplete(ImportTaskResult taskResult) { */ private void writeImportTaskResultDetailToLogs(ImportTaskResult importTaskResult) throws IOException { - JsonNode jsonNode; for (ImportTargetResult target : importTaskResult.getTargets()) { - if (config.isLogSuccessRecords() - && target.getStatus().equals(ImportTargetResultStatus.SAVED)) { - jsonNode = OBJECT_MAPPER.valueToTree(target); - synchronized (successLogWriters) { - LogWriter successLogWriter = - initializeLogWriterIfNeeded(LogFileType.SUCCESS, importTaskResult.getDataChunkId()); - successLogWriter.write(jsonNode); - successLogWriter.flush(); - } - } - if (config.isLogRawSourceRecords() - && !target.getStatus().equals(ImportTargetResultStatus.SAVED)) { - jsonNode = OBJECT_MAPPER.valueToTree(target); - synchronized (failureLogWriters) { - LogWriter failureLogWriter = - initializeLogWriterIfNeeded(LogFileType.FAILURE, importTaskResult.getDataChunkId()); - failureLogWriter.write(jsonNode); - failureLogWriter.flush(); - } + ImportTargetResultStatus status = target.getStatus(); + if (status.equals(ImportTargetResultStatus.SAVED) && config.isLogSuccessRecords()) { + writeLog(target, LogFileType.SUCCESS, importTaskResult.getDataChunkId()); + } else if (!status.equals(ImportTargetResultStatus.SAVED) && config.isLogRawSourceRecords()) { + writeLog(target, LogFileType.FAILURE, importTaskResult.getDataChunkId()); } } } + /** + * Serializes the given {@link ImportTargetResult} to JSON and writes it to a log file + * corresponding to the provided {@link LogFileType} and data chunk ID. + * + *

    This method ensures thread-safe access to the underlying {@link LogWriter} by synchronizing + * on the writer instance. It is safe to call concurrently from multiple threads handling the same + * or different data chunks. + * + * @param target the result of processing a single import target to be logged + * @param logFileType the type of log file to write to (e.g., SUCCESS or FAILURE) + * @param dataChunkId the ID of the data chunk associated with the log entry + * @throws IOException if writing or flushing the log fails + */ + private void writeLog(ImportTargetResult target, LogFileType logFileType, int dataChunkId) + throws IOException { + JsonNode jsonNode = OBJECT_MAPPER.valueToTree(target); + LogWriter writer = initializeLogWriterIfNeeded(logFileType, dataChunkId); + synchronized (writer) { + writer.write(jsonNode); + writer.flush(); + } + } + /** * Called to add or update the status of a data chunk. This implementation does nothing as the * status is only logged when the data chunk is completed. @@ -245,11 +256,19 @@ private String getLogFilePath(long batchId, LogFileType logFileType) { private LogWriter initializeLogWriterIfNeeded(LogFileType logFileType, int dataChunkId) throws IOException { Map logWriters = getLogWriters(logFileType); - if (!logWriters.containsKey(dataChunkId)) { - LogWriter logWriter = createLogWriter(logFileType, dataChunkId); - logWriters.put(dataChunkId, logWriter); + try { + return logWriters.computeIfAbsent( + dataChunkId, + id -> { + try { + return createLogWriter(logFileType, id); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }); + } catch (UncheckedIOException e) { + throw e.getCause(); } - return logWriters.get(dataChunkId); } /** @@ -272,7 +291,7 @@ private LogWriter createLogWriter(LogFileType logFileType, int dataChunkId) thro * @return the map of log writers for the specified type */ private Map getLogWriters(LogFileType logFileType) { - Map logWriterMap = null; + Map logWriterMap; switch (logFileType) { case SUCCESS: logWriterMap = successLogWriters; @@ -283,6 +302,8 @@ private Map getLogWriters(LogFileType logFileType) { case SUMMARY: logWriterMap = summaryLogWriters; break; + default: + throw new AssertionError(); } return logWriterMap; } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java index 323c02661b..1c9725dc3c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java @@ -9,6 +9,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; +import javax.annotation.Nullable; /** * An implementation of {@link LogWriter} that writes log entries to a local file. 
This class writes @@ -50,13 +51,11 @@ public LocalFileLogWriter(String filePath, ImportLoggerConfig importLoggerConfig * @throws IOException if an I/O error occurs while writing the record */ @Override - public void write(JsonNode sourceRecord) throws IOException { + public void write(@Nullable JsonNode sourceRecord) throws IOException { if (sourceRecord == null) { return; } - synchronized (logWriter) { - objectMapper.writeValue(logWriter, sourceRecord); - } + objectMapper.writeValue(logWriter, sourceRecord); } /** From 1afbc21ab306324bced7a932b161d8e3935939d1 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Mon, 21 Apr 2025 17:40:31 +0530 Subject: [PATCH 07/27] Renamed parameters --- .../core/dataimport/log/ImportLoggerConfig.java | 4 ++-- .../dataimport/log/SingleFileImportLoggerTest.java | 12 ++++++------ .../log/SplitByDataChunkImportLoggerTest.java | 12 ++++++------ .../log/writer/DefaultLogWriterFactoryTest.java | 4 ++-- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java index 9e0033fb26..f33c4ba188 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java @@ -21,13 +21,13 @@ public class ImportLoggerConfig { * Whether to log records that were successfully imported. If true, successful import operations * will be logged to success log files. */ - boolean logSuccessRecords; + boolean isLogSuccessRecords; /** * Whether to log raw source records that failed to be imported. If true, failed import operations * will be logged to failure log files. */ - boolean logRawSourceRecords; + boolean isLogRawSourceRecords; /** * Whether to format the logs with pretty printing. 
If true, the JSON logs will be formatted with diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java index e2d17dfa4f..69b9454477 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java @@ -42,8 +42,8 @@ void setUp() { ImportLoggerConfig importLoggerConfig = ImportLoggerConfig.builder() .prettyPrint(false) - .logSuccessRecords(false) - .logRawSourceRecords(false) + .isLogSuccessRecords(false) + .isLogRawSourceRecords(false) .logDirectoryPath("path") .build(); logWriterFactory = new DefaultLogWriterFactory(importLoggerConfig); @@ -84,8 +84,8 @@ private void testTransactionBatchCompleted(boolean success, boolean logSuccessRe ImportLoggerConfig config = ImportLoggerConfig.builder() .logDirectoryPath(tempDir.toString() + "/") - .logRawSourceRecords(true) - .logSuccessRecords(logSuccessRecords) + .isLogRawSourceRecords(true) + .isLogSuccessRecords(logSuccessRecords) .build(); SingleFileImportLogger importLogger = new SingleFileImportLogger(config, logWriterFactory); @@ -199,8 +199,8 @@ private void testDataChunkCompleted(boolean hasErrors) throws IOException { ImportLoggerConfig config = ImportLoggerConfig.builder() .logDirectoryPath(tempDir.toString() + "/") - .logRawSourceRecords(true) - .logSuccessRecords(true) + .isLogRawSourceRecords(true) + .isLogSuccessRecords(true) .build(); SingleFileImportLogger importLogger = new SingleFileImportLogger(config, logWriterFactory); diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java index 04d9906641..de7ee49be3 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLoggerTest.java @@ -38,8 +38,8 @@ void setUp() { ImportLoggerConfig importLoggerConfig = ImportLoggerConfig.builder() .prettyPrint(false) - .logSuccessRecords(false) - .logRawSourceRecords(false) + .isLogSuccessRecords(false) + .isLogRawSourceRecords(false) .logDirectoryPath("path") .build(); logWriterFactory = new DefaultLogWriterFactory(importLoggerConfig); @@ -69,8 +69,8 @@ private void testTransactionBatchCompleted(boolean success, boolean logSuccessRe ImportLoggerConfig config = ImportLoggerConfig.builder() .logDirectoryPath(tempDir.toString() + "/") - .logRawSourceRecords(true) - .logSuccessRecords(logSuccessRecords) + .isLogRawSourceRecords(true) + .isLogSuccessRecords(logSuccessRecords) .build(); SplitByDataChunkImportLogger importLogger = new SplitByDataChunkImportLogger(config, logWriterFactory); @@ -175,8 +175,8 @@ private void testDataChunkCompleted(String logFilePattern, boolean hasErrors) th ImportLoggerConfig config = ImportLoggerConfig.builder() .logDirectoryPath(tempDir.toString() + "/") - .logRawSourceRecords(true) - .logSuccessRecords(true) + .isLogRawSourceRecords(true) + .isLogSuccessRecords(true) .build(); SplitByDataChunkImportLogger importLogger = new SplitByDataChunkImportLogger(config, logWriterFactory); diff --git 
a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java index 3b99510a7e..28c31e5c03 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactoryTest.java @@ -28,8 +28,8 @@ void createLogWriter_withValidLocalLogFilePath_shouldReturnLocalFileLogWriterObj new DefaultLogWriterFactory( ImportLoggerConfig.builder() .prettyPrint(false) - .logSuccessRecords(false) - .logRawSourceRecords(false) + .isLogSuccessRecords(false) + .isLogRawSourceRecords(false) .logDirectoryPath("path") .build()); LogWriter logWriter = defaultLogWriterFactory.createLogWriter(filePath); From 8c5114d19095edff00800a1ffa9f81950d9a14c0 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Mon, 21 Apr 2025 17:49:36 +0530 Subject: [PATCH 08/27] logging changes --- .../core/dataimport/log/SplitByDataChunkImportLogger.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java index 6e64a269b6..2024beeb82 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java @@ -68,7 +68,7 @@ public void onTaskComplete(ImportTaskResult taskResult) { try { writeImportTaskResultDetailToLogs(taskResult); } catch (IOException e) { - LOGGER.error("Failed to write success/failure logs", e); + logError("Failed to write success/failure logs", e); } } @@ -136,7 +136,7 @@ public void onDataChunkCompleted(ImportDataChunkStatus dataChunkStatus) { // Close the split log writers per data chunk if they exist for this data chunk id closeLogWritersForDataChunk(dataChunkStatus.getDataChunkId()); } catch (IOException e) { - LOGGER.error("Failed to log the data chunk summary", e); + logError("Failed to log the data chunk summary", e); } } @@ -163,7 +163,7 @@ protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { logWriter.flush(); } } catch (IOException e) { - LOGGER.error("Failed to write a transaction batch record to a split mode log file", e); + logError("Failed to write a transaction batch record to a split mode log file", e); } } From ffab395dd6d95c14f8c5a868dbf83a1bced3b928 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Tue, 22 Apr 2025 08:51:21 +0530 Subject: [PATCH 09/27] removed repeated code --- .../dataloader/core/dataimport/log/AbstractImportLogger.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java index 4a6121a97b..74d2537685 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java @@ -127,8 +127,7 @@ protected JsonNode createFilteredTransactionBatchLogJsonNode( 
ImportTaskResult.builder() .rowNumber(taskResult.getRowNumber()) .targets(targetResults) - .dataChunkId(taskResult.getDataChunkId()) - .rowNumber(taskResult.getRowNumber()); + .dataChunkId(taskResult.getDataChunkId()); // Only add the raw record if the configuration is set to log raw source data if (config.isLogRawSourceRecords()) { From 6dd213e9e9c33e94ab25f474b2350f7bdf0c5f90 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Wed, 23 Apr 2025 14:23:15 +0530 Subject: [PATCH 10/27] Added exception throw --- .../dataloader/core/dataimport/log/SingleFileImportLogger.java | 1 + .../core/dataimport/log/SplitByDataChunkImportLogger.java | 1 + 2 files changed, 2 insertions(+) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java index ca869d9d9d..502227b1ec 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java @@ -125,6 +125,7 @@ protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { @Override protected void logError(String errorMessage, Exception exception) { LOGGER.error(errorMessage, exception); + throw new RuntimeException(errorMessage, exception); } /** diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java index 2024beeb82..1ab63a2520 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java @@ -176,6 +176,7 @@ protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { @Override protected void logError(String errorMessage, Exception exception) { LOGGER.error(errorMessage, exception); + throw new RuntimeException(errorMessage, exception); } /** From 65421779019b2031488223320e048ee55c61bf7e Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Fri, 25 Apr 2025 12:15:24 +0530 Subject: [PATCH 11/27] Synchronisation changes --- .../log/SingleFileImportLogger.java | 20 +++++--------------- .../log/SplitByDataChunkImportLogger.java | 14 ++++---------- .../log/writer/LocalFileLogWriter.java | 6 +++--- 3 files changed, 12 insertions(+), 28 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java index 502227b1ec..15a63bbf9f 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java @@ -34,7 +34,7 @@ public class SingleFileImportLogger extends AbstractImportLogger { protected static final String SUCCESS_LOG_FILE_NAME = "success.json"; protected static final String FAILURE_LOG_FILE_NAME = "failure.json"; private static final Logger LOGGER = LoggerFactory.getLogger(SingleFileImportLogger.class); - private volatile LogWriter summaryLogWriter; + private LogWriter summaryLogWriter; private final LogWriter successLogWriter; private final LogWriter
failureLogWriter; @@ -137,9 +137,7 @@ protected void logError(String errorMessage, Exception exception) { */ private void logDataChunkSummary(ImportDataChunkStatus dataChunkStatus) throws IOException { ensureSummaryLogWriterInitialized(); - synchronized (summaryLogWriter) { - writeImportDataChunkSummary(dataChunkStatus, summaryLogWriter); - } + writeImportDataChunkSummary(dataChunkStatus, summaryLogWriter); } /** @@ -197,19 +195,12 @@ private void writeImportTaskResultDetailToLogs(ImportTaskResult importTaskResult for (ImportTargetResult target : importTaskResult.getTargets()) { if (config.isLogSuccessRecords() && target.getStatus().equals(ImportTargetResultStatus.SAVED)) { - synchronized (successLogWriter) { - jsonNode = OBJECT_MAPPER.valueToTree(target); - successLogWriter.write(jsonNode); - successLogWriter.flush(); - } + + writeToLogWriter(successLogWriter, OBJECT_MAPPER.valueToTree(target)); } if (config.isLogRawSourceRecords() && !target.getStatus().equals(ImportTargetResultStatus.SAVED)) { - synchronized (failureLogWriter) { - jsonNode = OBJECT_MAPPER.valueToTree(target); - failureLogWriter.write(jsonNode); - failureLogWriter.flush(); - } + writeToLogWriter(failureLogWriter, OBJECT_MAPPER.valueToTree(target)); } } } @@ -223,7 +214,6 @@ private void writeImportTaskResultDetailToLogs(ImportTaskResult importTaskResult */ private void writeToLogWriter(LogWriter logWriter, JsonNode jsonNode) throws IOException { logWriter.write(jsonNode); - logWriter.flush(); } /** diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java index 1ab63a2520..ab702babbf 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java @@ -108,10 +108,7 @@ private void writeLog(ImportTargetResult target, LogFileType logFileType, int da throws IOException { JsonNode jsonNode = OBJECT_MAPPER.valueToTree(target); LogWriter writer = initializeLogWriterIfNeeded(logFileType, dataChunkId); - synchronized (writer) { - writer.write(jsonNode); - writer.flush(); - } + writer.write(jsonNode); } /** @@ -155,13 +152,10 @@ public void onAllDataChunksCompleted() { @Override protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { LogFileType logFileType = batchResult.isSuccess() ? 
LogFileType.SUCCESS : LogFileType.FAILURE; - try (LogWriter logWriter = - initializeLogWriterIfNeeded(logFileType, batchResult.getDataChunkId())) { + try { + LogWriter logWriter = initializeLogWriterIfNeeded(logFileType, batchResult.getDataChunkId()); JsonNode jsonNode = createFilteredTransactionBatchLogJsonNode(batchResult); - synchronized (logWriter) { - logWriter.write(jsonNode); - logWriter.flush(); - } + logWriter.write(jsonNode); } catch (IOException e) { logError("Failed to write a transaction batch record to a split mode log file", e); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java index 1c9725dc3c..3689bd51d1 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/LocalFileLogWriter.java @@ -51,7 +51,7 @@ public LocalFileLogWriter(String filePath, ImportLoggerConfig importLoggerConfig * @throws IOException if an I/O error occurs while writing the record */ @Override - public void write(@Nullable JsonNode sourceRecord) throws IOException { + public synchronized void write(@Nullable JsonNode sourceRecord) throws IOException { if (sourceRecord == null) { return; } @@ -64,7 +64,7 @@ public void write(@Nullable JsonNode sourceRecord) throws IOException { * @throws IOException if an I/O error occurs while flushing */ @Override - public void flush() throws IOException { + public synchronized void flush() throws IOException { logWriter.flush(); } @@ -75,7 +75,7 @@ public void flush() throws IOException { * @throws IOException if an I/O error occurs while closing the writer */ @Override - public void close() throws IOException { + public synchronized void close() throws IOException { if (logWriter.isClosed()) { return; } From 603e46ead1a70ca780a8f67f2f46bf4aa1b0f4e2 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Fri, 25 Apr 2025 12:31:20 +0530 Subject: [PATCH 12/27] Added volatile back to fix spotbugs issue --- .../dataloader/core/dataimport/log/SingleFileImportLogger.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java index 15a63bbf9f..7aae529bb5 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java @@ -34,7 +34,7 @@ public class SingleFileImportLogger extends AbstractImportLogger { protected static final String SUCCESS_LOG_FILE_NAME = "success.json"; protected static final String FAILURE_LOG_FILE_NAME = "failure.json"; private static final Logger LOGGER = LoggerFactory.getLogger(SingleFileImportLogger.class); - private LogWriter summaryLogWriter; + private volatile LogWriter summaryLogWriter; private final LogWriter successLogWriter; private final LogWriter failureLogWriter; From eaf9d88c224ce4830dbeb896b21061e4c060ee85 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Fri, 25 Apr 2025 12:44:21 +0530 Subject: [PATCH 13/27] Removed unused variable --- .../dataloader/core/dataimport/log/SingleFileImportLogger.java | 1 - 1 file changed, 1 deletion(-) diff --git 
a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java index 7aae529bb5..151c329642 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java @@ -191,7 +191,6 @@ private LogWriter getLogWriterForTransactionBatch(ImportTransactionBatchResult b */ private void writeImportTaskResultDetailToLogs(ImportTaskResult importTaskResult) throws IOException { - JsonNode jsonNode; for (ImportTargetResult target : importTaskResult.getTargets()) { if (config.isLogSuccessRecords() && target.getStatus().equals(ImportTargetResultStatus.SAVED)) { From 502034ece2f62847370d8c4a63f385ff7211446e Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Wed, 30 Apr 2025 09:39:32 +0530 Subject: [PATCH 14/27] Changed LOGGER to logger --- .../core/dataimport/log/SingleFileImportLogger.java | 4 ++-- .../core/dataimport/log/SplitByDataChunkImportLogger.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java index 151c329642..cdd7a66d03 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java @@ -33,7 +33,7 @@ public class SingleFileImportLogger extends AbstractImportLogger { protected static final String SUMMARY_LOG_FILE_NAME = "summary.log"; protected static final String SUCCESS_LOG_FILE_NAME = "success.json"; protected static final String FAILURE_LOG_FILE_NAME = "failure.json"; - private static final Logger LOGGER = LoggerFactory.getLogger(SingleFileImportLogger.class); + private static final Logger logger = LoggerFactory.getLogger(SingleFileImportLogger.class); private volatile LogWriter summaryLogWriter; private final LogWriter successLogWriter; private final LogWriter failureLogWriter; @@ -124,7 +124,7 @@ protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { */ @Override protected void logError(String errorMessage, Exception exception) { - LOGGER.error(errorMessage, exception); + logger.error(errorMessage, exception); throw new RuntimeException(errorMessage, exception); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java index ab702babbf..99d1fe70d8 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java @@ -40,7 +40,7 @@ public class SplitByDataChunkImportLogger extends AbstractImportLogger { protected static final String FAILURE_LOG_FILE_NAME_FORMAT = "data_chunk_%s_failure.json"; protected static final String SUCCESS_LOG_FILE_NAME_FORMAT = "data_chunk_%s_success.json"; - private static final Logger LOGGER = LoggerFactory.getLogger(SplitByDataChunkImportLogger.class); + private static final Logger logger =
LoggerFactory.getLogger(SplitByDataChunkImportLogger.class); private final Map summaryLogWriters = new ConcurrentHashMap<>(); private final Map successLogWriters = new ConcurrentHashMap<>(); private final Map failureLogWriters = new ConcurrentHashMap<>(); @@ -169,7 +169,7 @@ protected void logTransactionBatch(ImportTransactionBatchResult batchResult) { */ @Override protected void logError(String errorMessage, Exception exception) { - LOGGER.error(errorMessage, exception); + logger.error(errorMessage, exception); throw new RuntimeException(errorMessage, exception); } From 6b22d1acbaf02167cbf2bd18fbfaaef66418b838 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Wed, 30 Apr 2025 09:42:39 +0530 Subject: [PATCH 15/27] logger name change in test --- .../core/dataimport/log/SingleFileImportLoggerTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java index 69b9454477..d03f04fbb0 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLoggerTest.java @@ -30,7 +30,7 @@ class SingleFileImportLoggerTest { - private static final Logger LOGGER = LoggerFactory.getLogger(SingleFileImportLoggerTest.class); + private static final Logger logger = LoggerFactory.getLogger(SingleFileImportLoggerTest.class); private static final DataLoaderObjectMapper OBJECT_MAPPER = new DataLoaderObjectMapper(); @TempDir Path tempDir; @@ -64,7 +64,7 @@ private void deleteFile(Path file) { try { Files.deleteIfExists(file); } catch (IOException e) { - LOGGER.error("Failed to delete file: {}", file, e); + logger.error("Failed to delete file: {}", file, e); } } From 7d5ad8c9a47d53cf7b4bd7c4c48f3221a3f2b34d Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Wed, 30 Apr 2025 09:49:19 +0530 Subject: [PATCH 16/27] Fix warnings and rename logger --- .../core/dataimport/processor/ImportProcessor.java | 8 ++++---- .../core/dataimport/processor/CsvImportProcessorTest.java | 5 +++-- .../dataimport/processor/JsonImportProcessorTest.java | 5 +++-- .../processor/JsonLinesImportProcessorTest.java | 5 +++-- gradle/spotbugs-exclude.xml | 2 +- 5 files changed, 14 insertions(+), 11 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java index 1a317a1a82..1175b192db 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java @@ -41,7 +41,7 @@ public abstract class ImportProcessor { final ImportProcessorParams params; - private static final Logger LOGGER = LoggerFactory.getLogger(ImportProcessor.class); + private static final Logger logger = LoggerFactory.getLogger(ImportProcessor.class); private final List listeners = new ArrayList<>(); /** @@ -232,13 +232,13 @@ private ImportTransactionBatchResult processTransactionBatch( } catch (TransactionException e) { isSuccess = false; - LOGGER.error(e.getMessage()); + logger.error(e.getMessage()); try { if (transaction != null) { transaction.abort(); // Ensure transaction is 
aborted } } catch (TransactionException abortException) { - LOGGER.error( + logger.error( "Failed to abort transaction: {}", abortException.getMessage(), abortException); } error = e.getMessage(); @@ -446,7 +446,7 @@ private void waitForFuturesToComplete(List> futures) { try { future.get(); } catch (Exception e) { - LOGGER.error(e.getMessage()); + logger.error(e.getMessage()); } } } diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessorTest.java index 94acd20ace..9ff35d4f17 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessorTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessorTest.java @@ -1,5 +1,6 @@ package com.scalar.db.dataloader.core.dataimport.processor; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -95,7 +96,7 @@ void test_importProcessWithStorage() { csvImportProcessor = new CsvImportProcessor(params); Map statusList = csvImportProcessor.process(5, 1, UnitTestUtils.getCsvReader()); - assert statusList != null; + assertThat(statusList).isNotNull(); Assertions.assertEquals(1, statusList.size()); } @@ -115,7 +116,7 @@ void test_importProcessWithTransaction() { csvImportProcessor = new CsvImportProcessor(params); Map statusList = csvImportProcessor.process(5, 1, UnitTestUtils.getCsvReader()); - assert statusList != null; + assertThat(statusList).isNotNull(); Assertions.assertEquals(1, statusList.size()); } } diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessorTest.java index aa9a106a0c..4cc9db6558 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessorTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessorTest.java @@ -1,5 +1,6 @@ package com.scalar.db.dataloader.core.dataimport.processor; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -95,7 +96,7 @@ void test_importProcessWithStorage() { jsonImportProcessor = new JsonImportProcessor(params); Map statusList = jsonImportProcessor.process(5, 1, UnitTestUtils.getJsonReader()); - assert statusList != null; + assertThat(statusList).isNotNull(); Assertions.assertEquals(1, statusList.size()); } @@ -115,7 +116,7 @@ void test_importProcessWithTransaction() { jsonImportProcessor = new JsonImportProcessor(params); Map statusList = jsonImportProcessor.process(5, 1, UnitTestUtils.getJsonReader()); - assert statusList != null; + assertThat(statusList).isNotNull(); Assertions.assertEquals(1, statusList.size()); } } diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessorTest.java index e3db391756..b2ca390076 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessorTest.java +++ 
b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessorTest.java @@ -1,5 +1,6 @@ package com.scalar.db.dataloader.core.dataimport.processor; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -95,7 +96,7 @@ void test_importProcessWithStorage() { jsonLinesImportProcessor = new JsonLinesImportProcessor(params); Map statusList = jsonLinesImportProcessor.process(5, 1, UnitTestUtils.getJsonLinesReader()); - assert statusList != null; + assertThat(statusList).isNotNull(); Assertions.assertEquals(1, statusList.size()); } @@ -115,7 +116,7 @@ void test_importProcessWithTransaction() { jsonLinesImportProcessor = new JsonLinesImportProcessor(params); Map statusList = jsonLinesImportProcessor.process(5, 1, UnitTestUtils.getJsonLinesReader()); - assert statusList != null; + assertThat(statusList).isNotNull(); Assertions.assertEquals(1, statusList.size()); } } diff --git a/gradle/spotbugs-exclude.xml b/gradle/spotbugs-exclude.xml index 23254eb3ab..bab1669d82 100644 --- a/gradle/spotbugs-exclude.xml +++ b/gradle/spotbugs-exclude.xml @@ -37,7 +37,7 @@ - + From afd58f4f4aea5e57ec8d96daa88f174972c61026 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Wed, 30 Apr 2025 17:12:25 +0530 Subject: [PATCH 17/27] Fix added for warning and supress warning added for lombok related warnings --- .../scalar/db/dataloader/core/ColumnInfo.java | 1 + .../scalar/db/dataloader/core/ScanRange.java | 1 + .../core/dataexport/ExportManager.java | 1 + .../core/dataexport/ExportOptions.java | 13 +++++++++-- .../producer/JsonLineProducerTask.java | 3 --- .../dataexport/producer/JsonProducerTask.java | 3 --- .../dataexport/producer/ProducerResult.java | 13 ----------- .../producer/ProducerTaskFactory.java | 1 + .../validation/ExportOptionsValidator.java | 1 + .../core/dataimport/ImportManager.java | 1 + .../core/dataimport/ImportOptions.java | 3 ++- .../dataimport/controlfile/ControlFile.java | 1 + .../controlfile/ControlFileTable.java | 1 + .../ControlFileTableFieldMapping.java | 1 + .../dataimport/datachunk/ImportDataChunk.java | 3 ++- .../datachunk/ImportDataChunkStatus.java | 3 ++- .../core/dataimport/datachunk/ImportRow.java | 3 ++- .../dataimport/log/AbstractImportLogger.java | 1 + .../dataimport/log/ImportLoggerConfig.java | 1 + .../log/writer/DefaultLogWriterFactory.java | 1 + .../dataimport/processor/ImportProcessor.java | 1 + .../processor/ImportProcessorParams.java | 1 + .../core/dataimport/task/ImportTask.java | 4 +++- .../dataimport/task/ImportTaskParams.java | 1 + .../task/ImportTransactionalTask.java | 22 ------------------- .../task/result/ImportTargetResult.java | 2 ++ .../task/result/ImportTaskResult.java | 1 + .../ImportSourceRecordValidationResult.java | 18 ++++++++++++--- .../ImportSourceRecordValidator.java | 1 + .../ImportTransactionBatch.java | 1 + .../ImportTransactionBatchResult.java | 1 + .../ImportTransactionBatchStatus.java | 3 ++- .../tablemetadata/TableMetadataRequest.java | 1 + .../tablemetadata/TableMetadataService.java | 1 + .../core/util/TableMetadataUtil.java | 1 + 35 files changed, 63 insertions(+), 52 deletions(-) delete mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerResult.java diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ColumnInfo.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ColumnInfo.java index 685f58a833..080d142939 100644 --- 
a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ColumnInfo.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ColumnInfo.java @@ -9,6 +9,7 @@ *

    This class holds the metadata for a column, including the namespace (schema), table name, and * the column name within the table. */ +@SuppressWarnings("SameNameButDifferent") @Value @Builder public class ColumnInfo { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java index b1ae7b02d6..726ca0e7f3 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java @@ -4,6 +4,7 @@ import lombok.Value; /** * The scan range which is used in data export scan filtering */ +@SuppressWarnings("SameNameButDifferent") @Value public class ScanRange { /** The key for scan start filter */ diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java index f66efdc9de..f228b8fc12 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java @@ -28,6 +28,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +@SuppressWarnings({"SameNameButDifferent", "FutureReturnValueIgnored"}) @RequiredArgsConstructor public abstract class ExportManager { private static final Logger logger = LoggerFactory.getLogger(ExportManager.class); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java index da515cf3c2..b7cad03fa4 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java @@ -9,8 +9,8 @@ import lombok.Builder; import lombok.Data; -/** Options for a ScalarDB export data operation */ -@SuppressWarnings("SameNameButDifferent") +/** Options for a ScalarDB export data operation. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder(builderMethodName = "hiddenBuilder") @Data public class ExportOptions { @@ -31,6 +31,15 @@ public class ExportOptions { @Builder.Default private List projectionColumns = Collections.emptyList(); private List sortOrders; + /** + * Generates and returns an export options builder. 
+ * + * @param namespace namespaces for export + * @param tableName tableName for export + * @param scanPartitionKey scan partition key for export + * @param outputFileFormat output file format for export + * @return a configured export options builder + */ public static ExportOptionsBuilder builder( String namespace, String tableName, Key scanPartitionKey, FileFormat outputFileFormat) { return hiddenBuilder() diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java index 689084698e..4e19cd464e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java @@ -15,13 +15,10 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; public class JsonLineProducerTask extends ProducerTask { private final DataLoaderObjectMapper objectMapper = new DataLoaderObjectMapper(); - private static final Logger logger = LoggerFactory.getLogger(JsonLineProducerTask.class); /** * Class constructor diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java index 9fb8014c6e..4ea10130f9 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java @@ -16,14 +16,11 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; public class JsonProducerTask extends ProducerTask { private final DataLoaderObjectMapper objectMapper = new DataLoaderObjectMapper(); private final boolean prettyPrintJson; - private static final Logger logger = LoggerFactory.getLogger(JsonProducerTask.class); /** * Class constructor diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerResult.java deleted file mode 100644 index 9506fcd722..0000000000 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerResult.java +++ /dev/null @@ -1,13 +0,0 @@ -package com.scalar.db.dataloader.core.dataexport.producer; - -import com.fasterxml.jackson.databind.JsonNode; -import lombok.Builder; -import lombok.Value; - -@Builder -@Value -public class ProducerResult { - JsonNode jsonNode; - String csvSource; - boolean poisonPill; -} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java index 18adc8de6d..f4577e463c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java @@ -8,6 +8,7 @@ import java.util.Map; import lombok.RequiredArgsConstructor; +@SuppressWarnings("SameNameButDifferent") @RequiredArgsConstructor public 
class ProducerTaskFactory { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java index 7bf7645b0e..1a04071600 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java @@ -17,6 +17,7 @@ * A validator for ensuring that export options are consistent with the ScalarDB table metadata and * follow the defined constraints. */ +@SuppressWarnings("SameNameButDifferent") @NoArgsConstructor(access = AccessLevel.PRIVATE) public class ExportOptionsValidator { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java index f1984d6c26..8b08fbb531 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java @@ -35,6 +35,7 @@ *

  • Notifying listeners of various import events * */ +@SuppressWarnings("SameNameButDifferent") @AllArgsConstructor public class ImportManager implements ImportEventListener { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java index 6d3206765e..359fb1f881 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java @@ -7,7 +7,8 @@ import lombok.Builder; import lombok.Data; -/** Import options to import data into one or more ScalarDB tables */ +/** Import options to import data into one or more ScalarDB tables. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Data public class ImportOptions { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFile.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFile.java index 6a2229c186..a6888abfb1 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFile.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFile.java @@ -11,6 +11,7 @@ * Represents a control file that holds control file tables which contains the column mappings that * maps a source file column to the actual database table column. */ +@SuppressWarnings("SameNameButDifferent") @Getter @Setter public class ControlFile { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTable.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTable.java index efcfb0bc00..c65d05887e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTable.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTable.java @@ -12,6 +12,7 @@ * table name, and field mappings. This class is used to define how data from a control file maps to * a specific table in ScalarDB. */ +@SuppressWarnings("SameNameButDifferent") @Getter @Setter public class ControlFileTable { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTableFieldMapping.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTableFieldMapping.java index 1068573304..74785579ec 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTableFieldMapping.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTableFieldMapping.java @@ -10,6 +10,7 @@ * This class defines how data from a specific field in the input source should be mapped to the * corresponding column in the database. 
*/ +@SuppressWarnings("SameNameButDifferent") @Getter @Setter public class ControlFileTableFieldMapping { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java index 69ed97421a..2ab6539d68 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java @@ -4,7 +4,8 @@ import lombok.Builder; import lombok.Data; -/** * Import data chunk data */ +/** * Import data chunk data. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Data @Builder public class ImportDataChunk { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java index d6db3e1e7f..0009f71cd1 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java @@ -6,7 +6,8 @@ import lombok.Builder; import lombok.Data; -/** * A DTO to store import data chunk details */ +/** * A DTO to store import data chunk details. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Data @Builder @JsonDeserialize(builder = ImportDataChunkStatus.ImportDataChunkStatusBuilder.class) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportRow.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportRow.java index 824ca4ffa4..84bcd0af39 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportRow.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportRow.java @@ -3,7 +3,8 @@ import com.fasterxml.jackson.databind.JsonNode; import lombok.Value; -/** Stores data related to a single row on import file */ +/** Stores data related to a single row on import file. */ +@SuppressWarnings("SameNameButDifferent") @Value public class ImportRow { int rowNumber; diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java index 74d2537685..7addf96a55 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java @@ -24,6 +24,7 @@ * logging transaction batch results and managing event listeners. Concrete implementations should * define how to log transaction batches and handle errors. 
*/ +@SuppressWarnings("SameNameButDifferent") @RequiredArgsConstructor public abstract class AbstractImportLogger implements ImportEventListener { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java index f33c4ba188..1bdec6cd75 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java @@ -10,6 +10,7 @@ */ @Value @Builder +@SuppressWarnings("SameNameButDifferent") public class ImportLoggerConfig { /** * The directory path where log files will be stored. This path should end with a directory diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java index b85ee8a33b..c5ef96714d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java @@ -10,6 +10,7 @@ * it creates. It's annotated with Lombok's {@code @AllArgsConstructor} to automatically generate a * constructor that initializes the configuration field. */ +@SuppressWarnings("SameNameButDifferent") @AllArgsConstructor public class DefaultLogWriterFactory implements LogWriterFactory { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java index 1175b192db..81514e61a7 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java @@ -37,6 +37,7 @@ * supports both transactional and non-transactional (storage) modes and provides event notification * capabilities for monitoring the import process. */ +@SuppressWarnings("SameNameButDifferent") @RequiredArgsConstructor public abstract class ImportProcessor { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java index 36b96f62d5..b09a27cf0c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java @@ -17,6 +17,7 @@ *

    This class is immutable and uses the Builder pattern for construction. It encapsulates all * required parameters and dependencies for processing data imports in ScalarDB. */ +@SuppressWarnings("SameNameButDifferent") @Builder @Value public class ImportProcessorParams { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java index 3be177a00a..5187225cc2 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java @@ -38,6 +38,7 @@ * functionality to import data into single or multiple tables based on the provided import options * and control file configurations. */ +@SuppressWarnings({"SameNameButDifferent"}) @RequiredArgsConstructor public abstract class ImportTask { @@ -148,7 +149,8 @@ private List startMultiTableImportProcess( copyNode); targetResults.add(result); } - return targetResults; + // Wrapped in unmodifiable list to fix MixedMutabilityReturnType error-prone warning + return Collections.unmodifiableList(targetResults); } /** diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java index eafe3a42ae..f68a526b8e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java @@ -14,6 +14,7 @@ * Parameters required for executing an import task in the data loader. This class encapsulates all * necessary information needed to process and import a single record into ScalarDB. */ +@SuppressWarnings("SameNameButDifferent") @Builder @Value public class ImportTaskParams { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java index 449270d929..a5b7648905 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java @@ -3,8 +3,6 @@ import com.scalar.db.api.DistributedTransaction; import com.scalar.db.api.Result; import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDaoException; -import com.scalar.db.exception.transaction.AbortException; -import com.scalar.db.exception.transaction.TransactionException; import com.scalar.db.io.Column; import com.scalar.db.io.Key; import java.util.List; @@ -83,24 +81,4 @@ protected void saveRecord( throws ScalarDBDaoException { params.getDao().put(namespace, tableName, partitionKey, clusteringKey, columns, transaction); } - - /** - * Aborts the active ScalarDB transaction if it has not been committed. - * - *

    This method provides a safe way to abort an active transaction, handling any abort-related - * exceptions by wrapping them in a {@link TransactionException}. - * - * @param tx the transaction to be aborted. If null, this method does nothing - * @throws TransactionException if an error occurs during the abort operation or if the underlying - * abort operation fails - */ - private void abortActiveTransaction(DistributedTransaction tx) throws TransactionException { - if (tx != null) { - try { - tx.abort(); - } catch (AbortException e) { - throw new TransactionException(e.getMessage(), tx.getId()); - } - } - } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResult.java index 0fe4e0379d..55a5e2ba99 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResult.java @@ -6,6 +6,8 @@ import lombok.Builder; import lombok.Value; +/** To store import target result. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value public class ImportTargetResult { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java index 3e08cc709b..0d4dba0c86 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java @@ -7,6 +7,7 @@ import lombok.Builder; import lombok.Value; +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value @JsonDeserialize(builder = ImportTaskResult.ImportTaskResultBuilder.class) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java index 30b878b9e6..5c299f9a66 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java @@ -31,17 +31,29 @@ public void addErrorMessage(String columnName, String errorMessage) { this.errorMessages.add(errorMessage); } - /** @return Immutable list of validation error messages */ + /** + * Returns the validation error messages. + * + * @return Immutable list of validation error messages. + */ public List getErrorMessages() { return Collections.unmodifiableList(this.errorMessages); } - /** @return Immutable set of columns that had errors */ + /** + * Returns the columns that had validation errors. + * + * @return Immutable set of columns that had errors. + */ public Set getColumnsWithErrors() { return Collections.unmodifiableSet(this.columnsWithErrors); } - /** @return Validation is valid or not */ + /** + * Returns whether the validation passed. + * + * @return true if there are no validation errors, false otherwise. 
+ */ public boolean isValid() { return this.errorMessages.isEmpty(); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java index 6d773ffccc..a046830172 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java @@ -9,6 +9,7 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +@SuppressWarnings("SameNameButDifferent") @NoArgsConstructor(access = AccessLevel.PRIVATE) public class ImportSourceRecordValidator { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java index a922fd8afa..6fef97c568 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java @@ -6,6 +6,7 @@ import lombok.Value; /** Transaction batch details */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value public class ImportTransactionBatch { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java index 0e44b66953..887b9a392e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java @@ -8,6 +8,7 @@ import lombok.Value; /** Transaction batch result */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value @JsonDeserialize(builder = ImportTransactionBatchResult.ImportTransactionBatchResultBuilder.class) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchStatus.java index 1b7bae34c6..42d37eb64d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchStatus.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchStatus.java @@ -5,7 +5,8 @@ import lombok.Builder; import lombok.Value; -/** Batch status details */ +/** Batch status details. 
*/ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value public class ImportTransactionBatchStatus { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataRequest.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataRequest.java index 8e79da3d6b..3b730a081a 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataRequest.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataRequest.java @@ -3,6 +3,7 @@ import lombok.Getter; /** Represents the request for metadata for a single ScalarDB table */ +@SuppressWarnings("SameNameButDifferent") @Getter public class TableMetadataRequest { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java index 8816945800..f91435fe5f 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java @@ -14,6 +14,7 @@ * Service for retrieving {@link TableMetadata} from ScalarDB. Provides methods to fetch metadata * for individual tables or a collection of tables. */ +@SuppressWarnings("SameNameButDifferent") @RequiredArgsConstructor public class TableMetadataService { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/TableMetadataUtil.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/TableMetadataUtil.java index ddc15a1e59..7cd1834d83 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/TableMetadataUtil.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/TableMetadataUtil.java @@ -11,6 +11,7 @@ import lombok.NoArgsConstructor; /** Utility class for handling ScalarDB table metadata operations. */ +@SuppressWarnings("SameNameButDifferent") @NoArgsConstructor(access = AccessLevel.PRIVATE) public class TableMetadataUtil { From 3de9ad7eef0ecb5669d1e63ed287a3e8352ba36b Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Wed, 11 Jun 2025 18:45:41 +0530 Subject: [PATCH 18/27] Added annotation for same name but different --- .../core/dataimport/processor/ImportProcessorTest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorTest.java index b5163eadb9..d60ebecb00 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorTest.java @@ -55,6 +55,7 @@ *

    Additionally, this class tests the thread executor behavior in ImportProcessor, including * proper shutdown, waiting for tasks to complete, handling interruptions, and task distribution. */ +@SuppressWarnings("SameNameButDifferent") @ExtendWith(MockitoExtension.class) class ImportProcessorTest { From 0f03536d11b2232f672075f0e9a5db0cb7f5f3bb Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Wed, 11 Jun 2025 19:30:50 +0530 Subject: [PATCH 19/27] Changed put to mutate --- .../scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java index 8066141ec2..cd1f2f257c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java @@ -18,6 +18,7 @@ import com.scalar.db.io.Key; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.NoSuchElementException; import java.util.Optional; @@ -107,7 +108,7 @@ public void put( Put put = createPutWith(namespace, table, partitionKey, clusteringKey, columns); try { - transaction.put(put); + transaction.mutate(Collections.singletonList(put)); } catch (CrudException e) { throw new ScalarDbDaoException( CoreError.DATA_LOADER_ERROR_CRUD_EXCEPTION.buildMessage(e.getMessage()), e); From bfad6d367df66cf1805d05e867da1221ff639511 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Wed, 11 Jun 2025 19:55:07 +0530 Subject: [PATCH 20/27] Removed constant formatter to fix warning --- .../dataloader/cli/command/dataexport/ExportCommand.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java index 664bb079e8..015366258e 100755 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java @@ -45,7 +45,6 @@ @CommandLine.Command(name = "export", description = "export data from a ScalarDB table") public class ExportCommand extends ExportCommandOptions implements Callable { - private static final String EXPORT_FILE_NAME_FORMAT = "export.%s.%s.%s.%s"; private static final Logger logger = LoggerFactory.getLogger(ExportCommand.class); @Spec CommandSpec spec; @@ -169,11 +168,8 @@ private String getOutputAbsoluteFilePath( String fileName = StringUtils.isBlank(outputFileName) ? String.format( - EXPORT_FILE_NAME_FORMAT, - namespace, - table, - System.nanoTime(), - outputFormat.toString().toLowerCase()) + "export.%s.%s.%s.%s", + namespace, table, System.nanoTime(), outputFormat.toString().toLowerCase()) : outputFileName; if (StringUtils.isBlank(outputDirectory)) { From be503d661775b1cd0bcd05ad93953443b33480f8 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Thu, 12 Jun 2025 09:08:56 +0530 Subject: [PATCH 21/27] Revert "Changed put to mutate" This reverts commit 0f03536d11b2232f672075f0e9a5db0cb7f5f3bb. 
--- .../scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java index cd1f2f257c..8066141ec2 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java @@ -18,7 +18,6 @@ import com.scalar.db.io.Key; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.NoSuchElementException; import java.util.Optional; @@ -108,7 +107,7 @@ public void put( Put put = createPutWith(namespace, table, partitionKey, clusteringKey, columns); try { - transaction.mutate(Collections.singletonList(put)); + transaction.put(put); } catch (CrudException e) { throw new ScalarDbDaoException( CoreError.DATA_LOADER_ERROR_CRUD_EXCEPTION.buildMessage(e.getMessage()), e); From ac03202f139e0e7d039ac7f74fb6269fceb8b569 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Thu, 12 Jun 2025 09:29:26 +0530 Subject: [PATCH 22/27] Changes --- .../cli/command/dataexport/ScanOrderingConverter.java | 9 +++++---- .../db/dataloader/cli/util/CommandLineInputUtils.java | 4 ++-- .../java/com/scalar/db/dataloader/core/ScanRange.java | 2 +- .../core/dataimport/datachunk/ImportDataChunk.java | 2 +- .../core/dataimport/datachunk/ImportDataChunkStatus.java | 2 +- .../dataimport/datachunk/ImportDataChunkStatusState.java | 2 +- .../core/dataimport/processor/ImportProcessor.java | 2 +- .../core/dataimport/task/ImportTaskAction.java | 2 +- .../transactionbatch/ImportTransactionBatch.java | 2 +- .../transactionbatch/ImportTransactionBatchResult.java | 2 +- gradle/spotbugs-exclude.xml | 2 +- 11 files changed, 16 insertions(+), 15 deletions(-) diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java index 44267017ca..373588b0b4 100755 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java @@ -9,12 +9,13 @@ public class ScanOrderingConverter implements CommandLine.ITypeConverter> { /** - * Converts a comma-separated string of key-value pairs into a list of {@link Scan.Ordering} - * objects. Each pair must be in the format "column=order", where "order" is a valid enum value of - * {@link Scan.Ordering.Order} (e.g., ASC or DESC, case-insensitive). + * Converts a comma-separated string of key-value pairs into a list of {@link + * com.scalar.db.api.Scan.Ordering} objects. Each pair must be in the format "column=order", where + * "order" is a valid enum value of {@link com.scalar.db.api.Scan.Ordering} (e.g., ASC or DESC, + * case-insensitive). 
* * @param value the comma-separated key-value string to convert - * @return a list of {@link Scan.Ordering} objects constructed from the input + * @return a list of {@link com.scalar.db.api.Scan.Ordering} objects constructed from the input * @throws IllegalArgumentException if parsing fails due to invalid format or enum value */ @Override diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java index e3c617d509..4724bf7024 100644 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java @@ -13,7 +13,7 @@ public class CommandLineInputUtils { * Parses a single key-value pair from a string in the format "key=value". * * @param keyValue the key-value string to parse - * @return a {@link Map.Entry} representing the parsed key-value pair + * @return a {@link java.util.Map.Entry} representing the parsed key-value pair * @throws IllegalArgumentException if the input is null, empty, or not in the expected format */ public static Map.Entry parseKeyValue(String keyValue) { @@ -50,7 +50,7 @@ public static String[] splitByDelimiter(String value, String delimiter, int limi /** * Validates that a given integer value is positive. If the value is less than 1, it throws a - * {@link CommandLine.ParameterException} with the specified error message. + * {@link picocli.CommandLine.ParameterException} with the specified error message. * * @param commandLine the {@link CommandLine} instance used to provide context for the exception * @param value the integer value to validate diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java index 726ca0e7f3..baccddfcdf 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java @@ -3,7 +3,7 @@ import com.scalar.db.io.Key; import lombok.Value; -/** * The scan range which is used in data export scan filtering */ +/** The scan range which is used in data export scan filtering */ @SuppressWarnings("SameNameButDifferent") @Value public class ScanRange { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java index 2ab6539d68..65f1f2f895 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java @@ -4,7 +4,7 @@ import lombok.Builder; import lombok.Data; -/** * Import data chunk data. */ +/** Import data chunk data. 
*/ @SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Data @Builder diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java index 0009f71cd1..6c10cecf1f 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java @@ -6,7 +6,7 @@ import lombok.Builder; import lombok.Data; -/** * A DTO to store import data chunk details. */ +/** A DTO to store import data chunk details. */ @SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Data @Builder diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java index 9a6398a45e..36688dbecc 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java @@ -1,6 +1,6 @@ package com.scalar.db.dataloader.core.dataimport.datachunk; -/** * Status of the import data chunk during the import process */ +/** Status of the import data chunk during the import process. */ public enum ImportDataChunkStatusState { START, IN_PROGRESS, diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java index cdd0db3ec5..bbd93ff796 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java @@ -41,7 +41,7 @@ * supports both transactional and non-transactional (storage) modes and provides event notification * capabilities for monitoring the import process. */ -@SuppressWarnings("SameNameButDifferent") +@SuppressWarnings({"SameNameButDifferent", "FutureReturnValueIgnored"}) @RequiredArgsConstructor public abstract class ImportProcessor { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java index 6b8c95d2a8..bee10e411b 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java @@ -1,6 +1,6 @@ package com.scalar.db.dataloader.core.dataimport.task; -/** * Import task actions */ +/** Import task actions. 
*/ public enum ImportTaskAction { INSERT, UPDATE, diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java index 6fef97c568..598824935a 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java @@ -5,7 +5,7 @@ import lombok.Builder; import lombok.Value; -/** Transaction batch details */ +/** Transaction batch details. */ @SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java index 887b9a392e..71383e8bdf 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java @@ -7,7 +7,7 @@ import lombok.Builder; import lombok.Value; -/** Transaction batch result */ +/** Transaction batch result. */ @SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value diff --git a/gradle/spotbugs-exclude.xml b/gradle/spotbugs-exclude.xml index bab1669d82..2f4be55e24 100644 --- a/gradle/spotbugs-exclude.xml +++ b/gradle/spotbugs-exclude.xml @@ -37,7 +37,7 @@ - + From eecf28a2691f33afdd7f45f8c9d715825b55df57 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Fri, 13 Jun 2025 11:18:55 +0530 Subject: [PATCH 23/27] Scan order updated --- .../dataexport/ScanOrderingConverter.java | 20 ++++++++++++++++++- .../dataexport/ScanOrderingConverterTest.java | 7 +++---- 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java index 373588b0b4..40a93f4499 100755 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java @@ -7,6 +7,22 @@ import java.util.stream.Collectors; import picocli.CommandLine; +/** + * A {@link picocli.CommandLine.ITypeConverter} implementation that converts a comma-separated + * string of column-order pairs into a list of {@link com.scalar.db.api.Scan.Ordering} objects. + * + *

    This converter is used to parse CLI arguments for scan ordering in ScalarDB-based + * applications. The input string must contain one or more key-value pairs in the format {@code + * column=order}, separated by commas. The {@code order} must be a valid {@link + * com.scalar.db.api.Scan.Ordering.Order} enum value, such as {@code ASC} or {@code DESC} + * (case-insensitive). + * + *

    Example input: {@code "name=asc,age=desc"} results in a list containing {@code + * Scan.Ordering.asc("name")} and {@code Scan.Ordering.desc("age")}. + * + *

    Invalid formats or unrecognized order values will result in an {@link + * IllegalArgumentException}. + */ public class ScanOrderingConverter implements CommandLine.ITypeConverter> { /** * Converts a comma-separated string of key-value pairs into a list of {@link @@ -27,7 +43,9 @@ public List convert(String value) { String columnName = entry.getKey(); Scan.Ordering.Order sortOrder = Scan.Ordering.Order.valueOf(entry.getValue().trim().toUpperCase()); - return new Scan.Ordering(columnName, sortOrder); + return sortOrder == Scan.Ordering.Order.ASC + ? Scan.Ordering.asc(columnName) + : Scan.Ordering.desc(columnName); }) .collect(Collectors.toList()); } diff --git a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java index e8836ae156..50167ccd41 100755 --- a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java +++ b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java @@ -29,16 +29,15 @@ void callConvert_withInvalidValue_shouldThrowException() { void callConvert_withValidValueAndOrderAscending_shouldReturnScanOrdering() { String value = "id=ASC,age=DESC"; List expectedOrder = new ArrayList<>(); - expectedOrder.add(new Scan.Ordering("id", Scan.Ordering.Order.ASC)); - expectedOrder.add(new Scan.Ordering("age", Scan.Ordering.Order.DESC)); + expectedOrder.add(Scan.Ordering.asc("id")); + expectedOrder.add(Scan.Ordering.desc("age")); Assertions.assertEquals(expectedOrder, scanOrderingConverter.convert(value)); } @Test void callConvert_withValidValueAndOrderDescending_shouldReturnScanOrdering() { String value = "id=desc"; - List expectedOrder = - Collections.singletonList(new Scan.Ordering("id", Scan.Ordering.Order.DESC)); + List expectedOrder = Collections.singletonList(Scan.Ordering.desc("id")); Assertions.assertEquals(expectedOrder, scanOrderingConverter.convert(value)); } } From e17965613b7228f505ec6237213f2f2d5d14cdfa Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Fri, 13 Jun 2025 12:18:53 +0530 Subject: [PATCH 24/27] Fixed symbol not found issue --- .../com/scalar/db/dataloader/core/dataexport/ExportOptions.java | 2 ++ .../core/dataimport/datachunk/ImportDataChunkStatus.java | 2 ++ .../core/dataimport/task/result/ImportTaskResult.java | 2 ++ .../transactionbatch/ImportTransactionBatchResult.java | 2 ++ 4 files changed, 8 insertions(+) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java index b7cad03fa4..b8d8206f99 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java @@ -48,4 +48,6 @@ public static ExportOptionsBuilder builder( .scanPartitionKey(scanPartitionKey) .outputFileFormat(outputFileFormat); } + + public static class ExportOptionsBuilder {} } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java index 6c10cecf1f..1f1ee9fefa 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java +++ 
b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java @@ -39,4 +39,6 @@ public class ImportDataChunkStatus { @JsonProperty("status") private final ImportDataChunkStatusState status; + + public static class ImportDataChunkStatusBuilder {} } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java index 0d4dba0c86..b61cc10eef 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java @@ -23,4 +23,6 @@ public class ImportTaskResult { @JsonProperty("dataChunkId") int dataChunkId; + + public static class ImportTaskResultBuilder {} } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java index 71383e8bdf..17936939bb 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java @@ -30,4 +30,6 @@ public class ImportTransactionBatchResult { @JsonProperty("success") boolean success; + + public static class ImportTransactionBatchResultBuilder {} } From f75d31be102ffb65373c0c8174bcc03e891d673d Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Fri, 13 Jun 2025 15:22:14 +0530 Subject: [PATCH 25/27] Javadoc related fixes --- .../core/DataLoaderObjectMapper.java | 21 +++++++++++++++++++ .../core/dataexport/ExportManager.java | 1 + .../core/dataexport/ExportOptions.java | 7 +++++++ .../dataexport/producer/CsvProducerTask.java | 1 + .../producer/JsonLineProducerTask.java | 1 + .../dataexport/producer/JsonProducerTask.java | 2 ++ .../ControlFileValidationLevel.java | 10 +++++---- .../dao/ScalarDbStorageManager.java | 15 +++++++++---- .../dao/ScalarDbTransactionManager.java | 2 ++ .../datachunk/ImportDataChunkStatus.java | 7 +++++++ .../task/result/ImportTaskResult.java | 7 +++++++ .../ImportSourceRecordValidator.java | 3 +++ .../ImportTransactionBatchResult.java | 7 +++++++ 13 files changed, 76 insertions(+), 8 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DataLoaderObjectMapper.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DataLoaderObjectMapper.java index d90fd49b65..fbb6a8d643 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DataLoaderObjectMapper.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DataLoaderObjectMapper.java @@ -4,8 +4,29 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +/** + * A custom {@link ObjectMapper} used for data loading operations. + * + *

    This mapper is configured to: + * + *

      + *
    • Exclude {@code null} values during serialization + *
    • Support Java 8 date/time types via {@link JavaTimeModule} + *
    + * + *

    It can be reused wherever consistent JSON serialization/deserialization behavior is needed. + */ public class DataLoaderObjectMapper extends ObjectMapper { + /** + * Constructs a {@code DataLoaderObjectMapper} with default settings, including: + * + *

      + *
    • {@link com.fasterxml.jackson.annotation.JsonInclude.Include#NON_NULL} to skip {@code + * null} values + *
    • {@link JavaTimeModule} registration to handle Java 8 date/time types + *
    + */ public DataLoaderObjectMapper() { super(); this.setSerializationInclusion(JsonInclude.Include.NON_NULL); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java index 480550eb7d..d34a423e8b 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java @@ -65,6 +65,7 @@ abstract void processFooter( * @param exportOptions Export options * @param tableMetadata Metadata for a single ScalarDB table * @param writer Writer to write the exported data + * @return export report object containing data such as total exported row count */ public ExportReport startExport( ExportOptions exportOptions, TableMetadata tableMetadata, Writer writer) { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java index b8d8206f99..3c7ed9ef56 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java @@ -49,5 +49,12 @@ public static ExportOptionsBuilder builder( .outputFileFormat(outputFileFormat); } + /** + * Explicit builder class declaration required for Javadoc generation. + * + *

    Although Lombok generates this builder class automatically, Javadoc requires an explicit + * declaration to resolve references in the generated documentation, especially when using a + * custom builder method name (e.g., {@code hiddenBuilder()}). + */ public static class ExportOptionsBuilder {} } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java index bb7cc8c583..7ba86c1bef 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java @@ -34,6 +34,7 @@ public class CsvProducerTask extends ProducerTask { * Class constructor * * @param includeMetadata Include metadata in the exported data + * @param projectColumns list of columns that is required in export data * @param tableMetadata Metadata for a single ScalarDB table * @param columnDataTypes Map of data types for the all columns in a ScalarDB table * @param delimiter Delimiter used in csv content diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java index 64192c2b60..dea8336d4e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java @@ -25,6 +25,7 @@ public class JsonLineProducerTask extends ProducerTask { * Class constructor * * @param includeMetadata Include metadata in the exported data + * @param projectionColumns list of columns that is required in export data * @param tableMetadata Metadata for a single ScalarDB table * @param columnDataTypes Map of data types for the all columns in a ScalarDB table */ diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java index c04b43a12e..69d7177ae2 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java @@ -27,8 +27,10 @@ public class JsonProducerTask extends ProducerTask { * Class constructor * * @param includeMetadata Include metadata in the exported data + * @param projectionColumns list of columns that is required in export data * @param tableMetadata Metadata for a single ScalarDB table * @param columnDataTypes Map of data types for the all columns in a ScalarDB table + * @param prettyPrintJson Json data should be formatted or not */ public JsonProducerTask( boolean includeMetadata, diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidationLevel.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidationLevel.java index 3753d0ba65..37bccd66fa 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidationLevel.java +++ 
b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidationLevel.java @@ -1,11 +1,13 @@ package com.scalar.db.dataloader.core.dataimport.controlfile; -/** Control file validation level */ +/** Control file validation level. */ public enum ControlFileValidationLevel { - /* All columns need to be mapped */ + /** All columns need to be mapped. */ FULL, - /* All partition key and clustering key columns need to be mapped */ + + /** All partition key and clustering key columns need to be mapped. */ KEYS, - /* Only validate the columns that are mapped */ + + /** Only validate the columns that are mapped. */ MAPPED } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java index 0ed47cc647..5f28ceb5d7 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java @@ -3,7 +3,6 @@ import com.scalar.db.api.DistributedStorage; import com.scalar.db.api.DistributedStorageAdmin; import com.scalar.db.service.StorageFactory; -import java.io.IOException; import javax.annotation.Nullable; public class ScalarDbStorageManager { @@ -16,17 +15,25 @@ public class ScalarDbStorageManager { * * @param storageFactory Factory to create all the necessary ScalarDB data managers */ - public ScalarDbStorageManager(StorageFactory storageFactory) throws IOException { + public ScalarDbStorageManager(StorageFactory storageFactory) { storage = storageFactory.getStorage(); storageAdmin = storageFactory.getStorageAdmin(); } - /** Returns distributed storage for ScalarDB connection that is running in storage mode */ + /** + * Returns distributed storage for ScalarDB connection that is running in storage mode + * + * @return distributed storage object + */ public DistributedStorage getDistributedStorage() { return storage; } - /** Returns distributed storage admin for ScalarDB admin operations */ + /** + * Returns distributed storage admin for ScalarDB admin operations + * + * @return distributed storage admin object + */ public DistributedStorageAdmin getDistributedStorageAdmin() { return storageAdmin; } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java index 7999c01032..c02416045d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java @@ -15,6 +15,8 @@ public ScalarDbTransactionManager(TransactionFactory transactionFactory) throws /** * Returns distributed Transaction manager for ScalarDB connection that is running in transaction * mode + * + * @return distributed transaction manager object */ public DistributedTransactionManager getDistributedTransactionManager() { return transactionManager; diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java index 1f1ee9fefa..2fa6b1cf88 100644 --- 
a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java @@ -40,5 +40,12 @@ public class ImportDataChunkStatus { @JsonProperty("status") private final ImportDataChunkStatusState status; + /** + * Explicit builder class declaration required for Javadoc generation. + * + *

    This class is normally generated automatically by Lombok's {@code @Builder} annotation. + * However, when using a custom builder method name (e.g., {@code hiddenBuilder()}), Javadoc may + * fail to resolve references to this builder unless it is explicitly declared. + */ public static class ImportDataChunkStatusBuilder {} } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java index b61cc10eef..5c91917d05 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java @@ -24,5 +24,12 @@ public class ImportTaskResult { @JsonProperty("dataChunkId") int dataChunkId; + /** + * Explicit builder class declaration required for Javadoc generation. + * + *

    This class is normally generated automatically by Lombok's {@code @Builder} annotation. + * However, when using a custom builder method name (e.g., {@code hiddenBuilder()}), Javadoc may + * fail to resolve references to this builder unless it is explicitly declared. + */ public static class ImportTaskResultBuilder {} } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java index a046830172..c85bb3f48a 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java @@ -22,6 +22,7 @@ public class ImportSourceRecordValidator { * @param columnNames List of all column names in table * @param sourceRecord source data * @param allColumnsRequired If true treat missing columns as an error + * @param tableMetadata metadata of the table to which data is to be imported * @return Source record validation result */ public static ImportSourceRecordValidationResult validateSourceRecord( @@ -84,6 +85,7 @@ public static void checkMissingKeys( * @param columnNames List of column names for a table * @param validationResult Source record validation result * @param ignoreColumns Columns that can be ignored in the check + * @param tableMetadata metadata of the table to which data is to be imported */ public static void checkMissingColumns( JsonNode sourceRecord, @@ -108,6 +110,7 @@ public static void checkMissingColumns( * @param sourceRecord Source json object * @param columnNames List of column names for a table * @param validationResult Source record validation result + * @param tableMetadata metadata of the table to which data is to be imported */ public static void checkMissingColumns( JsonNode sourceRecord, diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java index 17936939bb..072117fd80 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java @@ -31,5 +31,12 @@ public class ImportTransactionBatchResult { @JsonProperty("success") boolean success; + /** + * Explicit builder class declaration required for Javadoc generation. + * + *

    This class is normally generated automatically by Lombok's {@code @Builder} annotation. + * However, when using a custom builder method name (e.g., {@code hiddenBuilder()}), Javadoc may + * fail to resolve references to this builder unless it is explicitly declared. + */ public static class ImportTransactionBatchResultBuilder {} } From 06a4a9a1d6f58a086690ab99c020d79caa9a6329 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Fri, 13 Jun 2025 15:59:39 +0530 Subject: [PATCH 26/27] Java doc related changes -2 --- .../core/dataexport/CsvExportManager.java | 1 + .../dataloader/core/dataexport/ExportManager.java | 1 + .../core/dataexport/JsonExportManager.java | 1 + .../core/dataexport/JsonLineExportManager.java | 3 +++ .../dataexport/producer/JsonLineProducerTask.java | 4 ++++ .../dataexport/producer/JsonProducerTask.java | 4 ++++ .../core/dataexport/producer/ProducerTask.java | 10 ++++++++++ .../dataexport/producer/ProducerTaskFactory.java | 1 + .../dataimport/dao/ScalarDbStorageManager.java | 13 +++++++++++-- .../dao/ScalarDbTransactionManager.java | 15 +++++++++++++-- .../task/mapping/ImportDataMapping.java | 1 + .../task/result/ImportResultStatus.java | 1 + .../task/result/ImportTargetResultStatus.java | 1 + .../dataimport/task/result/ImportTaskResult.java | 1 + .../validation/ImportSourceRecordValidator.java | 1 + .../scalar/db/dataloader/core/util/DebugUtil.java | 12 ++++++++++++ .../scalar/db/dataloader/core/util/PathUtil.java | 5 +++++ 17 files changed, 71 insertions(+), 4 deletions(-) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java index 9e0dc4ba46..034c1fa7ed 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java @@ -11,6 +11,7 @@ import java.util.Iterator; import java.util.List; +/** Export manager implementation which manages the export task that exports data in CSV format */ public class CsvExportManager extends ExportManager { public CsvExportManager( DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java index d34a423e8b..fdc27d664c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java @@ -28,6 +28,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** Export manager class which manages the export task */ @SuppressWarnings({"SameNameButDifferent", "FutureReturnValueIgnored"}) @RequiredArgsConstructor public abstract class ExportManager { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java index 34e382dd5e..4616c2ba59 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java @@ -7,6 +7,7 @@ import java.io.IOException; import java.io.Writer; +/** Export manager implementation which manages the export 
task that exports data in JSON format */ public class JsonExportManager extends ExportManager { public JsonExportManager( DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java index 8bc5fabe07..c7167e8f68 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java @@ -7,6 +7,9 @@ import java.io.IOException; import java.io.Writer; +/** + * Export manager implementation which manages the export task that exports data in JSONLines format + */ public class JsonLineExportManager extends ExportManager { public JsonLineExportManager( DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java index dea8336d4e..aabbba205c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java @@ -17,6 +17,10 @@ import java.util.List; import java.util.Map; +/** + * Producer that converts ScalarDB scan results to JSONLine content. The output is sent to a queue + * to be processed by a consumer + */ public class JsonLineProducerTask extends ProducerTask { private final DataLoaderObjectMapper objectMapper = new DataLoaderObjectMapper(); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java index 69d7177ae2..742911e211 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java @@ -18,6 +18,10 @@ import java.util.List; import java.util.Map; +/** + * Producer that converts ScalarDB scan results to JSON content. The output is sent to a queue to be + * processed by a consumer + */ public class JsonProducerTask extends ProducerTask { private final DataLoaderObjectMapper objectMapper = new DataLoaderObjectMapper(); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java index 2ebf01ff10..eb8f93f521 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java @@ -8,6 +8,16 @@ import java.util.Map; import java.util.Set; +/** + * An abstract base class for producer tasks that process chunks of data retrieved from a ScalarDB + * table. + * + *

    Subclasses are expected to implement the {@link #process(List)} method, which transforms a + * chunk of {@link Result} objects into a specific format (e.g., CSV, JSON). + * + *

    This class manages metadata and column projection logic that can be used by all concrete + * implementations. + */ public abstract class ProducerTask { protected final TableMetadata tableMetadata; diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java index f4577e463c..2339ab1366 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java @@ -8,6 +8,7 @@ import java.util.Map; import lombok.RequiredArgsConstructor; +/** Responsible to create producer task based on the export file format */ @SuppressWarnings("SameNameButDifferent") @RequiredArgsConstructor public class ProducerTaskFactory { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java index 5f28ceb5d7..54185b9b3a 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java @@ -5,15 +5,24 @@ import com.scalar.db.service.StorageFactory; import javax.annotation.Nullable; +/** + * A manager class for handling ScalarDB operations in storage mode. + * + *

    Provides access to {@link DistributedStorage} for data operations and {@link + * DistributedStorageAdmin} for administrative operations such as schema management. + * + *

    This class is typically used when interacting with ScalarDB in a non-transactional, + * storage-only configuration. + */ public class ScalarDbStorageManager { @Nullable private final DistributedStorage storage; private final DistributedStorageAdmin storageAdmin; /** - * Class constructor + * Constructs a {@code ScalarDbStorageManager} using the provided {@link StorageFactory}. * - * @param storageFactory Factory to create all the necessary ScalarDB data managers + * @param storageFactory the factory used to create the ScalarDB storage and admin instances */ public ScalarDbStorageManager(StorageFactory storageFactory) { storage = storageFactory.getStorage(); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java index c02416045d..e3b6f1bb85 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java @@ -2,13 +2,24 @@ import com.scalar.db.api.DistributedTransactionManager; import com.scalar.db.service.TransactionFactory; -import java.io.IOException; +/** + * A manager class for handling ScalarDB operations in transaction mode. + * + *

    Provides access to {@link DistributedTransactionManager} for data operations + * + *

    This class is typically used when interacting with ScalarDB in a transactional configuration. + */ public class ScalarDbTransactionManager { private final DistributedTransactionManager transactionManager; - public ScalarDbTransactionManager(TransactionFactory transactionFactory) throws IOException { + /** + * Constructs a {@code ScalarDbTransactionManager} using the provided {@link TransactionFactory}. + * + * @param transactionFactory the factory used to create the ScalarDB transaction manager instance + */ + public ScalarDbTransactionManager(TransactionFactory transactionFactory) { transactionManager = transactionFactory.getTransactionManager(); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java index 7f7524d263..c777dae66c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java @@ -4,6 +4,7 @@ import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTable; import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTableFieldMapping; +/** To map source data to target columns using the control file mappings */ public class ImportDataMapping { /** diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportResultStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportResultStatus.java index 70c03b9c37..0e23de63bb 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportResultStatus.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportResultStatus.java @@ -1,5 +1,6 @@ package com.scalar.db.dataloader.core.dataimport.task.result; +/** Enum to indicate the import result status */ public enum ImportResultStatus { SUCCESS, PARTIAL_SUCCESS, diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java index d774f8f823..93f9b40a63 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java @@ -1,5 +1,6 @@ package com.scalar.db.dataloader.core.dataimport.task.result; +/** Import target result status */ public enum ImportTargetResultStatus { VALIDATION_FAILED, RETRIEVAL_FAILED, diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java index 5c91917d05..19c6859c4c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java @@ -7,6 +7,7 @@ import lombok.Builder; import lombok.Value; +/** A DTO to store the import task result */ @SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value diff --git 
a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java index c85bb3f48a..3fbc5ffc38 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java @@ -9,6 +9,7 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +/** Responsible for validating source data prior to import. */ @SuppressWarnings("SameNameButDifferent") @NoArgsConstructor(access = AccessLevel.PRIVATE) public class ImportSourceRecordValidator { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/DebugUtil.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/DebugUtil.java index a16e2fae02..75823f1a43 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/DebugUtil.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/DebugUtil.java @@ -3,6 +3,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * A utility class for debugging purposes, providing methods to log runtime memory usage. + * + *
<p>
    This class is typically used to log memory usage at various stages of an application's + * execution to help diagnose memory-related issues or understand memory consumption patterns. + */ public class DebugUtil { private static final Logger logger = LoggerFactory.getLogger(DebugUtil.class); @@ -24,6 +30,12 @@ public static void logMemoryUsage(String stage) { formatMemorySize(maxMemory)); } + /** + * Converts the given memory size in bytes to a human-readable string in megabytes. + * + * @param size the memory size in bytes + * @return the formatted memory size in megabytes, rounded to two decimal places + */ private static String formatMemorySize(long size) { return String.format("%.2f", size / (1024.0 * 1024.0)); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/PathUtil.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/PathUtil.java index c307ea961f..3cc165f5d3 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/PathUtil.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/PathUtil.java @@ -2,6 +2,11 @@ import java.io.File; +/** + * A utility class for common operations related to file system paths. + * + *
<p>
    Provides helper methods such as ensuring a trailing path separator for directory paths. + */ public class PathUtil { /** From db73e47a959e9cbbc6e17bb5b04e53ab4369bb01 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Mon, 16 Jun 2025 11:00:58 +0530 Subject: [PATCH 27/27] Javadoc changes and removed an unused file --- .../scalar/db/dataloader/core/Constants.java | 30 +++++++----------- .../db/dataloader/core/DatabaseKeyType.java | 3 ++ .../scalar/db/dataloader/core/FileFormat.java | 14 +++++++++ .../db/dataloader/core/ScalarDbMode.java | 15 ++++++++- .../core/dataexport/CsvExportManager.java | 9 ++++++ .../core/dataexport/ExportReport.java | 10 ++++++ .../core/dataexport/JsonExportManager.java | 9 ++++++ .../dataexport/JsonLineExportManager.java | 9 ++++++ .../dataexport/producer/ProducerTask.java | 26 ++++++++++++++++ .../core/dataimport/ImportMode.java | 11 +++++++ .../datachunk/ImportDataChunkStatusState.java | 5 +++ .../core/dataimport/log/LogMode.java | 14 ++++++++- .../log/SingleFileImportLogger.java | 12 +++++++ .../log/SplitByDataChunkImportLogger.java | 9 ++++++ .../processor/JsonImportProcessor.java | 5 +++ .../dataimport/task/ImportTaskAction.java | 13 ++++++-- .../task/result/ImportResultStatus.java | 13 -------- .../task/result/ImportTargetResultStatus.java | 31 ++++++++++++++++++- 18 files changed, 201 insertions(+), 37 deletions(-) delete mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportResultStatus.java diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/Constants.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/Constants.java index 09759bf5b1..0d5996747a 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/Constants.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/Constants.java @@ -2,26 +2,18 @@ /** The constants that are used in the com.scalar.dataloader.core package */ public class Constants { - - public static final String IMPORT_LOG_ENTRY_STATUS_FIELD = "data_loader_import_status"; + /** + * Format string used for table lookup keys. It expects two string arguments: the namespace and + * the table name, respectively. + * + *
<p>
    Example: {@code String.format(TABLE_LOOKUP_KEY_FORMAT, "ns", "table")} will result in + * "ns.table". + */ public static final String TABLE_LOOKUP_KEY_FORMAT = "%s.%s"; - - public static final String LOG_UPDATE_SUCCESS = "Row %s has been updated in table %s.%s"; - public static final String LOG_INSERT_SUCCESS = "Row %s has been inserted into table %s.%s"; - public static final String LOG_IMPORT_VALIDATION = "Validating data for line %s ..."; - public static final String LOG_IMPORT_GET_DATA = - "Retrieving existing data record from database ..."; - public static final String LOG_IMPORT_LINE_SUCCESS = "Row %s import is completed"; - public static final String LOG_IMPORT_LINE_FAILED = "Row %s import has failed: %s"; - public static final String LOG_IMPORT_COMPLETED = - "The import process has been completed. Please check the success and failed output files for a detailed report"; - - public static final String LOG_SCANNING_START = "Retrieving data from %s.%s table ..."; - public static final String LOG_CONVERTING = "Converting %s.%s data to %s ..."; - public static final String MISSING_CSV_HEADERS = - "Valid headers are not present or missing in the provided CSV file"; - public static final String ERROR_MISSING_SOURCE_FIELD = - "the data mapping source field '%s' for table '%s' is missing in the json data record"; + /** + * Status message used to indicate that a transaction was aborted as part of a batch transaction + * failure. + */ public static final String ABORT_TRANSACTION_STATUS = "Transaction aborted as part of batch transaction aborted"; } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DatabaseKeyType.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DatabaseKeyType.java index b1926740ad..c435cfe017 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DatabaseKeyType.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DatabaseKeyType.java @@ -2,6 +2,9 @@ /** Type of key in database */ public enum DatabaseKeyType { + /** Represents a partition key, which determines the partition where the data is stored. */ PARTITION, + + /** Represents a clustering key, which determines the order of data within a partition. */ CLUSTERING } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/FileFormat.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/FileFormat.java index 8a7b2a6113..184b5b38c2 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/FileFormat.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/FileFormat.java @@ -2,7 +2,21 @@ /** The available input and output formats for the data loader import and export commands */ public enum FileFormat { + /** + * JSON (JavaScript Object Notation) format. Typically, represents the entire dataset as a single + * JSON array or object. + */ JSON, + + /** + * JSON Lines (JSONL) format. Each line is a separate JSON object, making it suitable for + * streaming large datasets. + */ JSONL, + + /** + * CSV (Comma-Separated Values) format. A plain text format where each line represents a row and + * columns are separated by commas. 
+ */ CSV } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDbMode.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDbMode.java index 4f9e79ce61..4de647e286 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDbMode.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDbMode.java @@ -1,7 +1,20 @@ package com.scalar.db.dataloader.core; -/** The available modes a ScalarDB instance can run in */ +/** + * The available modes a ScalarDB instance can run in. Determines how ScalarDB interacts with the + * underlying database. + */ public enum ScalarDbMode { + + /** + * Storage mode: Operates directly on the underlying storage engine without transactional + * guarantees. Suitable for raw data access and simple CRUD operations. + */ STORAGE, + + /** + * Transaction mode: Provides transaction management with ACID guarantees across multiple + * operations. Suitable for applications that require consistency and atomicity. + */ TRANSACTION } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java index 034c1fa7ed..64f059852e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java @@ -13,6 +13,15 @@ /** Export manager implementation which manages the export task that exports data in CSV format */ public class CsvExportManager extends ExportManager { + + /** + * Constructs a {@code CsvExportManager} with the specified {@link DistributedStorage}, {@link + * ScalarDbDao}, and {@link ProducerTaskFactory}. + * + * @param storage the {@code DistributedStorage} instance used to read data from the database + * @param dao the {@code ScalarDbDao} used to execute export-related database operations + * @param producerTaskFactory the factory used to create producer tasks for exporting data + */ public CsvExportManager( DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { super(storage, dao, producerTaskFactory); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportReport.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportReport.java index d856b7b0ff..8e9b2a90af 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportReport.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportReport.java @@ -16,10 +16,20 @@ public class ExportReport { */ private final LongAdder exportedRowCount = new LongAdder(); + /** + * Returns the total number of rows that have been exported so far. + * + * @return the cumulative exported row count + */ public long getExportedRowCount() { return exportedRowCount.sum(); } + /** + * Increments the exported row count by the specified value. 
+ * + * @param count the number of rows to add to the exported count + */ public void updateExportedRowCount(long count) { this.exportedRowCount.add(count); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java index 4616c2ba59..fadac644a2 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java @@ -9,6 +9,15 @@ /** Export manager implementation which manages the export task that exports data in JSON format */ public class JsonExportManager extends ExportManager { + + /** + * Constructs a {@code JsonExportManager} with the specified {@link DistributedStorage}, {@link + * ScalarDbDao}, and {@link ProducerTaskFactory}. + * + * @param storage the {@code DistributedStorage} instance used to read data from the database + * @param dao the {@code ScalarDbDao} used to execute export-related database operations + * @param producerTaskFactory the factory used to create producer tasks for exporting data + */ public JsonExportManager( DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { super(storage, dao, producerTaskFactory); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java index c7167e8f68..2ce21deb7d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java @@ -11,6 +11,15 @@ * Export manager implementation which manages the export task that exports data in JSONLines format */ public class JsonLineExportManager extends ExportManager { + + /** + * Constructs a {@code JsonLineExportManager} with the specified {@link DistributedStorage}, + * {@link ScalarDbDao}, and {@link ProducerTaskFactory}. + * + * @param storage the {@code DistributedStorage} instance used to read data from the database + * @param dao the {@code ScalarDbDao} used to execute export-related database operations + * @param producerTaskFactory the factory used to create producer tasks for exporting data + */ public JsonLineExportManager( DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { super(storage, dao, producerTaskFactory); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java index eb8f93f521..acb9a90519 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java @@ -20,9 +20,28 @@ */ public abstract class ProducerTask { + /** + * The metadata of the table from which the data is being exported. Used to understand schema + * details such as column types and primary keys. + */ protected final TableMetadata tableMetadata; + + /** + * A map of column names to their corresponding data types. Derived from the table metadata to + * assist in formatting output correctly. 
+ */ protected final Map dataTypeByColumnName; + + /** + * Flag indicating whether to include internal metadata columns (e.g., transaction metadata) in + * the output. + */ protected final boolean includeMetadata; + + /** + * A set of column names to include in the exported output. If empty, all columns are included by + * default. + */ protected final Set projectedColumnsSet; /** @@ -44,5 +63,12 @@ protected ProducerTask( this.dataTypeByColumnName = columnDataTypes; } + /** + * Processes a chunk of export data and returns a formatted string representation of the chunk. + * + * @param dataChunk the list of {@link Result} objects representing a chunk of data to be exported + * @return a formatted string representing the processed data chunk, ready to be written to the + * output + */ public abstract String process(List dataChunk); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportMode.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportMode.java index 7f2a805e75..8096123baa 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportMode.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportMode.java @@ -2,7 +2,18 @@ /** Represents the way to be imported data is handled */ public enum ImportMode { + /** + * Insert mode: Adds new rows to the database. Fails if the row with the same primary key already + * exists. + */ INSERT, + + /** + * Update mode: Modifies existing rows in the database. Fails if the row with the specified key + * does not exist. + */ UPDATE, + + /** Upsert mode: Inserts new rows or updates existing ones if a row with the same key exists. */ UPSERT } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java index 36688dbecc..4b9e443ae1 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java @@ -2,7 +2,12 @@ /** Status of the import data chunk during the import process. */ public enum ImportDataChunkStatusState { + /** Indicates that the import of the data chunk has started but has not yet progressed. */ START, + + /** Indicates that the import of the data chunk is currently in progress. */ IN_PROGRESS, + + /** Indicates that the import of the data chunk has been successfully completed. */ COMPLETE } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java index cf0349366c..0af891ea54 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java @@ -1,7 +1,19 @@ package com.scalar.db.dataloader.core.dataimport.log; -/** Log modes available for import logging */ +/** + * Log modes available for import logging. Determines how logs are organized and written during the + * import process. + */ public enum LogMode { + /** + * Logs all import-related messages into a single log file. Useful for centralized and sequential + * log analysis. 
+ */ SINGLE_FILE, + + /** + * Splits logs into separate files for each data chunk being imported. Useful for parallel + * processing and debugging individual chunks independently. + */ SPLIT_BY_DATA_CHUNK } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java index 95d13d2983..f1ba3c0b03 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java @@ -30,9 +30,21 @@ @ThreadSafe public class SingleFileImportLogger extends AbstractImportLogger { + /** + * Name format for the file where the summary of the import process will be logged. This file + * typically includes overall statistics and a high-level status of the import. + */ protected static final String SUMMARY_LOG_FILE_NAME = "summary.log"; + + /** Name format for the file where successfully imported records will be logged in JSON format. */ protected static final String SUCCESS_LOG_FILE_NAME = "success.json"; + + /** + * Name format for the file where failed import records will be logged in JSON format, typically + * along with error details or reasons for failure. + */ protected static final String FAILURE_LOG_FILE_NAME = "failure.json"; + private static final Logger logger = LoggerFactory.getLogger(SingleFileImportLogger.class); private volatile LogWriter summaryLogWriter; private final LogWriter successLogWriter; diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java index cff2d5d445..3970ca4017 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java @@ -36,8 +36,17 @@ @ThreadSafe public class SplitByDataChunkImportLogger extends AbstractImportLogger { + /** + * Name format for the file where the summary of the import process will be logged. This file + * typically includes overall statistics and a high-level status of the import. + */ protected static final String SUMMARY_LOG_FILE_NAME_FORMAT = "data_chunk_%s_summary.json"; + /** + * Name format for the file where failed import records will be logged in JSON format, typically + * along with error details or reasons for failure. + */ protected static final String FAILURE_LOG_FILE_NAME_FORMAT = "data_chunk_%s_failure.json"; + /** Name format for the file where successfully imported records will be logged in JSON format. 
*/ protected static final String SUCCESS_LOG_FILE_NAME_FORMAT = "data_chunk_%s_success.json"; private static final Logger logger = LoggerFactory.getLogger(SplitByDataChunkImportLogger.class); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java index 7102d5d4f2..c435f0f13d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java @@ -38,6 +38,11 @@ public class JsonImportProcessor extends ImportProcessor { private static final DataLoaderObjectMapper OBJECT_MAPPER = new DataLoaderObjectMapper(); private final AtomicInteger dataChunkIdCounter = new AtomicInteger(0); + /** + * Creates a new JsonImportProcessor with the specified parameters. + * + * @param params Configuration parameters for the import processor + */ public JsonImportProcessor(ImportProcessorParams params) { super(params); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java index bee10e411b..804c4c9f3e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java @@ -1,7 +1,16 @@ package com.scalar.db.dataloader.core.dataimport.task; -/** Import task actions. */ +/** Represents the type of action to perform for an import task. */ public enum ImportTaskAction { + /** + * Insert action: Adds a new record to the database. Fails if the record with the same primary key + * already exists. + */ INSERT, - UPDATE, + + /** + * Update action: Modifies an existing record in the database. Fails if the record does not + * already exist. + */ + UPDATE } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportResultStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportResultStatus.java deleted file mode 100644 index 0e23de63bb..0000000000 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportResultStatus.java +++ /dev/null @@ -1,13 +0,0 @@ -package com.scalar.db.dataloader.core.dataimport.task.result; - -/** Enum to indicate the import result status */ -public enum ImportResultStatus { - SUCCESS, - PARTIAL_SUCCESS, - FAILURE, - VALIDATION_FAILED, - RETRIEVAL_FAILED, - MAPPING_FAILED, - TIMEOUT, - CANCELLED -} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java index 93f9b40a63..fa23324f08 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java @@ -1,13 +1,42 @@ package com.scalar.db.dataloader.core.dataimport.task.result; -/** Import target result status */ +/** + * Import target result status. 
Represents the outcome of processing a single import target (e.g., a + * data row or record). + */ public enum ImportTargetResultStatus { + /** + * The import failed due to validation errors (e.g., missing required fields, invalid formats). + */ VALIDATION_FAILED, + + /** + * The import failed because the existing data could not be retrieved (e.g., due to I/O or + * database issues). + */ RETRIEVAL_FAILED, + + /** The import failed due to missing required columns in the input data. */ MISSING_COLUMNS, + + /** + * The import was skipped because the data already exists and cannot be overwritten (in INSERT + * mode). + */ DATA_ALREADY_EXISTS, + + /** The import failed because the required existing data was not found (e.g., in UPDATE mode). */ DATA_NOT_FOUND, + + /** The import failed during the save operation (e.g., due to database write errors). */ SAVE_FAILED, + + /** The import target was successfully saved to the database. */ SAVED, + + /** + * The import process was aborted before completion (e.g., due to a batch failure or external + * cancellation). + */ ABORTED }
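As an illustrative aside (not part of the patch itself), the following sketch shows how the format strings documented above are expected to be used. The class name LogFileNameExample and the data chunk id are invented for the example; the class is declared in the logger package only so that the protected SUMMARY_LOG_FILE_NAME_FORMAT constant is visible.

    package com.scalar.db.dataloader.core.dataimport.log;

    import com.scalar.db.dataloader.core.Constants;

    public class LogFileNameExample {
      public static void main(String[] args) {
        // Table lookup keys are built as "<namespace>.<table>".
        String tableKey = String.format(Constants.TABLE_LOOKUP_KEY_FORMAT, "ns", "table");
        System.out.println(tableKey); // prints "ns.table"

        // In SPLIT_BY_DATA_CHUNK log mode, each data chunk gets its own summary/success/failure files.
        int dataChunkId = 3; // hypothetical data chunk id
        String summaryLogName =
            String.format(SplitByDataChunkImportLogger.SUMMARY_LOG_FILE_NAME_FORMAT, dataChunkId);
        System.out.println(summaryLogName); // prints "data_chunk_3_summary.json"
      }
    }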
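Along the same lines, here is a minimal sketch of how the documented ImportMode semantics could translate into an ImportTaskAction for a single record. The resolveAction helper and the ImportActionExample class are hypothetical and do not exist in the codebase; they only restate the enum descriptions above in code form.

    package com.scalar.db.dataloader.core.dataimport;

    import com.scalar.db.dataloader.core.dataimport.task.ImportTaskAction;

    public class ImportActionExample {
      // Hypothetical helper: picks the task action from the import mode and whether the target
      // record already exists, following the enum descriptions in this patch.
      static ImportTaskAction resolveAction(ImportMode mode, boolean recordExists) {
        switch (mode) {
          case INSERT:
            return ImportTaskAction.INSERT; // expected to fail if the record already exists
          case UPDATE:
            return ImportTaskAction.UPDATE; // expected to fail if the record does not exist
          case UPSERT:
            return recordExists ? ImportTaskAction.UPDATE : ImportTaskAction.INSERT;
          default:
            throw new AssertionError("Unknown import mode: " + mode);
        }
      }

      public static void main(String[] args) {
        System.out.println(resolveAction(ImportMode.UPSERT, true)); // UPDATE
        System.out.println(resolveAction(ImportMode.UPSERT, false)); // INSERT
      }
    }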