diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java index 664bb079e8..015366258e 100755 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java @@ -45,7 +45,6 @@ @CommandLine.Command(name = "export", description = "export data from a ScalarDB table") public class ExportCommand extends ExportCommandOptions implements Callable { - private static final String EXPORT_FILE_NAME_FORMAT = "export.%s.%s.%s.%s"; private static final Logger logger = LoggerFactory.getLogger(ExportCommand.class); @Spec CommandSpec spec; @@ -169,11 +168,8 @@ private String getOutputAbsoluteFilePath( String fileName = StringUtils.isBlank(outputFileName) ? String.format( - EXPORT_FILE_NAME_FORMAT, - namespace, - table, - System.nanoTime(), - outputFormat.toString().toLowerCase()) + "export.%s.%s.%s.%s", + namespace, table, System.nanoTime(), outputFormat.toString().toLowerCase()) : outputFileName; if (StringUtils.isBlank(outputDirectory)) { diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java index 44267017ca..40a93f4499 100755 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverter.java @@ -7,14 +7,31 @@ import java.util.stream.Collectors; import picocli.CommandLine; +/** + * A {@link picocli.CommandLine.ITypeConverter} implementation that converts a comma-separated + * string of column-order pairs into a list of {@link com.scalar.db.api.Scan.Ordering} objects. + * + *

<p>This converter is used to parse CLI arguments for scan ordering in ScalarDB-based + * applications. The input string must contain one or more key-value pairs in the format {@code + * column=order}, separated by commas. The {@code order} must be a valid {@link + * com.scalar.db.api.Scan.Ordering.Order} enum value, such as {@code ASC} or {@code DESC} + * (case-insensitive). + * + *

<p>Example input: {@code "name=asc,age=desc"} results in a list containing {@code + * Scan.Ordering.asc("name")} and {@code Scan.Ordering.desc("age")}. + * + *

<p>Invalid formats or unrecognized order values will result in an {@link + * IllegalArgumentException}. + */ public class ScanOrderingConverter implements CommandLine.ITypeConverter<List<Scan.Ordering>> { /** - * Converts a comma-separated string of key-value pairs into a list of {@link Scan.Ordering} - * objects. Each pair must be in the format "column=order", where "order" is a valid enum value of - * {@link Scan.Ordering.Order} (e.g., ASC or DESC, case-insensitive). + * Converts a comma-separated string of key-value pairs into a list of {@link + * com.scalar.db.api.Scan.Ordering} objects. Each pair must be in the format "column=order", where + * "order" is a valid enum value of {@link com.scalar.db.api.Scan.Ordering.Order} (e.g., ASC or DESC, + * case-insensitive). * * @param value the comma-separated key-value string to convert - * @return a list of {@link Scan.Ordering} objects constructed from the input + * @return a list of {@link com.scalar.db.api.Scan.Ordering} objects constructed from the input * @throws IllegalArgumentException if parsing fails due to invalid format or enum value */ @Override @@ -26,7 +43,9 @@ public List<Scan.Ordering> convert(String value) { String columnName = entry.getKey(); Scan.Ordering.Order sortOrder = Scan.Ordering.Order.valueOf(entry.getValue().trim().toUpperCase()); - return new Scan.Ordering(columnName, sortOrder); + return sortOrder == Scan.Ordering.Order.ASC + ? Scan.Ordering.asc(columnName) + : Scan.Ordering.desc(columnName); }) .collect(Collectors.toList()); } diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java index e3c617d509..4724bf7024 100644 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java @@ -13,7 +13,7 @@ public class CommandLineInputUtils { * Parses a single key-value pair from a string in the format "key=value". * * @param keyValue the key-value string to parse - * @return a {@link Map.Entry} representing the parsed key-value pair + * @return a {@link java.util.Map.Entry} representing the parsed key-value pair * @throws IllegalArgumentException if the input is null, empty, or not in the expected format */ public static Map.Entry<String, String> parseKeyValue(String keyValue) { @@ -50,7 +50,7 @@ public static String[] splitByDelimiter(String value, String delimiter, int limi /** * Validates that a given integer value is positive. If the value is less than 1, it throws a - * {@link CommandLine.ParameterException} with the specified error message. + * {@link picocli.CommandLine.ParameterException} with the specified error message. 
* * @param commandLine the {@link CommandLine} instance used to provide context for the exception * @param value the integer value to validate diff --git a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java index e8836ae156..50167ccd41 100755 --- a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java +++ b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java @@ -29,16 +29,15 @@ void callConvert_withInvalidValue_shouldThrowException() { void callConvert_withValidValueAndOrderAscending_shouldReturnScanOrdering() { String value = "id=ASC,age=DESC"; List expectedOrder = new ArrayList<>(); - expectedOrder.add(new Scan.Ordering("id", Scan.Ordering.Order.ASC)); - expectedOrder.add(new Scan.Ordering("age", Scan.Ordering.Order.DESC)); + expectedOrder.add(Scan.Ordering.asc("id")); + expectedOrder.add(Scan.Ordering.desc("age")); Assertions.assertEquals(expectedOrder, scanOrderingConverter.convert(value)); } @Test void callConvert_withValidValueAndOrderDescending_shouldReturnScanOrdering() { String value = "id=desc"; - List expectedOrder = - Collections.singletonList(new Scan.Ordering("id", Scan.Ordering.Order.DESC)); + List expectedOrder = Collections.singletonList(Scan.Ordering.desc("id")); Assertions.assertEquals(expectedOrder, scanOrderingConverter.convert(value)); } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ColumnInfo.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ColumnInfo.java index 685f58a833..080d142939 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ColumnInfo.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ColumnInfo.java @@ -9,6 +9,7 @@ *

<p>This class holds the metadata for a column, including the namespace (schema), table name, and * the column name within the table. */ +@SuppressWarnings("SameNameButDifferent") @Value @Builder public class ColumnInfo { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/Constants.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/Constants.java index 09759bf5b1..0d5996747a 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/Constants.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/Constants.java @@ -2,26 +2,18 @@ /** The constants that are used in the com.scalar.dataloader.core package */ public class Constants { - - public static final String IMPORT_LOG_ENTRY_STATUS_FIELD = "data_loader_import_status"; + /** + * Format string used for table lookup keys. It expects two string arguments: the namespace and + * the table name, respectively. + * + *

Example: {@code String.format(TABLE_LOOKUP_KEY_FORMAT, "ns", "table")} will result in + * "ns.table". + */ public static final String TABLE_LOOKUP_KEY_FORMAT = "%s.%s"; - - public static final String LOG_UPDATE_SUCCESS = "Row %s has been updated in table %s.%s"; - public static final String LOG_INSERT_SUCCESS = "Row %s has been inserted into table %s.%s"; - public static final String LOG_IMPORT_VALIDATION = "Validating data for line %s ..."; - public static final String LOG_IMPORT_GET_DATA = - "Retrieving existing data record from database ..."; - public static final String LOG_IMPORT_LINE_SUCCESS = "Row %s import is completed"; - public static final String LOG_IMPORT_LINE_FAILED = "Row %s import has failed: %s"; - public static final String LOG_IMPORT_COMPLETED = - "The import process has been completed. Please check the success and failed output files for a detailed report"; - - public static final String LOG_SCANNING_START = "Retrieving data from %s.%s table ..."; - public static final String LOG_CONVERTING = "Converting %s.%s data to %s ..."; - public static final String MISSING_CSV_HEADERS = - "Valid headers are not present or missing in the provided CSV file"; - public static final String ERROR_MISSING_SOURCE_FIELD = - "the data mapping source field '%s' for table '%s' is missing in the json data record"; + /** + * Status message used to indicate that a transaction was aborted as part of a batch transaction + * failure. + */ public static final String ABORT_TRANSACTION_STATUS = "Transaction aborted as part of batch transaction aborted"; } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DataLoaderObjectMapper.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DataLoaderObjectMapper.java index d90fd49b65..fbb6a8d643 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DataLoaderObjectMapper.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DataLoaderObjectMapper.java @@ -4,8 +4,29 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +/** + * A custom {@link ObjectMapper} used for data loading operations. + * + *

This mapper is configured to: + * + *

+ * + *

It can be reused wherever consistent JSON serialization/deserialization behavior is needed. + */ public class DataLoaderObjectMapper extends ObjectMapper { + /** + * Constructs a {@code DataLoaderObjectMapper} with default settings, including: + * + *

+ */ public DataLoaderObjectMapper() { super(); this.setSerializationInclusion(JsonInclude.Include.NON_NULL); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DatabaseKeyType.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DatabaseKeyType.java index b1926740ad..c435cfe017 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DatabaseKeyType.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DatabaseKeyType.java @@ -2,6 +2,9 @@ /** Type of key in database */ public enum DatabaseKeyType { + /** Represents a partition key, which determines the partition where the data is stored. */ PARTITION, + + /** Represents a clustering key, which determines the order of data within a partition. */ CLUSTERING } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/FileFormat.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/FileFormat.java index 8a7b2a6113..184b5b38c2 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/FileFormat.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/FileFormat.java @@ -2,7 +2,21 @@ /** The available input and output formats for the data loader import and export commands */ public enum FileFormat { + /** + * JSON (JavaScript Object Notation) format. Typically, represents the entire dataset as a single + * JSON array or object. + */ JSON, + + /** + * JSON Lines (JSONL) format. Each line is a separate JSON object, making it suitable for + * streaming large datasets. + */ JSONL, + + /** + * CSV (Comma-Separated Values) format. A plain text format where each line represents a row and + * columns are separated by commas. + */ CSV } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDbMode.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDbMode.java index 4f9e79ce61..4de647e286 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDbMode.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDbMode.java @@ -1,7 +1,20 @@ package com.scalar.db.dataloader.core; -/** The available modes a ScalarDB instance can run in */ +/** + * The available modes a ScalarDB instance can run in. Determines how ScalarDB interacts with the + * underlying database. + */ public enum ScalarDbMode { + + /** + * Storage mode: Operates directly on the underlying storage engine without transactional + * guarantees. Suitable for raw data access and simple CRUD operations. + */ STORAGE, + + /** + * Transaction mode: Provides transaction management with ACID guarantees across multiple + * operations. Suitable for applications that require consistency and atomicity. 
+ */ TRANSACTION } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java index b1ae7b02d6..baccddfcdf 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScanRange.java @@ -3,7 +3,8 @@ import com.scalar.db.io.Key; import lombok.Value; -/** * The scan range which is used in data export scan filtering */ +/** The scan range which is used in data export scan filtering */ +@SuppressWarnings("SameNameButDifferent") @Value public class ScanRange { /** The key for scan start filter */ diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java index 9e0dc4ba46..64f059852e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java @@ -11,7 +11,17 @@ import java.util.Iterator; import java.util.List; +/** Export manager implementation which manages the export task that exports data in CSV format */ public class CsvExportManager extends ExportManager { + + /** + * Constructs a {@code CsvExportManager} with the specified {@link DistributedStorage}, {@link + * ScalarDbDao}, and {@link ProducerTaskFactory}. + * + * @param storage the {@code DistributedStorage} instance used to read data from the database + * @param dao the {@code ScalarDbDao} used to execute export-related database operations + * @param producerTaskFactory the factory used to create producer tasks for exporting data + */ public CsvExportManager( DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { super(storage, dao, producerTaskFactory); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java index 13f33a319a..fdc27d664c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java @@ -28,6 +28,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** Export manager class which manages the export task */ +@SuppressWarnings({"SameNameButDifferent", "FutureReturnValueIgnored"}) @RequiredArgsConstructor public abstract class ExportManager { private static final Logger logger = LoggerFactory.getLogger(ExportManager.class); @@ -64,6 +66,7 @@ abstract void processFooter( * @param exportOptions Export options * @param tableMetadata Metadata for a single ScalarDB table * @param writer Writer to write the exported data + * @return export report object containing data such as total exported row count */ public ExportReport startExport( ExportOptions exportOptions, TableMetadata tableMetadata, Writer writer) { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java index da515cf3c2..3c7ed9ef56 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportOptions.java 
@@ -9,8 +9,8 @@ import lombok.Builder; import lombok.Data; -/** Options for a ScalarDB export data operation */ -@SuppressWarnings("SameNameButDifferent") +/** Options for a ScalarDB export data operation. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder(builderMethodName = "hiddenBuilder") @Data public class ExportOptions { @@ -31,6 +31,15 @@ public class ExportOptions { @Builder.Default private List projectionColumns = Collections.emptyList(); private List sortOrders; + /** + * Generates and returns an export options builder. + * + * @param namespace namespaces for export + * @param tableName tableName for export + * @param scanPartitionKey scan partition key for export + * @param outputFileFormat output file format for export + * @return a configured export options builder + */ public static ExportOptionsBuilder builder( String namespace, String tableName, Key scanPartitionKey, FileFormat outputFileFormat) { return hiddenBuilder() @@ -39,4 +48,13 @@ public static ExportOptionsBuilder builder( .scanPartitionKey(scanPartitionKey) .outputFileFormat(outputFileFormat); } + + /** + * Explicit builder class declaration required for Javadoc generation. + * + *

Although Lombok generates this builder class automatically, Javadoc requires an explicit + * declaration to resolve references in the generated documentation, especially when using a + * custom builder method name (e.g., {@code hiddenBuilder()}). + */ + public static class ExportOptionsBuilder {} } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportReport.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportReport.java index d856b7b0ff..8e9b2a90af 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportReport.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportReport.java @@ -16,10 +16,20 @@ public class ExportReport { */ private final LongAdder exportedRowCount = new LongAdder(); + /** + * Returns the total number of rows that have been exported so far. + * + * @return the cumulative exported row count + */ public long getExportedRowCount() { return exportedRowCount.sum(); } + /** + * Increments the exported row count by the specified value. + * + * @param count the number of rows to add to the exported count + */ public void updateExportedRowCount(long count) { this.exportedRowCount.add(count); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java index 34e382dd5e..fadac644a2 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java @@ -7,7 +7,17 @@ import java.io.IOException; import java.io.Writer; +/** Export manager implementation which manages the export task that exports data in JSON format */ public class JsonExportManager extends ExportManager { + + /** + * Constructs a {@code JsonExportManager} with the specified {@link DistributedStorage}, {@link + * ScalarDbDao}, and {@link ProducerTaskFactory}. + * + * @param storage the {@code DistributedStorage} instance used to read data from the database + * @param dao the {@code ScalarDbDao} used to execute export-related database operations + * @param producerTaskFactory the factory used to create producer tasks for exporting data + */ public JsonExportManager( DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { super(storage, dao, producerTaskFactory); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java index 8bc5fabe07..2ce21deb7d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java @@ -7,7 +7,19 @@ import java.io.IOException; import java.io.Writer; +/** + * Export manager implementation which manages the export task that exports data in JSONLines format + */ public class JsonLineExportManager extends ExportManager { + + /** + * Constructs a {@code JsonLineExportManager} with the specified {@link DistributedStorage}, + * {@link ScalarDbDao}, and {@link ProducerTaskFactory}. 
+ * + * @param storage the {@code DistributedStorage} instance used to read data from the database + * @param dao the {@code ScalarDbDao} used to execute export-related database operations + * @param producerTaskFactory the factory used to create producer tasks for exporting data + */ public JsonLineExportManager( DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { super(storage, dao, producerTaskFactory); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java index bb7cc8c583..7ba86c1bef 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java @@ -34,6 +34,7 @@ public class CsvProducerTask extends ProducerTask { * Class constructor * * @param includeMetadata Include metadata in the exported data + * @param projectColumns list of columns that is required in export data * @param tableMetadata Metadata for a single ScalarDB table * @param columnDataTypes Map of data types for the all columns in a ScalarDB table * @param delimiter Delimiter used in csv content diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java index a09405553f..aabbba205c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonLineProducerTask.java @@ -16,18 +16,20 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +/** + * Producer that converts ScalarDB scan results to JSONLine content. The output is sent to a queue + * to be processed by a consumer + */ public class JsonLineProducerTask extends ProducerTask { private final DataLoaderObjectMapper objectMapper = new DataLoaderObjectMapper(); - private static final Logger logger = LoggerFactory.getLogger(JsonLineProducerTask.class); /** * Class constructor * * @param includeMetadata Include metadata in the exported data + * @param projectionColumns list of columns that is required in export data * @param tableMetadata Metadata for a single ScalarDB table * @param columnDataTypes Map of data types for the all columns in a ScalarDB table */ diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java index 8f61b714fd..742911e211 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/JsonProducerTask.java @@ -17,21 +17,24 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +/** + * Producer that converts ScalarDB scan results to JSON content. 
The output is sent to a queue to be + * processed by a consumer + */ public class JsonProducerTask extends ProducerTask { private final DataLoaderObjectMapper objectMapper = new DataLoaderObjectMapper(); private final boolean prettyPrintJson; - private static final Logger logger = LoggerFactory.getLogger(JsonProducerTask.class); /** * Class constructor * * @param includeMetadata Include metadata in the exported data + * @param projectionColumns list of columns that is required in export data * @param tableMetadata Metadata for a single ScalarDB table * @param columnDataTypes Map of data types for the all columns in a ScalarDB table + * @param prettyPrintJson Json data should be formatted or not */ public JsonProducerTask( boolean includeMetadata, diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerResult.java deleted file mode 100644 index 9506fcd722..0000000000 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerResult.java +++ /dev/null @@ -1,13 +0,0 @@ -package com.scalar.db.dataloader.core.dataexport.producer; - -import com.fasterxml.jackson.databind.JsonNode; -import lombok.Builder; -import lombok.Value; - -@Builder -@Value -public class ProducerResult { - JsonNode jsonNode; - String csvSource; - boolean poisonPill; -} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java index 2ebf01ff10..acb9a90519 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTask.java @@ -8,11 +8,40 @@ import java.util.Map; import java.util.Set; +/** + * An abstract base class for producer tasks that process chunks of data retrieved from a ScalarDB + * table. + * + *

<p>Subclasses are expected to implement the {@link #process(List)} method, which transforms a + * chunk of {@link Result} objects into a specific format (e.g., CSV, JSON). + * + *

This class manages metadata and column projection logic that can be used by all concrete + * implementations. + */ public abstract class ProducerTask { + /** + * The metadata of the table from which the data is being exported. Used to understand schema + * details such as column types and primary keys. + */ protected final TableMetadata tableMetadata; + + /** + * A map of column names to their corresponding data types. Derived from the table metadata to + * assist in formatting output correctly. + */ protected final Map dataTypeByColumnName; + + /** + * Flag indicating whether to include internal metadata columns (e.g., transaction metadata) in + * the output. + */ protected final boolean includeMetadata; + + /** + * A set of column names to include in the exported output. If empty, all columns are included by + * default. + */ protected final Set projectedColumnsSet; /** @@ -34,5 +63,12 @@ protected ProducerTask( this.dataTypeByColumnName = columnDataTypes; } + /** + * Processes a chunk of export data and returns a formatted string representation of the chunk. + * + * @param dataChunk the list of {@link Result} objects representing a chunk of data to be exported + * @return a formatted string representing the processed data chunk, ready to be written to the + * output + */ public abstract String process(List dataChunk); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java index 18adc8de6d..2339ab1366 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java @@ -8,6 +8,8 @@ import java.util.Map; import lombok.RequiredArgsConstructor; +/** Responsible to create producer task based on the export file format */ +@SuppressWarnings("SameNameButDifferent") @RequiredArgsConstructor public class ProducerTaskFactory { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java index 7bf7645b0e..1a04071600 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java @@ -17,6 +17,7 @@ * A validator for ensuring that export options are consistent with the ScalarDB table metadata and * follow the defined constraints. */ +@SuppressWarnings("SameNameButDifferent") @NoArgsConstructor(access = AccessLevel.PRIVATE) public class ExportOptionsValidator { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java index f0d8f9df65..07ef2dd756 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java @@ -34,6 +34,7 @@ *

  • Notifying listeners of various import events * */ +@SuppressWarnings("SameNameButDifferent") @AllArgsConstructor public class ImportManager implements ImportEventListener { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportMode.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportMode.java index 7f2a805e75..8096123baa 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportMode.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportMode.java @@ -2,7 +2,18 @@ /** Represents the way to be imported data is handled */ public enum ImportMode { + /** + * Insert mode: Adds new rows to the database. Fails if the row with the same primary key already + * exists. + */ INSERT, + + /** + * Update mode: Modifies existing rows in the database. Fails if the row with the specified key + * does not exist. + */ UPDATE, + + /** Upsert mode: Inserts new rows or updates existing ones if a row with the same key exists. */ UPSERT } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java index 6d3206765e..359fb1f881 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java @@ -7,7 +7,8 @@ import lombok.Builder; import lombok.Data; -/** Import options to import data into one or more ScalarDB tables */ +/** Import options to import data into one or more ScalarDB tables. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Data public class ImportOptions { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFile.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFile.java index 6a2229c186..a6888abfb1 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFile.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFile.java @@ -11,6 +11,7 @@ * Represents a control file that holds control file tables which contains the column mappings that * maps a source file column to the actual database table column. */ +@SuppressWarnings("SameNameButDifferent") @Getter @Setter public class ControlFile { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTable.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTable.java index efcfb0bc00..c65d05887e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTable.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTable.java @@ -12,6 +12,7 @@ * table name, and field mappings. This class is used to define how data from a control file maps to * a specific table in ScalarDB. 
*/ +@SuppressWarnings("SameNameButDifferent") @Getter @Setter public class ControlFileTable { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTableFieldMapping.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTableFieldMapping.java index 1068573304..74785579ec 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTableFieldMapping.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileTableFieldMapping.java @@ -10,6 +10,7 @@ * This class defines how data from a specific field in the input source should be mapped to the * corresponding column in the database. */ +@SuppressWarnings("SameNameButDifferent") @Getter @Setter public class ControlFileTableFieldMapping { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidationLevel.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidationLevel.java index 3753d0ba65..37bccd66fa 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidationLevel.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidationLevel.java @@ -1,11 +1,13 @@ package com.scalar.db.dataloader.core.dataimport.controlfile; -/** Control file validation level */ +/** Control file validation level. */ public enum ControlFileValidationLevel { - /* All columns need to be mapped */ + /** All columns need to be mapped. */ FULL, - /* All partition key and clustering key columns need to be mapped */ + + /** All partition key and clustering key columns need to be mapped. */ KEYS, - /* Only validate the columns that are mapped */ + + /** Only validate the columns that are mapped. */ MAPPED } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java index 0ed47cc647..54185b9b3a 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbStorageManager.java @@ -3,30 +3,46 @@ import com.scalar.db.api.DistributedStorage; import com.scalar.db.api.DistributedStorageAdmin; import com.scalar.db.service.StorageFactory; -import java.io.IOException; import javax.annotation.Nullable; +/** + * A manager class for handling ScalarDB operations in storage mode. + * + *

<p>Provides access to {@link DistributedStorage} for data operations and {@link + * DistributedStorageAdmin} for administrative operations such as schema management. + * + *

    This class is typically used when interacting with ScalarDB in a non-transactional, + * storage-only configuration. + */ public class ScalarDbStorageManager { @Nullable private final DistributedStorage storage; private final DistributedStorageAdmin storageAdmin; /** - * Class constructor + * Constructs a {@code ScalarDbStorageManager} using the provided {@link StorageFactory}. * - * @param storageFactory Factory to create all the necessary ScalarDB data managers + * @param storageFactory the factory used to create the ScalarDB storage and admin instances */ - public ScalarDbStorageManager(StorageFactory storageFactory) throws IOException { + public ScalarDbStorageManager(StorageFactory storageFactory) { storage = storageFactory.getStorage(); storageAdmin = storageFactory.getStorageAdmin(); } - /** Returns distributed storage for ScalarDB connection that is running in storage mode */ + /** + * Returns distributed storage for ScalarDB connection that is running in storage mode + * + * @return distributed storage object + */ public DistributedStorage getDistributedStorage() { return storage; } - /** Returns distributed storage admin for ScalarDB admin operations */ + /** + * Returns distributed storage admin for ScalarDB admin operations + * + * @return distributed storage admin object + */ public DistributedStorageAdmin getDistributedStorageAdmin() { return storageAdmin; } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java index 7999c01032..e3b6f1bb85 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbTransactionManager.java @@ -2,19 +2,32 @@ import com.scalar.db.api.DistributedTransactionManager; import com.scalar.db.service.TransactionFactory; -import java.io.IOException; +/** + * A manager class for handling ScalarDB operations in transaction mode. + * + *

<p>Provides access to {@link DistributedTransactionManager} for data operations + * + *

    This class is typically used when interacting with ScalarDB in a transactional configuration. + */ public class ScalarDbTransactionManager { private final DistributedTransactionManager transactionManager; - public ScalarDbTransactionManager(TransactionFactory transactionFactory) throws IOException { + /** + * Constructs a {@code ScalarDbTransactionManager} using the provided {@link TransactionFactory}. + * + * @param transactionFactory the factory used to create the ScalarDB storage and admin instances + */ + public ScalarDbTransactionManager(TransactionFactory transactionFactory) { transactionManager = transactionFactory.getTransactionManager(); } /** * Returns distributed Transaction manager for ScalarDB connection that is running in transaction * mode + * + * @return distributed transaction manager object */ public DistributedTransactionManager getDistributedTransactionManager() { return transactionManager; diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java index 69ed97421a..65f1f2f895 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunk.java @@ -4,7 +4,8 @@ import lombok.Builder; import lombok.Data; -/** * Import data chunk data */ +/** Import data chunk data. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Data @Builder public class ImportDataChunk { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java index d6db3e1e7f..2fa6b1cf88 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatus.java @@ -6,7 +6,8 @@ import lombok.Builder; import lombok.Data; -/** * A DTO to store import data chunk details */ +/** A DTO to store import data chunk details. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Data @Builder @JsonDeserialize(builder = ImportDataChunkStatus.ImportDataChunkStatusBuilder.class) @@ -38,4 +39,13 @@ public class ImportDataChunkStatus { @JsonProperty("status") private final ImportDataChunkStatusState status; + + /** + * Explicit builder class declaration required for Javadoc generation. + * + *

    This class is normally generated automatically by Lombok's {@code @Builder} annotation. + * However, when using a custom builder method name (e.g., {@code hiddenBuilder()}), Javadoc may + * fail to resolve references to this builder unless it is explicitly declared. + */ + public static class ImportDataChunkStatusBuilder {} } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java index 9a6398a45e..4b9e443ae1 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportDataChunkStatusState.java @@ -1,8 +1,13 @@ package com.scalar.db.dataloader.core.dataimport.datachunk; -/** * Status of the import data chunk during the import process */ +/** Status of the import data chunk during the import process. */ public enum ImportDataChunkStatusState { + /** Indicates that the import of the data chunk has started but has not yet progressed. */ START, + + /** Indicates that the import of the data chunk is currently in progress. */ IN_PROGRESS, + + /** Indicates that the import of the data chunk has been successfully completed. */ COMPLETE } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportRow.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportRow.java index 824ca4ffa4..84bcd0af39 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportRow.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/datachunk/ImportRow.java @@ -3,7 +3,8 @@ import com.fasterxml.jackson.databind.JsonNode; import lombok.Value; -/** Stores data related to a single row on import file */ +/** Stores data related to a single row on import file. */ +@SuppressWarnings("SameNameButDifferent") @Value public class ImportRow { int rowNumber; diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java index a4f58781fa..c440c6e8f6 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/AbstractImportLogger.java @@ -24,6 +24,7 @@ * logging transaction batch results and managing event listeners. Concrete implementations should * define how to log transaction batches and handle errors. 
*/ +@SuppressWarnings("SameNameButDifferent") @RequiredArgsConstructor public abstract class AbstractImportLogger implements ImportEventListener { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java index 218a3f9b9a..1f254025f5 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/ImportLoggerConfig.java @@ -10,6 +10,7 @@ */ @Value @Builder +@SuppressWarnings("SameNameButDifferent") public class ImportLoggerConfig { /** * The directory path where log files will be stored. This path should end with a directory diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java index cf0349366c..0af891ea54 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java @@ -1,7 +1,19 @@ package com.scalar.db.dataloader.core.dataimport.log; -/** Log modes available for import logging */ +/** + * Log modes available for import logging. Determines how logs are organized and written during the + * import process. + */ public enum LogMode { + /** + * Logs all import-related messages into a single log file. Useful for centralized and sequential + * log analysis. + */ SINGLE_FILE, + + /** + * Splits logs into separate files for each data chunk being imported. Useful for parallel + * processing and debugging individual chunks independently. + */ SPLIT_BY_DATA_CHUNK } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java index 95d13d2983..f1ba3c0b03 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SingleFileImportLogger.java @@ -30,9 +30,21 @@ @ThreadSafe public class SingleFileImportLogger extends AbstractImportLogger { + /** + * Name format for the file where the summary of the import process will be logged. This file + * typically includes overall statistics and a high-level status of the import. + */ protected static final String SUMMARY_LOG_FILE_NAME = "summary.log"; + + /** Name format for the file where successfully imported records will be logged in JSON format. */ protected static final String SUCCESS_LOG_FILE_NAME = "success.json"; + + /** + * Name format for the file where failed import records will be logged in JSON format, typically + * along with error details or reasons for failure. 
+ */ protected static final String FAILURE_LOG_FILE_NAME = "failure.json"; + private static final Logger logger = LoggerFactory.getLogger(SingleFileImportLogger.class); private volatile LogWriter summaryLogWriter; private final LogWriter successLogWriter; diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java index cff2d5d445..3970ca4017 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/SplitByDataChunkImportLogger.java @@ -36,8 +36,17 @@ @ThreadSafe public class SplitByDataChunkImportLogger extends AbstractImportLogger { + /** + * Name format for the file where the summary of the import process will be logged. This file + * typically includes overall statistics and a high-level status of the import. + */ protected static final String SUMMARY_LOG_FILE_NAME_FORMAT = "data_chunk_%s_summary.json"; + /** + * Name format for the file where failed import records will be logged in JSON format, typically + * along with error details or reasons for failure. + */ protected static final String FAILURE_LOG_FILE_NAME_FORMAT = "data_chunk_%s_failure.json"; + /** Name format for the file where successfully imported records will be logged in JSON format. */ protected static final String SUCCESS_LOG_FILE_NAME_FORMAT = "data_chunk_%s_success.json"; private static final Logger logger = LoggerFactory.getLogger(SplitByDataChunkImportLogger.class); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java index b85ee8a33b..c5ef96714d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/writer/DefaultLogWriterFactory.java @@ -10,6 +10,7 @@ * it creates. It's annotated with Lombok's {@code @AllArgsConstructor} to automatically generate a * constructor that initializes the configuration field. */ +@SuppressWarnings("SameNameButDifferent") @AllArgsConstructor public class DefaultLogWriterFactory implements LogWriterFactory { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java index c11ae3f655..11b54c93b6 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java @@ -41,11 +41,12 @@ * supports both transactional and non-transactional (storage) modes and provides event notification * capabilities for monitoring the import process. 
*/ +@SuppressWarnings({"SameNameButDifferent", "FutureReturnValueIgnored"}) @RequiredArgsConstructor public abstract class ImportProcessor { final ImportProcessorParams params; - private static final Logger LOGGER = LoggerFactory.getLogger(ImportProcessor.class); + private static final Logger logger = LoggerFactory.getLogger(ImportProcessor.class); private final List listeners = new ArrayList<>(); /** @@ -316,13 +317,13 @@ private ImportTransactionBatchResult processTransactionBatch( } catch (TransactionException e) { isSuccess = false; - LOGGER.error(e.getMessage()); + logger.error(e.getMessage()); try { if (transaction != null) { transaction.abort(); // Ensure transaction is aborted } } catch (TransactionException abortException) { - LOGGER.error( + logger.error( "Failed to abort transaction: {}", abortException.getMessage(), abortException); } error = e.getMessage(); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java index 688b0ddf97..2d85325044 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java @@ -17,6 +17,7 @@ *

    This class is immutable and uses the Builder pattern for construction. It encapsulates all * required parameters and dependencies for processing data imports in ScalarDB. */ +@SuppressWarnings("SameNameButDifferent") @Builder @Value public class ImportProcessorParams { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java index 7102d5d4f2..c435f0f13d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java @@ -38,6 +38,11 @@ public class JsonImportProcessor extends ImportProcessor { private static final DataLoaderObjectMapper OBJECT_MAPPER = new DataLoaderObjectMapper(); private final AtomicInteger dataChunkIdCounter = new AtomicInteger(0); + /** + * Creates a new JsonImportProcessor with the specified parameters. + * + * @param params Configuration parameters for the import processor + */ public JsonImportProcessor(ImportProcessorParams params) { super(params); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java index 5e64e4d63d..e0b24e5aa4 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java @@ -38,6 +38,7 @@ * functionality to import data into single or multiple tables based on the provided import options * and control file configurations. */ +@SuppressWarnings({"SameNameButDifferent"}) @RequiredArgsConstructor public abstract class ImportTask { @@ -148,7 +149,8 @@ private List startMultiTableImportProcess( copyNode); targetResults.add(result); } - return targetResults; + // Wrapped in unmodifiable list to fix MixedMutabilityReturnType error-prone warning + return Collections.unmodifiableList(targetResults); } /** diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java index 6b8c95d2a8..804c4c9f3e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskAction.java @@ -1,7 +1,16 @@ package com.scalar.db.dataloader.core.dataimport.task; -/** * Import task actions */ +/** Represents the type of action to perform for an import task. */ public enum ImportTaskAction { + /** + * Insert action: Adds a new record to the database. Fails if the record with the same primary key + * already exists. + */ INSERT, - UPDATE, + + /** + * Update action: Modifies an existing record in the database. Fails if the record does not + * already exist. 
+ */ + UPDATE } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java index 0026e23d16..3cf18e6d16 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java @@ -14,6 +14,7 @@ * Parameters required for executing an import task in the data loader. This class encapsulates all * necessary information needed to process and import a single record into ScalarDB. */ +@SuppressWarnings("SameNameButDifferent") @Builder @Value public class ImportTaskParams { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java index ed901651ae..3d3ca2f6e2 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java @@ -3,8 +3,6 @@ import com.scalar.db.api.DistributedTransaction; import com.scalar.db.api.Result; import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException; -import com.scalar.db.exception.transaction.AbortException; -import com.scalar.db.exception.transaction.TransactionException; import com.scalar.db.io.Column; import com.scalar.db.io.Key; import java.util.List; @@ -83,24 +81,4 @@ protected void saveRecord( throws ScalarDbDaoException { params.getDao().put(namespace, tableName, partitionKey, clusteringKey, columns, transaction); } - - /** - * Aborts the active ScalarDB transaction if it has not been committed. - * - *

This method provides a safe way to abort an active transaction, handling any abort-related - * exceptions by wrapping them in a {@link TransactionException}. - * - * @param tx the transaction to be aborted. If null, this method does nothing - * @throws TransactionException if an error occurs during the abort operation or if the underlying - * abort operation fails - */ - private void abortActiveTransaction(DistributedTransaction tx) throws TransactionException { - if (tx != null) { - try { - tx.abort(); - } catch (AbortException e) { - throw new TransactionException(e.getMessage(), tx.getId()); - } - } - } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java index 7f7524d263..c777dae66c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java @@ -4,6 +4,7 @@ import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTable; import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTableFieldMapping; +/** Maps source data to target columns using the control file mappings. */ public class ImportDataMapping { /** diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportResultStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportResultStatus.java deleted file mode 100644 index 70c03b9c37..0000000000 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportResultStatus.java +++ /dev/null @@ -1,12 +0,0 @@ -package com.scalar.db.dataloader.core.dataimport.task.result; - -public enum ImportResultStatus { - SUCCESS, - PARTIAL_SUCCESS, - FAILURE, - VALIDATION_FAILED, - RETRIEVAL_FAILED, - MAPPING_FAILED, - TIMEOUT, - CANCELLED -} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResult.java index 0fe4e0379d..55a5e2ba99 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResult.java @@ -6,6 +6,8 @@ import lombok.Builder; import lombok.Value; +/** Stores the result of processing a single import target. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value public class ImportTargetResult { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java index d774f8f823..fa23324f08 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTargetResultStatus.java @@ -1,12 +1,42 @@ package com.scalar.db.dataloader.core.dataimport.task.result; +/** + * Import target result status. Represents the outcome of processing a single import target (e.g., a + * data row or record).
+ */ public enum ImportTargetResultStatus { + /** + * The import failed due to validation errors (e.g., missing required fields, invalid formats). + */ VALIDATION_FAILED, + + /** + * The import failed because the existing data could not be retrieved (e.g., due to I/O or + * database issues). + */ RETRIEVAL_FAILED, + + /** The import failed due to missing required columns in the input data. */ MISSING_COLUMNS, + + /** + * The import was skipped because the data already exists and cannot be overwritten (in INSERT + * mode). + */ DATA_ALREADY_EXISTS, + + /** The import failed because the required existing data was not found (e.g., in UPDATE mode). */ DATA_NOT_FOUND, + + /** The import failed during the save operation (e.g., due to database write errors). */ SAVE_FAILED, + + /** The import target was successfully saved to the database. */ SAVED, + + /** + * The import process was aborted before completion (e.g., due to a batch failure or external + * cancellation). + */ ABORTED } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java index 3e08cc709b..19c6859c4c 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/result/ImportTaskResult.java @@ -7,6 +7,8 @@ import lombok.Builder; import lombok.Value; +/** A DTO to store the import task result */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value @JsonDeserialize(builder = ImportTaskResult.ImportTaskResultBuilder.class) @@ -22,4 +24,13 @@ public class ImportTaskResult { @JsonProperty("dataChunkId") int dataChunkId; + + /** + * Explicit builder class declaration required for Javadoc generation. + * + *

This class is normally generated automatically by Lombok's {@code @Builder} annotation. + * However, when using a custom builder method name (e.g., {@code hiddenBuilder()}), Javadoc may + * fail to resolve references to this builder unless it is explicitly declared. + */ + public static class ImportTaskResultBuilder {} } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java index 30b878b9e6..5c299f9a66 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java @@ -31,17 +31,29 @@ public void addErrorMessage(String columnName, String errorMessage) { this.errorMessages.add(errorMessage); } - /** @return Immutable list of validation error messages */ + /** + * Returns the validation error messages. + * + * @return Immutable list of validation error messages. + */ public List<String> getErrorMessages() { return Collections.unmodifiableList(this.errorMessages); } - /** @return Immutable set of columns that had errors */ + /** + * Returns the columns that had validation errors. + * + * @return Immutable set of columns that had errors. + */ public Set<String> getColumnsWithErrors() { return Collections.unmodifiableSet(this.columnsWithErrors); } - /** @return Validation is valid or not */ + /** + * Indicates whether the source record passed validation. + * + * @return {@code true} if no validation errors were recorded, {@code false} otherwise. + */ public boolean isValid() { return this.errorMessages.isEmpty(); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java index 6d773ffccc..3fbc5ffc38 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java @@ -9,6 +9,8 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +/** Responsible for validating source data prior to import.
*/ +@SuppressWarnings("SameNameButDifferent") @NoArgsConstructor(access = AccessLevel.PRIVATE) public class ImportSourceRecordValidator { @@ -21,6 +23,7 @@ public class ImportSourceRecordValidator { * @param columnNames List of all column names in table * @param sourceRecord source data * @param allColumnsRequired If true treat missing columns as an error + * @param tableMetadata metadata of the table to which data is to be imported * @return Source record validation result */ public static ImportSourceRecordValidationResult validateSourceRecord( @@ -83,6 +86,7 @@ public static void checkMissingKeys( * @param columnNames List of column names for a table * @param validationResult Source record validation result * @param ignoreColumns Columns that can be ignored in the check + * @param tableMetadata metadata of the table to which data is to be imported */ public static void checkMissingColumns( JsonNode sourceRecord, @@ -107,6 +111,7 @@ public static void checkMissingColumns( * @param sourceRecord Source json object * @param columnNames List of column names for a table * @param validationResult Source record validation result + * @param tableMetadata metadata of the table to which data is to be imported */ public static void checkMissingColumns( JsonNode sourceRecord, diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java index a922fd8afa..598824935a 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatch.java @@ -5,7 +5,8 @@ import lombok.Builder; import lombok.Value; -/** Transaction batch details */ +/** Transaction batch details. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value public class ImportTransactionBatch { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java index 0e44b66953..072117fd80 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchResult.java @@ -7,7 +7,8 @@ import lombok.Builder; import lombok.Value; -/** Transaction batch result */ +/** Transaction batch result. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value @JsonDeserialize(builder = ImportTransactionBatchResult.ImportTransactionBatchResultBuilder.class) @@ -29,4 +30,13 @@ public class ImportTransactionBatchResult { @JsonProperty("success") boolean success; + + /** + * Explicit builder class declaration required for Javadoc generation. + * + *

    This class is normally generated automatically by Lombok's {@code @Builder} annotation. + * However, when using a custom builder method name (e.g., {@code hiddenBuilder()}), Javadoc may + * fail to resolve references to this builder unless it is explicitly declared. + */ + public static class ImportTransactionBatchResultBuilder {} } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchStatus.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchStatus.java index 1b7bae34c6..42d37eb64d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchStatus.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/transactionbatch/ImportTransactionBatchStatus.java @@ -5,7 +5,8 @@ import lombok.Builder; import lombok.Value; -/** Batch status details */ +/** Batch status details. */ +@SuppressWarnings({"SameNameButDifferent", "MissingSummary"}) @Builder @Value public class ImportTransactionBatchStatus { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataRequest.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataRequest.java index 8e79da3d6b..3b730a081a 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataRequest.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataRequest.java @@ -3,6 +3,7 @@ import lombok.Getter; /** Represents the request for metadata for a single ScalarDB table */ +@SuppressWarnings("SameNameButDifferent") @Getter public class TableMetadataRequest { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java index 8816945800..f91435fe5f 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java @@ -14,6 +14,7 @@ * Service for retrieving {@link TableMetadata} from ScalarDB. Provides methods to fetch metadata * for individual tables or a collection of tables. */ +@SuppressWarnings("SameNameButDifferent") @RequiredArgsConstructor public class TableMetadataService { diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/DebugUtil.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/DebugUtil.java index a16e2fae02..75823f1a43 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/DebugUtil.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/DebugUtil.java @@ -3,6 +3,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * A utility class for debugging purposes, providing methods to log runtime memory usage. + * + *

    This class is typically used to log memory usage at various stages of an application's + * execution to help diagnose memory-related issues or understand memory consumption patterns. + */ public class DebugUtil { private static final Logger logger = LoggerFactory.getLogger(DebugUtil.class); @@ -24,6 +30,12 @@ public static void logMemoryUsage(String stage) { formatMemorySize(maxMemory)); } + /** + * Converts the given memory size in bytes to a human-readable string in megabytes. + * + * @param size the memory size in bytes + * @return the formatted memory size in megabytes, rounded to two decimal places + */ private static String formatMemorySize(long size) { return String.format("%.2f", size / (1024.0 * 1024.0)); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/PathUtil.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/PathUtil.java index c307ea961f..3cc165f5d3 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/PathUtil.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/PathUtil.java @@ -2,6 +2,11 @@ import java.io.File; +/** + * A utility class for common operations related to file system paths. + * + *

    Provides helper methods such as ensuring a trailing path separator for directory paths. + */ public class PathUtil { /** diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/TableMetadataUtil.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/TableMetadataUtil.java index ddc15a1e59..7cd1834d83 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/TableMetadataUtil.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/TableMetadataUtil.java @@ -11,6 +11,7 @@ import lombok.NoArgsConstructor; /** Utility class for handling ScalarDB table metadata operations. */ +@SuppressWarnings("SameNameButDifferent") @NoArgsConstructor(access = AccessLevel.PRIVATE) public class TableMetadataUtil { diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTaskTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTaskTest.java index 89cfd28258..81a14a8292 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTaskTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTaskTest.java @@ -72,7 +72,6 @@ void process_withValidResultList_withPartialProjections_shouldReturnValidCsvStri List resultList = new ArrayList<>(); resultList.add(result); String output = csvProducerTask.process(resultList); - System.out.println(output); Assertions.assertEquals(expectedOutput, output.trim()); } diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorTest.java index b5163eadb9..d60ebecb00 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorTest.java @@ -55,6 +55,7 @@ *

    Additionally, this class tests the thread executor behavior in ImportProcessor, including * proper shutdown, waiting for tasks to complete, handling interruptions, and task distribution. */ +@SuppressWarnings("SameNameButDifferent") @ExtendWith(MockitoExtension.class) class ImportProcessorTest { diff --git a/gradle/spotbugs-exclude.xml b/gradle/spotbugs-exclude.xml index 23254eb3ab..2f4be55e24 100644 --- a/gradle/spotbugs-exclude.xml +++ b/gradle/spotbugs-exclude.xml @@ -37,7 +37,7 @@ - +
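Note on the executor behavior described in the ImportProcessorTest Javadoc above ("proper shutdown, waiting for tasks to complete, handling interruptions"): this corresponds to the standard java.util.concurrent termination pattern. The sketch below is illustrative only and is not taken from ImportProcessor or its tests; the class name, pool size, and timeout are assumptions.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Illustrative sketch of the shutdown pattern the tests exercise; not the actual
// ImportProcessor implementation.
public class ExecutorShutdownSketch {
  public static void main(String[] args) {
    ExecutorService executor = Executors.newFixedThreadPool(4); // pool size is an assumption

    executor.submit(() -> System.out.println("processing a data chunk"));

    // Stop accepting new tasks, then wait for in-flight tasks to finish.
    executor.shutdown();
    try {
      if (!executor.awaitTermination(60, TimeUnit.SECONDS)) {
        // Tasks did not finish in time: cancel whatever is still running or queued.
        executor.shutdownNow();
      }
    } catch (InterruptedException e) {
      // Interrupted while waiting: force shutdown and preserve the interrupt status.
      executor.shutdownNow();
      Thread.currentThread().interrupt();
    }
  }
}

In this pattern, shutdown() lets queued work drain normally, while shutdownNow() is the escape hatch for timeouts and interruptions, which matches the two completion paths the test description mentions.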