diff --git a/core/src/main/java/com/scalar/db/api/AbacAdmin.java b/core/src/main/java/com/scalar/db/api/AbacAdmin.java index 287b82c724..cf34bd0212 100644 --- a/core/src/main/java/com/scalar/db/api/AbacAdmin.java +++ b/core/src/main/java/com/scalar/db/api/AbacAdmin.java @@ -1,6 +1,6 @@ package com.scalar.db.api; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import java.util.List; import java.util.Optional; diff --git a/core/src/main/java/com/scalar/db/api/Admin.java b/core/src/main/java/com/scalar/db/api/Admin.java index 4d7a088500..937c959aa0 100644 --- a/core/src/main/java/com/scalar/db/api/Admin.java +++ b/core/src/main/java/com/scalar/db/api/Admin.java @@ -1,6 +1,6 @@ package com.scalar.db.api; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; import java.util.Collections; diff --git a/core/src/main/java/com/scalar/db/api/AuthAdmin.java b/core/src/main/java/com/scalar/db/api/AuthAdmin.java index 7b5d9a4377..debbfb7ac6 100644 --- a/core/src/main/java/com/scalar/db/api/AuthAdmin.java +++ b/core/src/main/java/com/scalar/db/api/AuthAdmin.java @@ -1,6 +1,6 @@ package com.scalar.db.api; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import java.util.List; import java.util.Optional; diff --git a/core/src/main/java/com/scalar/db/api/ConditionBuilder.java b/core/src/main/java/com/scalar/db/api/ConditionBuilder.java index b809d6be4b..8389112b64 100644 --- a/core/src/main/java/com/scalar/db/api/ConditionBuilder.java +++ b/core/src/main/java/com/scalar/db/api/ConditionBuilder.java @@ -1,7 +1,7 @@ package com.scalar.db.api; import com.scalar.db.api.ConditionalExpression.Operator; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.io.BigIntColumn; import com.scalar.db.io.BlobColumn; import com.scalar.db.io.BooleanColumn; diff --git a/core/src/main/java/com/scalar/db/api/GetBuilder.java b/core/src/main/java/com/scalar/db/api/GetBuilder.java index 0b4d2f8de9..c606a66be1 100644 --- a/core/src/main/java/com/scalar/db/api/GetBuilder.java +++ b/core/src/main/java/com/scalar/db/api/GetBuilder.java @@ -25,7 +25,7 @@ import com.scalar.db.api.OperationBuilder.WhereAnd; import com.scalar.db.api.OperationBuilder.WhereOr; import com.scalar.db.api.Selection.Conjunction; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.io.Key; import java.util.ArrayList; import java.util.Arrays; diff --git a/core/src/main/java/com/scalar/db/api/LikeExpression.java b/core/src/main/java/com/scalar/db/api/LikeExpression.java index 084fc20687..aa9dcb4956 100644 --- a/core/src/main/java/com/scalar/db/api/LikeExpression.java +++ b/core/src/main/java/com/scalar/db/api/LikeExpression.java @@ -1,7 +1,7 @@ package com.scalar.db.api; import com.google.common.base.MoreObjects; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.io.TextColumn; import java.util.Objects; import javax.annotation.Nonnull; diff --git a/core/src/main/java/com/scalar/db/api/Put.java b/core/src/main/java/com/scalar/db/api/Put.java index cf14bbfc1a..a144668b7b 100644 --- a/core/src/main/java/com/scalar/db/api/Put.java +++ b/core/src/main/java/com/scalar/db/api/Put.java @@ -7,7 +7,7 
@@ import com.google.common.collect.ImmutableSet; import com.scalar.db.api.PutBuilder.BuildableFromExisting; import com.scalar.db.api.PutBuilder.Namespace; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.io.BigIntColumn; import com.scalar.db.io.BlobColumn; import com.scalar.db.io.BooleanColumn; diff --git a/core/src/main/java/com/scalar/db/api/ReplicationAdmin.java b/core/src/main/java/com/scalar/db/api/ReplicationAdmin.java index ce595d8d64..ba82fee94e 100644 --- a/core/src/main/java/com/scalar/db/api/ReplicationAdmin.java +++ b/core/src/main/java/com/scalar/db/api/ReplicationAdmin.java @@ -1,6 +1,6 @@ package com.scalar.db.api; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import java.util.Map; diff --git a/core/src/main/java/com/scalar/db/api/ScanBuilder.java b/core/src/main/java/com/scalar/db/api/ScanBuilder.java index 116c6c9bb2..c98a8371e6 100644 --- a/core/src/main/java/com/scalar/db/api/ScanBuilder.java +++ b/core/src/main/java/com/scalar/db/api/ScanBuilder.java @@ -29,7 +29,7 @@ import com.scalar.db.api.OperationBuilder.WhereAnd; import com.scalar.db.api.OperationBuilder.WhereOr; import com.scalar.db.api.Selection.Conjunction; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.io.Key; import java.util.ArrayList; import java.util.Arrays; diff --git a/core/src/main/java/com/scalar/db/api/TableMetadata.java b/core/src/main/java/com/scalar/db/api/TableMetadata.java index 9adb868a23..b243f87d6c 100644 --- a/core/src/main/java/com/scalar/db/api/TableMetadata.java +++ b/core/src/main/java/com/scalar/db/api/TableMetadata.java @@ -4,7 +4,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.scalar.db.api.Scan.Ordering.Order; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.io.DataType; import com.scalar.db.util.ImmutableLinkedHashSet; import java.util.HashMap; diff --git a/core/src/main/java/com/scalar/db/api/TransactionState.java b/core/src/main/java/com/scalar/db/api/TransactionState.java index d2079e9c4a..a87008d879 100644 --- a/core/src/main/java/com/scalar/db/api/TransactionState.java +++ b/core/src/main/java/com/scalar/db/api/TransactionState.java @@ -1,6 +1,6 @@ package com.scalar.db.api; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; public enum TransactionState { PREPARED(1), diff --git a/core/src/main/java/com/scalar/db/common/AbstractResult.java b/core/src/main/java/com/scalar/db/common/AbstractResult.java index 0976f49813..2edff0715e 100644 --- a/core/src/main/java/com/scalar/db/common/AbstractResult.java +++ b/core/src/main/java/com/scalar/db/common/AbstractResult.java @@ -4,7 +4,6 @@ import com.google.common.base.MoreObjects.ToStringHelper; import com.google.common.base.Suppliers; import com.scalar.db.api.Result; -import com.scalar.db.common.error.CoreError; import com.scalar.db.io.Column; import com.scalar.db.io.Value; import com.scalar.db.util.ScalarDbUtils; diff --git a/core/src/main/java/com/scalar/db/common/ActiveTransactionManagedDistributedTransactionManager.java b/core/src/main/java/com/scalar/db/common/ActiveTransactionManagedDistributedTransactionManager.java index 5cce5603e4..f5cdb0bcf9 100644 --- a/core/src/main/java/com/scalar/db/common/ActiveTransactionManagedDistributedTransactionManager.java +++ 
b/core/src/main/java/com/scalar/db/common/ActiveTransactionManagedDistributedTransactionManager.java
@@ -12,7 +12,6 @@
 import com.scalar.db.api.Scan;
 import com.scalar.db.api.Update;
 import com.scalar.db.api.Upsert;
-import com.scalar.db.common.error.CoreError;
 import com.scalar.db.exception.transaction.AbortException;
 import com.scalar.db.exception.transaction.CommitException;
 import com.scalar.db.exception.transaction.CrudException;
diff --git a/core/src/main/java/com/scalar/db/common/ActiveTransactionManagedTwoPhaseCommitTransactionManager.java b/core/src/main/java/com/scalar/db/common/ActiveTransactionManagedTwoPhaseCommitTransactionManager.java
index f1d52f897d..af5d630ef2 100644
--- a/core/src/main/java/com/scalar/db/common/ActiveTransactionManagedTwoPhaseCommitTransactionManager.java
+++ b/core/src/main/java/com/scalar/db/common/ActiveTransactionManagedTwoPhaseCommitTransactionManager.java
@@ -12,7 +12,6 @@
 import com.scalar.db.api.TwoPhaseCommitTransactionManager;
 import com.scalar.db.api.Update;
 import com.scalar.db.api.Upsert;
-import com.scalar.db.common.error.CoreError;
 import com.scalar.db.exception.transaction.AbortException;
 import com.scalar.db.exception.transaction.CommitException;
 import com.scalar.db.exception.transaction.CrudException;
diff --git a/core/src/main/java/com/scalar/db/common/CheckedDistributedStorageAdmin.java b/core/src/main/java/com/scalar/db/common/CheckedDistributedStorageAdmin.java
index 51aedf8e92..0786f02456 100644
--- a/core/src/main/java/com/scalar/db/common/CheckedDistributedStorageAdmin.java
+++ b/core/src/main/java/com/scalar/db/common/CheckedDistributedStorageAdmin.java
@@ -3,7 +3,6 @@
 import com.scalar.db.api.DistributedStorageAdmin;
 import com.scalar.db.api.StorageInfo;
 import com.scalar.db.api.TableMetadata;
-import com.scalar.db.common.error.CoreError;
 import com.scalar.db.config.DatabaseConfig;
 import com.scalar.db.exception.storage.ExecutionException;
 import com.scalar.db.io.DataType;
diff --git a/core/src/main/java/com/scalar/db/common/error/CoreError.java b/core/src/main/java/com/scalar/db/common/CoreError.java
similarity index 72%
rename from core/src/main/java/com/scalar/db/common/error/CoreError.java
rename to core/src/main/java/com/scalar/db/common/CoreError.java
index 987c8ba11c..d312dabe6b 100644
--- a/core/src/main/java/com/scalar/db/common/error/CoreError.java
+++ b/core/src/main/java/com/scalar/db/common/CoreError.java
@@ -1,4 +1,7 @@
-package com.scalar.db.common.error;
+package com.scalar.db.common;
+
+import com.scalar.db.common.error.Category;
+import com.scalar.db.common.error.ScalarDbError;

 public enum CoreError implements ScalarDbError {

@@ -118,7 +121,8 @@ public enum CoreError implements ScalarDbError {
   AUTH_NOT_ENABLED(
       Category.USER_ERROR,
       "0022",
-      "The authentication and authorization feature is not enabled. To use this feature, you must enable it. Note that this feature is supported only in the ScalarDB Enterprise edition",
+      "The authentication and authorization feature is not enabled. To use this feature, you must enable it."
+          + " Note that this feature is supported only in the ScalarDB Enterprise edition",
       "",
       ""),
   CONDITION_BUILD_ERROR_CONDITION_NOT_ALLOWED_FOR_PUT_IF(
@@ -195,9 +199,9 @@ public enum CoreError implements ScalarDbError {
       "",
       ""),
   TABLE_METADATA_BUILD_ERROR_NO_COLUMNS_SPECIFIED(
-      Category.USER_ERROR, "0038", "One or more columns must be specified.", "", ""),
+      Category.USER_ERROR, "0038", "One or more columns must be specified", "", ""),
   TABLE_METADATA_BUILD_ERROR_NO_PARTITION_KEYS_SPECIFIED(
-      Category.USER_ERROR, "0039", "One or more partition keys must be specified.", "", ""),
+      Category.USER_ERROR, "0039", "One or more partition keys must be specified", "", ""),
   TABLE_METADATA_BUILD_ERROR_PARTITION_KEY_COLUMN_DEFINITION_NOT_SPECIFIED(
       Category.USER_ERROR,
       "0040",
@@ -463,93 +467,12 @@ public enum CoreError implements ScalarDbError {
   CONSENSUS_COMMIT_TRANSACTION_NOT_VALIDATED_IN_SERIALIZABLE(
       Category.USER_ERROR,
       "0107",
-      "The transaction is not validated."
-          + " When using the SERIALIZABLE isolation level, you need to call validate()"
-          + " before calling commit()",
+      "The transaction is not validated. When using the SERIALIZABLE isolation level,"
+          + " you need to call validate() before calling commit()",
       "",
       ""),
   DYNAMO_BATCH_SIZE_EXCEEDED(
       Category.USER_ERROR, "0108", "DynamoDB cannot batch more than 100 mutations at once", "", ""),
-  SCHEMA_LOADER_ALTERING_PARTITION_KEYS_NOT_SUPPORTED(
-      Category.USER_ERROR,
-      "0109",
-      "The partition keys of the table %s.%s were modified, but altering partition keys is not supported",
-      "",
-      ""),
-  SCHEMA_LOADER_ALTERING_CLUSTERING_KEYS_NOT_SUPPORTED(
-      Category.USER_ERROR,
-      "0110",
-      "The clustering keys of the table %s.%s were modified, but altering clustering keys is not supported",
-      "",
-      ""),
-  SCHEMA_LOADER_ALTERING_CLUSTERING_ORDER_NOT_SUPPORTED(
-      Category.USER_ERROR,
-      "0111",
-      "The clustering ordering of the table %s.%s were modified, but altering clustering ordering is not supported",
-      "",
-      ""),
-  SCHEMA_LOADER_DELETING_COLUMN_NOT_SUPPORTED(
-      Category.USER_ERROR,
-      "0112",
-      "The column %s of the table %s.%s has been deleted. Column deletion is not supported when altering a table",
-      "",
-      ""),
-  SCHEMA_LOADER_ALTERING_COLUMN_DATA_TYPE_NOT_SUPPORTED(
-      Category.USER_ERROR,
-      "0113",
-      "The data type of the column %s of the table %s.%s was modified, but altering data types is not supported",
-      "",
-      ""),
-  SCHEMA_LOADER_SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_REPAIR_ALL(
-      Category.USER_ERROR,
-      "0114",
-      "Specifying the '--schema-file' option is required when using the '--repair-all' option",
-      "",
-      ""),
-  SCHEMA_LOADER_SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_ALTER(
-      Category.USER_ERROR,
-      "0115",
-      "Specifying the '--schema-file' option is required when using the '--alter' option",
-      "",
-      ""),
-  SCHEMA_LOADER_SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_IMPORT(
-      Category.USER_ERROR,
-      "0116",
-      "Specifying the '--schema-file' option is required when using the '--import' option",
-      "",
-      ""),
-  SCHEMA_LOADER_SPECIFYING_COORDINATOR_WITH_IMPORT_NOT_ALLOWED(
-      Category.USER_ERROR,
-      "0117",
-      "Specifying the '--coordinator' option with the '--import' option is not allowed."
-          + " Create Coordinator tables separately",
-      "",
-      ""),
-  SCHEMA_LOADER_READING_CONFIG_FILE_FAILED(
-      Category.USER_ERROR, "0118", "Reading the configuration file failed. File: %s", "", ""),
-  SCHEMA_LOADER_READING_SCHEMA_FILE_FAILED(
-      Category.USER_ERROR, "0119", "Reading the schema file failed. File: %s", "", ""),
-  SCHEMA_LOADER_PARSING_SCHEMA_JSON_FAILED(
-      Category.USER_ERROR, "0120", "Parsing the schema JSON failed. Details: %s", "", ""),
-  SCHEMA_LOADER_PARSE_ERROR_TABLE_NAME_MUST_CONTAIN_NAMESPACE_AND_TABLE(
-      Category.USER_ERROR,
-      "0121",
-      "The table name must contain the namespace and the table. Table: %s",
-      "",
-      ""),
-  SCHEMA_LOADER_PARSE_ERROR_PARTITION_KEY_MUST_BE_SPECIFIED(
-      Category.USER_ERROR, "0122", "The partition key must be specified. Table: %s", "", ""),
-  SCHEMA_LOADER_PARSE_ERROR_INVALID_CLUSTERING_KEY_FORMAT(
-      Category.USER_ERROR,
-      "0123",
-      "Invalid clustering-key format. The clustering key must be in the format of 'column_name' or 'column_name ASC/DESC'."
-          + " Table: %s; Clustering key: %s",
-      "",
-      ""),
-  SCHEMA_LOADER_PARSE_ERROR_COLUMNS_NOT_SPECIFIED(
-      Category.USER_ERROR, "0124", "Columns must be specified. Table: %s", "", ""),
-  SCHEMA_LOADER_PARSE_ERROR_INVALID_COLUMN_TYPE(
-      Category.USER_ERROR, "0125", "Invalid column type. Table: %s; Column: %s; Type: %s", "", ""),
   OPERATION_CHECK_ERROR_UNSUPPORTED_MUTATION_TYPE(
       Category.USER_ERROR,
       "0126",
@@ -580,32 +503,6 @@ public enum CoreError implements ScalarDbError {
       "Cross-partition scan with ordering is not supported in DynamoDB",
       "",
       ""),
-  DATA_LOADER_DIRECTORY_WRITE_ACCESS_NOT_ALLOWED(
-      Category.USER_ERROR,
-      "0131",
-      "The directory '%s' does not have write permissions. Please ensure that the current user has write access to the directory.",
-      "",
-      ""),
-  DATA_LOADER_DIRECTORY_CREATE_FAILED(
-      Category.USER_ERROR,
-      "0132",
-      "Failed to create the directory '%s'. Please check if you have sufficient permissions and if there are any file system restrictions. Details: %s",
-      "",
-      ""),
-  DATA_LOADER_MISSING_DIRECTORY_NOT_ALLOWED(
-      Category.USER_ERROR, "0133", "Directory path cannot be null or empty.", "", ""),
-  DATA_LOADER_MISSING_FILE_EXTENSION(
-      Category.USER_ERROR,
-      "0134",
-      "No file extension was found on the provided file name %s.",
-      "",
-      ""),
-  DATA_LOADER_INVALID_FILE_EXTENSION(
-      Category.USER_ERROR,
-      "0135",
-      "Invalid file extension: %s. Allowed extensions are: %s",
-      "",
-      ""),
   SINGLE_CRUD_OPERATION_TRANSACTION_GETTING_TRANSACTION_STATE_NOT_SUPPORTED(
       Category.USER_ERROR,
       "0136",
@@ -652,7 +549,8 @@ public enum CoreError implements ScalarDbError {
   ENCRYPTION_NOT_ENABLED(
       Category.USER_ERROR,
       "0143",
-      "The encryption feature is not enabled. To encrypt data at rest, you must enable this feature. Note that this feature is supported only in the ScalarDB Enterprise edition",
+      "The encryption feature is not enabled. To encrypt data at rest, you must enable this feature."
+          + " Note that this feature is supported only in the ScalarDB Enterprise edition",
       "",
       ""),
   INVALID_VARIABLE_KEY_COLUMN_SIZE(
@@ -680,52 +578,11 @@ public enum CoreError implements ScalarDbError {
       "Deleting data already-inserted by the same transaction is not allowed",
       "",
       ""),
-  DATA_LOADER_INVALID_COLUMN_NON_EXISTENT(
-      Category.USER_ERROR,
-      "0148",
-      "Invalid key: Column %s does not exist in the table %s in namespace %s.",
-      "",
-      ""),
-  DATA_LOADER_INVALID_BASE64_ENCODING_FOR_COLUMN_VALUE(
-      Category.USER_ERROR,
-      "0149",
-      "Invalid base64 encoding for blob value '%s' for column %s in table %s in namespace %s",
-      "",
-      ""),
-  DATA_LOADER_INVALID_NUMBER_FORMAT_FOR_COLUMN_VALUE(
-      Category.USER_ERROR,
-      "0150",
-      "Invalid number '%s' specified for column %s in table %s in namespace %s",
-      "",
-      ""),
-  DATA_LOADER_ERROR_METHOD_NULL_ARGUMENT(
-      Category.USER_ERROR, "0151", "Method null argument not allowed", "", ""),
   ABAC_NOT_ENABLED(
       Category.USER_ERROR,
       "0152",
-      "The attribute-based access control feature is not enabled. To use this feature, you must enable it. Note that this feature is supported only in the ScalarDB Enterprise edition",
-      "",
-      ""),
-  DATA_LOADER_CLUSTERING_KEY_NOT_FOUND(
-      Category.USER_ERROR, "0153", "The provided clustering key %s was not found", "", ""),
-  DATA_LOADER_INVALID_PROJECTION(
-      Category.USER_ERROR, "0154", "The column '%s' was not found", "", ""),
-  DATA_LOADER_INCOMPLETE_PARTITION_KEY(
-      Category.USER_ERROR,
-      "0155",
-      "The provided partition key is incomplete. Required key: %s",
-      "",
-      ""),
-  DATA_LOADER_CLUSTERING_KEY_ORDER_MISMATCH(
-      Category.USER_ERROR,
-      "0156",
-      "The provided clustering key order does not match the table schema. Required order: %s",
-      "",
-      ""),
-  DATA_LOADER_PARTITION_KEY_ORDER_MISMATCH(
-      Category.USER_ERROR,
-      "0157",
-      "The provided partition key order does not match the table schema. Required order: %s",
+      "The attribute-based access control feature is not enabled. To use this feature, you must enable it."
+          + " Note that this feature is supported only in the ScalarDB Enterprise edition",
       "",
       ""),
   OUT_OF_RANGE_COLUMN_VALUE_FOR_DATE(
@@ -743,7 +600,8 @@ public enum CoreError implements ScalarDbError {
   OUT_OF_RANGE_COLUMN_VALUE_FOR_TIMESTAMP(
       Category.USER_ERROR,
       "0160",
-      "This TIMESTAMP column value is out of the valid range. It must be between 1000-01-01T00:00:00.000 and 9999-12-31T23:59:59.999. Value: %s",
+      "This TIMESTAMP column value is out of the valid range."
+          + " It must be between 1000-01-01T00:00:00.000 and 9999-12-31T23:59:59.999. Value: %s",
       "",
       ""),
   SUBMILLISECOND_PRECISION_NOT_SUPPORTED_FOR_TIMESTAMP(
@@ -755,7 +613,8 @@ public enum CoreError implements ScalarDbError {
   OUT_OF_RANGE_COLUMN_VALUE_FOR_TIMESTAMPTZ(
       Category.USER_ERROR,
       "0162",
-      "This TIMESTAMPTZ column value is out of the valid range. It must be between 1000-01-01T00:00:00.000Z to 9999-12-31T23:59:59.999Z. Value: %s",
+      "This TIMESTAMPTZ column value is out of the valid range."
+          + " It must be between 1000-01-01T00:00:00.000Z to 9999-12-31T23:59:59.999Z. Value: %s",
       "",
       ""),
   SUBMILLISECOND_PRECISION_NOT_SUPPORTED_FOR_TIMESTAMPTZ(
@@ -770,166 +629,25 @@ public enum CoreError implements ScalarDbError {
       "The underlying-storage data type %s is not supported as the ScalarDB %s data type: %s",
       "",
       ""),
-  DATA_LOADER_MISSING_NAMESPACE_OR_TABLE(
-      Category.USER_ERROR, "0165", "Missing namespace or table: %s, %s", "", ""),
-  DATA_LOADER_TABLE_METADATA_RETRIEVAL_FAILED(
-      Category.USER_ERROR, "0166", "Failed to retrieve table metadata. Details: %s", "", ""),
-  DATA_LOADER_DUPLICATE_DATA_MAPPINGS(
-      Category.USER_ERROR,
-      "0167",
-      "Duplicate data mappings found for table '%s' in the control file",
-      "",
-      ""),
-  DATA_LOADER_MISSING_COLUMN_MAPPING(
-      Category.USER_ERROR,
-      "0168",
-      "No mapping found for column '%s' in table '%s' in the control file. Control file validation set at 'FULL'. All columns need to be mapped.",
-      "",
-      ""),
-  DATA_LOADER_CONTROL_FILE_MISSING_DATA_MAPPINGS(
-      Category.USER_ERROR, "0169", "The control file is missing data mappings", "", ""),
-  DATA_LOADER_TARGET_COLUMN_NOT_FOUND(
-      Category.USER_ERROR,
-      "0170",
-      "The target column '%s' for source field '%s' could not be found in table '%s'",
-      "",
-      ""),
-  DATA_LOADER_MISSING_PARTITION_KEY(
-      Category.USER_ERROR,
-      "0171",
-      "The required partition key '%s' is missing in the control file mapping for table '%s'",
-      "",
-      ""),
-  DATA_LOADER_MISSING_CLUSTERING_KEY(
-      Category.USER_ERROR,
-      "0172",
-      "The required clustering key '%s' is missing in the control file mapping for table '%s'",
-      "",
-      ""),
-  DATA_LOADER_MULTIPLE_MAPPINGS_FOR_COLUMN_FOUND(
-      Category.USER_ERROR,
-      "0173",
-      "Duplicated data mappings found for column '%s' in table '%s'",
-      "",
-      ""),
-  DATA_LOADER_MISSING_CLUSTERING_KEY_COLUMN(
-      Category.USER_ERROR,
-      "0174",
-      "Missing required field or column mapping for clustering key %s",
-      "",
-      ""),
-  DATA_LOADER_MISSING_PARTITION_KEY_COLUMN(
-      Category.USER_ERROR,
-      "0175",
-      "Missing required field or column mapping for partition key %s",
-      "",
-      ""),
-  DATA_LOADER_MISSING_COLUMN(
-      Category.USER_ERROR, "0176", "Missing field or column mapping for %s", "", ""),
-  DATA_LOADER_VALUE_TO_STRING_CONVERSION_FAILED(
-      Category.USER_ERROR,
-      "0177",
-      "Something went wrong while converting the ScalarDB values to strings. The table metadata and Value datatype probably do not match. Details: %s",
-      "",
-      ""),
-  DATA_LOADER_FILE_FORMAT_NOT_SUPPORTED(
-      Category.USER_ERROR, "0178", "The provided file format is not supported : %s", "", ""),
-  DATA_LOADER_COULD_NOT_FIND_PARTITION_KEY(
-      Category.USER_ERROR, "0179", "Could not find the partition key", "", ""),
-  DATA_LOADER_UPSERT_INSERT_MISSING_COLUMNS(
-      Category.USER_ERROR,
-      "0180",
-      "The source record needs to contain all fields if the UPSERT turns into an INSERT",
-      "",
-      ""),
-  DATA_LOADER_DATA_ALREADY_EXISTS(Category.USER_ERROR, "0181", "Record already exists", "", ""),
-  DATA_LOADER_DATA_NOT_FOUND(Category.USER_ERROR, "0182", "Record was not found", "", ""),
-  DATA_LOADER_COULD_NOT_FIND_CLUSTERING_KEY(
-      Category.USER_ERROR, "0183", "Could not find the clustering key", "", ""),
-  DATA_LOADER_TABLE_METADATA_MISSING(
-      Category.USER_ERROR, "0184", "No table metadata found", "", ""),
-  DATA_LOADER_MISSING_SOURCE_FIELD(
-      Category.USER_ERROR,
-      "0185",
-      "The data mapping source field '%s' for table '%s' is missing in the json data record",
-      "",
-      ""),
-  DATA_LOADER_CSV_DATA_MISMATCH(
-      Category.USER_ERROR, "0186", "The CSV row: %s does not match header: %s.", "", ""),
-  DATA_LOADER_JSON_CONTENT_START_ERROR(
-      Category.USER_ERROR, "0187", "Expected JSON file content to be an array", "", ""),
   REPLICATION_NOT_ENABLED(
       Category.USER_ERROR,
       "0188",
-      "The replication feature is not enabled. To use this feature, you must enable it. Note that this feature is supported only in the ScalarDB Enterprise edition",
+      "The replication feature is not enabled. To use this feature, you must enable it."
+          + " Note that this feature is supported only in the ScalarDB Enterprise edition",
       "",
       ""),
-  DATA_LOADER_IMPORT_TARGET_MISSING(
-      Category.USER_ERROR,
-      "0189",
-      "Missing option: either '--namespace' and'--table' or '--control-file' options must be specified.",
-      "",
-      ""),
-  DATA_LOADER_MISSING_IMPORT_FILE(
-      Category.USER_ERROR,
-      "0190",
-      "The file '%s' specified by the argument '%s' does not exist.",
-      "",
-      ""),
-  DATA_LOADER_LOG_DIRECTORY_WRITE_ACCESS_DENIED(
-      Category.USER_ERROR, "0191", "Cannot write to the log directory: %s", "", ""),
-  DATA_LOADER_LOG_DIRECTORY_CREATION_FAILED(
-      Category.USER_ERROR, "0192", "Failed to create the log directory: %s", "", ""),
-  DATA_LOADER_INVALID_CONTROL_FILE(
-      Category.USER_ERROR, "0193", "Failed to parse the control file: %s", "", ""),
-  DATA_LOADER_DIRECTORY_WRITE_ACCESS(
-      Category.USER_ERROR,
-      "0194",
-      "No permission to create or write files in the directory: %s",
-      "",
-      ""),
-  DATA_LOADER_DIRECTORY_CREATION_FAILED(
-      Category.USER_ERROR, "0195", "Failed to create the directory: %s", "", ""),
-  DATA_LOADER_PATH_IS_NOT_A_DIRECTORY(
-      Category.USER_ERROR, "0196", "Path exists but is not a directory: %s", "", ""),
-  DATA_LOADER_FILE_PATH_IS_BLANK(
-      Category.USER_ERROR, "0197", "File path must not be blank.", "", ""),
-  DATA_LOADER_FILE_NOT_FOUND(Category.USER_ERROR, "0198", "File not found: %s", "", ""),
-  DATA_LOADER_INVALID_DATE_TIME_FOR_COLUMN_VALUE(
-      Category.USER_ERROR,
-      "0199",
-      "Invalid date time value '%s' specified for column %s in table %s in namespace %s.",
-      "",
-      ""),
-  DATA_LOADER_NULL_OR_EMPTY_KEY_VALUE_INPUT(
-      Category.USER_ERROR, "0200", "Key-value cannot be null or empty", "", ""),
-  DATA_LOADER_INVALID_KEY_VALUE_INPUT(
-      Category.USER_ERROR, "0201", "Invalid key-value format: %s", "", ""),
-  DATA_LOADER_SPLIT_INPUT_VALUE_NULL(Category.USER_ERROR, "0202", "Value must not be null", "", ""),
-  DATA_LOADER_SPLIT_INPUT_DELIMITER_NULL(
-      Category.USER_ERROR, "0203", "Delimiter must not be null", "", ""),
-  DATA_LOADER_CONFIG_FILE_PATH_BLANK(
-      Category.USER_ERROR, "0204", "Config file path must not be blank", "", ""),
   CONSENSUS_COMMIT_SCANNER_NOT_CLOSED(
       Category.USER_ERROR,
       "0205",
-      "Some scanners were not closed. All scanners must be closed before committing the transaction.",
+      "Some scanners were not closed. All scanners must be closed before committing the transaction",
      "",
       ""),
   TWO_PHASE_CONSENSUS_COMMIT_SCANNER_NOT_CLOSED(
       Category.USER_ERROR,
       "0206",
-      "Some scanners were not closed. All scanners must be closed before preparing the transaction.",
+      "Some scanners were not closed. All scanners must be closed before preparing the transaction",
       "",
       ""),
-  DATA_LOADER_INVALID_DATA_CHUNK_SIZE(
-      Category.USER_ERROR, "0207", "Data chunk size must be greater than 0", "", ""),
-  DATA_LOADER_INVALID_TRANSACTION_SIZE(
-      Category.USER_ERROR, "0208", "Transaction size must be greater than 0", "", ""),
-  DATA_LOADER_INVALID_MAX_THREADS(
-      Category.USER_ERROR, "0209", "Number of max threads must be greater than 0", "", ""),
-  DATA_LOADER_INVALID_DATA_CHUNK_QUEUE_SIZE(
-      Category.USER_ERROR, "0210", "Data chunk queue size must be greater than 0", "", ""),
   MUTATION_NOT_ALLOWED_IN_READ_ONLY_TRANSACTION(
       Category.USER_ERROR,
       "0211",
@@ -1205,32 +923,6 @@ public enum CoreError implements ScalarDbError {
       "Handling the before-preparation snapshot hook failed. Details: %s",
       "",
       ""),
-  DATA_LOADER_ERROR_CRUD_EXCEPTION(
-      Category.INTERNAL_ERROR,
-      "0047",
-      "Something went wrong while trying to save the data. Details: %s",
-      "",
-      ""),
-  DATA_LOADER_ERROR_SCAN(
-      Category.INTERNAL_ERROR,
-      "0048",
-      "Something went wrong while scanning. Are you sure you are running in the correct transaction mode? Details: %s",
-      "",
-      ""),
-  DATA_LOADER_CSV_FILE_READ_FAILED(
-      Category.INTERNAL_ERROR, "0049", "Failed to read CSV file. Details: %s.", "", ""),
-  DATA_LOADER_CSV_FILE_HEADER_READ_FAILED(
-      Category.INTERNAL_ERROR, "0050", "Failed to CSV read header line. Details: %s.", "", ""),
-  DATA_LOADER_DATA_CHUNK_PROCESS_FAILED(
-      Category.INTERNAL_ERROR,
-      "0051",
-      "Data chunk processing was interrupted. Details: %s",
-      "",
-      ""),
-  DATA_LOADER_JSON_FILE_READ_FAILED(
-      Category.INTERNAL_ERROR, "0052", "Failed to read JSON file. Details: %s.", "", ""),
-  DATA_LOADER_JSONLINES_FILE_READ_FAILED(
-      Category.INTERNAL_ERROR, "0053", "Failed to read JSON Lines file. Details: %s.", "", ""),
   JDBC_TRANSACTION_GETTING_SCANNER_FAILED(
       Category.INTERNAL_ERROR, "0054", "Getting the scanner failed. Details: %s", "", ""),
   JDBC_CLOSING_SCANNER_FAILED(
diff --git a/core/src/main/java/com/scalar/db/common/ProjectedResult.java b/core/src/main/java/com/scalar/db/common/ProjectedResult.java
index 8d93238199..8bad8de031 100644
--- a/core/src/main/java/com/scalar/db/common/ProjectedResult.java
+++ b/core/src/main/java/com/scalar/db/common/ProjectedResult.java
@@ -2,7 +2,6 @@

 import com.google.common.collect.ImmutableSet;
 import com.scalar.db.api.Result;
-import com.scalar.db.common.error.CoreError;
 import com.scalar.db.io.Column;
 import com.scalar.db.io.Key;
 import java.nio.ByteBuffer;
diff --git a/core/src/main/java/com/scalar/db/common/ReadOnlyDistributedTransaction.java b/core/src/main/java/com/scalar/db/common/ReadOnlyDistributedTransaction.java
index ffff08acaf..5275c5e084 100644
--- a/core/src/main/java/com/scalar/db/common/ReadOnlyDistributedTransaction.java
+++ b/core/src/main/java/com/scalar/db/common/ReadOnlyDistributedTransaction.java
@@ -7,7 +7,6 @@
 import com.scalar.db.api.Put;
 import com.scalar.db.api.Update;
 import com.scalar.db.api.Upsert;
-import com.scalar.db.common.error.CoreError;
 import com.scalar.db.exception.transaction.CrudException;
 import java.util.List;
 import javax.annotation.concurrent.NotThreadSafe;
diff --git a/core/src/main/java/com/scalar/db/common/StateManagedDistributedTransactionManager.java b/core/src/main/java/com/scalar/db/common/StateManagedDistributedTransactionManager.java
index b2034765bb..498b1e2e6b 100644
--- a/core/src/main/java/com/scalar/db/common/StateManagedDistributedTransactionManager.java
+++ b/core/src/main/java/com/scalar/db/common/StateManagedDistributedTransactionManager.java
@@ -12,7 +12,6 @@
 import com.scalar.db.api.Scan;
 import com.scalar.db.api.Update;
 import com.scalar.db.api.Upsert;
-import com.scalar.db.common.error.CoreError;
 import com.scalar.db.exception.transaction.AbortException;
 import com.scalar.db.exception.transaction.CommitException;
 import com.scalar.db.exception.transaction.CrudException;
diff --git a/core/src/main/java/com/scalar/db/common/StateManagedTwoPhaseCommitTransactionManager.java b/core/src/main/java/com/scalar/db/common/StateManagedTwoPhaseCommitTransactionManager.java
index 57aa36c9a2..3935b0df32 100644
--- a/core/src/main/java/com/scalar/db/common/StateManagedTwoPhaseCommitTransactionManager.java
+++ b/core/src/main/java/com/scalar/db/common/StateManagedTwoPhaseCommitTransactionManager.java
@@ -12,7 +12,6 @@
 import com.scalar.db.api.TwoPhaseCommitTransactionManager;
 import com.scalar.db.api.Update;
 import com.scalar.db.api.Upsert;
-import
com.scalar.db.common.error.CoreError; import com.scalar.db.exception.transaction.AbortException; import com.scalar.db.exception.transaction.CommitException; import com.scalar.db.exception.transaction.CrudException; diff --git a/core/src/main/java/com/scalar/db/common/TableMetadataManager.java b/core/src/main/java/com/scalar/db/common/TableMetadataManager.java index 247f33681b..264a8904a9 100644 --- a/core/src/main/java/com/scalar/db/common/TableMetadataManager.java +++ b/core/src/main/java/com/scalar/db/common/TableMetadataManager.java @@ -6,7 +6,6 @@ import com.scalar.db.api.Admin; import com.scalar.db.api.Operation; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.util.ScalarDbUtils; import com.scalar.db.util.ThrowableFunction; diff --git a/core/src/main/java/com/scalar/db/common/checker/OperationChecker.java b/core/src/main/java/com/scalar/db/common/checker/OperationChecker.java index 869556f316..2754f0e471 100644 --- a/core/src/main/java/com/scalar/db/common/checker/OperationChecker.java +++ b/core/src/main/java/com/scalar/db/common/checker/OperationChecker.java @@ -13,9 +13,9 @@ import com.scalar.db.api.Selection.Conjunction; import com.scalar.db.api.StorageInfo; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.StorageInfoProvider; import com.scalar.db.common.TableMetadataManager; -import com.scalar.db.common.error.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.Column; diff --git a/core/src/main/java/com/scalar/db/config/ConfigUtils.java b/core/src/main/java/com/scalar/db/config/ConfigUtils.java index 82a8151107..7dda6cf615 100644 --- a/core/src/main/java/com/scalar/db/config/ConfigUtils.java +++ b/core/src/main/java/com/scalar/db/config/ConfigUtils.java @@ -3,7 +3,7 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import java.io.File; import java.io.IOException; import java.io.UncheckedIOException; diff --git a/core/src/main/java/com/scalar/db/config/DatabaseConfig.java b/core/src/main/java/com/scalar/db/config/DatabaseConfig.java index 593e2fc9bc..e5a09e468e 100644 --- a/core/src/main/java/com/scalar/db/config/DatabaseConfig.java +++ b/core/src/main/java/com/scalar/db/config/DatabaseConfig.java @@ -8,7 +8,7 @@ import static com.scalar.db.config.ConfigUtils.getStringArray; import com.google.common.collect.ImmutableList; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.io.File; import java.io.FileInputStream; diff --git a/core/src/main/java/com/scalar/db/io/BigIntColumn.java b/core/src/main/java/com/scalar/db/io/BigIntColumn.java index 2afb614e89..de4dcb2b4f 100644 --- a/core/src/main/java/com/scalar/db/io/BigIntColumn.java +++ b/core/src/main/java/com/scalar/db/io/BigIntColumn.java @@ -2,7 +2,7 @@ import com.google.common.base.MoreObjects; import com.google.common.collect.ComparisonChain; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import java.util.Objects; import java.util.Optional; import javax.annotation.Nullable; diff --git a/core/src/main/java/com/scalar/db/io/DateColumn.java 
b/core/src/main/java/com/scalar/db/io/DateColumn.java index 16c7dc4430..b4ddcfc3d2 100644 --- a/core/src/main/java/com/scalar/db/io/DateColumn.java +++ b/core/src/main/java/com/scalar/db/io/DateColumn.java @@ -2,7 +2,7 @@ import com.google.common.base.MoreObjects; import com.google.common.collect.ComparisonChain; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import java.time.LocalDate; import java.util.Comparator; import java.util.Objects; diff --git a/core/src/main/java/com/scalar/db/io/Key.java b/core/src/main/java/com/scalar/db/io/Key.java index a9f8b26f9a..db86f81dac 100644 --- a/core/src/main/java/com/scalar/db/io/Key.java +++ b/core/src/main/java/com/scalar/db/io/Key.java @@ -5,7 +5,7 @@ import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; import com.google.common.collect.Ordering; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.util.ScalarDbUtils; import java.nio.ByteBuffer; import java.time.Instant; diff --git a/core/src/main/java/com/scalar/db/io/TimeColumn.java b/core/src/main/java/com/scalar/db/io/TimeColumn.java index 18e2cb19e3..bc5d6f3fb7 100644 --- a/core/src/main/java/com/scalar/db/io/TimeColumn.java +++ b/core/src/main/java/com/scalar/db/io/TimeColumn.java @@ -2,7 +2,7 @@ import com.google.common.base.MoreObjects; import com.google.common.collect.ComparisonChain; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import java.time.LocalTime; import java.util.Comparator; import java.util.Objects; diff --git a/core/src/main/java/com/scalar/db/io/TimestampColumn.java b/core/src/main/java/com/scalar/db/io/TimestampColumn.java index 4e96ff3132..2de8d757ff 100644 --- a/core/src/main/java/com/scalar/db/io/TimestampColumn.java +++ b/core/src/main/java/com/scalar/db/io/TimestampColumn.java @@ -2,7 +2,7 @@ import com.google.common.base.MoreObjects; import com.google.common.collect.ComparisonChain; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import java.time.LocalDateTime; import java.util.Comparator; import java.util.Objects; diff --git a/core/src/main/java/com/scalar/db/io/TimestampTZColumn.java b/core/src/main/java/com/scalar/db/io/TimestampTZColumn.java index 23f18b3857..e2eb891b12 100644 --- a/core/src/main/java/com/scalar/db/io/TimestampTZColumn.java +++ b/core/src/main/java/com/scalar/db/io/TimestampTZColumn.java @@ -2,7 +2,7 @@ import com.google.common.base.MoreObjects; import com.google.common.collect.ComparisonChain; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneOffset; diff --git a/core/src/main/java/com/scalar/db/service/ProviderManager.java b/core/src/main/java/com/scalar/db/service/ProviderManager.java index c76b989102..fa42e384b7 100644 --- a/core/src/main/java/com/scalar/db/service/ProviderManager.java +++ b/core/src/main/java/com/scalar/db/service/ProviderManager.java @@ -8,7 +8,7 @@ import com.scalar.db.api.DistributedTransactionManager; import com.scalar.db.api.DistributedTransactionProvider; import com.scalar.db.api.TwoPhaseCommitTransactionManager; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.config.DatabaseConfig; import java.util.Locale; import java.util.Map; diff --git a/core/src/main/java/com/scalar/db/storage/cassandra/BatchHandler.java 
b/core/src/main/java/com/scalar/db/storage/cassandra/BatchHandler.java index 36662b7bb2..035f1229d4 100644 --- a/core/src/main/java/com/scalar/db/storage/cassandra/BatchHandler.java +++ b/core/src/main/java/com/scalar/db/storage/cassandra/BatchHandler.java @@ -10,7 +10,7 @@ import com.datastax.driver.core.exceptions.WriteTimeoutException; import com.google.common.annotations.VisibleForTesting; import com.scalar.db.api.Mutation; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.NoMutationException; import com.scalar.db.exception.storage.RetriableExecutionException; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; diff --git a/core/src/main/java/com/scalar/db/storage/cassandra/Cassandra.java b/core/src/main/java/com/scalar/db/storage/cassandra/Cassandra.java index 25b066e99a..e8a0930ab1 100644 --- a/core/src/main/java/com/scalar/db/storage/cassandra/Cassandra.java +++ b/core/src/main/java/com/scalar/db/storage/cassandra/Cassandra.java @@ -14,11 +14,11 @@ import com.scalar.db.api.Scan; import com.scalar.db.api.Scanner; import com.scalar.db.common.AbstractDistributedStorage; +import com.scalar.db.common.CoreError; import com.scalar.db.common.FilterableScanner; import com.scalar.db.common.StorageInfoProvider; import com.scalar.db.common.TableMetadataManager; import com.scalar.db.common.checker.OperationChecker; -import com.scalar.db.common.error.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import java.io.IOException; diff --git a/core/src/main/java/com/scalar/db/storage/cassandra/CassandraAdmin.java b/core/src/main/java/com/scalar/db/storage/cassandra/CassandraAdmin.java index 22a4e17f7c..d30bd4bbb9 100644 --- a/core/src/main/java/com/scalar/db/storage/cassandra/CassandraAdmin.java +++ b/core/src/main/java/com/scalar/db/storage/cassandra/CassandraAdmin.java @@ -23,8 +23,8 @@ import com.scalar.db.api.Scan.Ordering.Order; import com.scalar.db.api.StorageInfo; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.StorageInfoImpl; -import com.scalar.db.common.error.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; diff --git a/core/src/main/java/com/scalar/db/storage/cassandra/MutateStatementHandler.java b/core/src/main/java/com/scalar/db/storage/cassandra/MutateStatementHandler.java index 2594da421c..026eaebf47 100644 --- a/core/src/main/java/com/scalar/db/storage/cassandra/MutateStatementHandler.java +++ b/core/src/main/java/com/scalar/db/storage/cassandra/MutateStatementHandler.java @@ -9,7 +9,7 @@ import com.datastax.driver.core.querybuilder.BuiltStatement; import com.scalar.db.api.Mutation; import com.scalar.db.api.Operation; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.storage.NoMutationException; import com.scalar.db.exception.storage.RetriableExecutionException; diff --git a/core/src/main/java/com/scalar/db/storage/cassandra/SelectStatementHandler.java b/core/src/main/java/com/scalar/db/storage/cassandra/SelectStatementHandler.java index ed2315e0a5..80b77ca0c2 100644 --- a/core/src/main/java/com/scalar/db/storage/cassandra/SelectStatementHandler.java +++ b/core/src/main/java/com/scalar/db/storage/cassandra/SelectStatementHandler.java @@ -21,7 +21,7 @@ import 
com.scalar.db.api.Scan; import com.scalar.db.api.ScanAll; import com.scalar.db.api.Selection; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.Column; import com.scalar.db.io.Key; diff --git a/core/src/main/java/com/scalar/db/storage/cosmos/BatchHandler.java b/core/src/main/java/com/scalar/db/storage/cosmos/BatchHandler.java index b72733b95a..25e2ab58dc 100644 --- a/core/src/main/java/com/scalar/db/storage/cosmos/BatchHandler.java +++ b/core/src/main/java/com/scalar/db/storage/cosmos/BatchHandler.java @@ -4,8 +4,8 @@ import com.azure.cosmos.CosmosException; import com.scalar.db.api.Mutation; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.TableMetadataManager; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.storage.NoMutationException; import com.scalar.db.exception.storage.RetriableExecutionException; diff --git a/core/src/main/java/com/scalar/db/storage/cosmos/Cosmos.java b/core/src/main/java/com/scalar/db/storage/cosmos/Cosmos.java index 6e24c3a925..84ba08d5bd 100644 --- a/core/src/main/java/com/scalar/db/storage/cosmos/Cosmos.java +++ b/core/src/main/java/com/scalar/db/storage/cosmos/Cosmos.java @@ -14,11 +14,11 @@ import com.scalar.db.api.Scan; import com.scalar.db.api.Scanner; import com.scalar.db.common.AbstractDistributedStorage; +import com.scalar.db.common.CoreError; import com.scalar.db.common.FilterableScanner; import com.scalar.db.common.StorageInfoProvider; import com.scalar.db.common.TableMetadataManager; import com.scalar.db.common.checker.OperationChecker; -import com.scalar.db.common.error.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import java.io.IOException; diff --git a/core/src/main/java/com/scalar/db/storage/cosmos/CosmosAdmin.java b/core/src/main/java/com/scalar/db/storage/cosmos/CosmosAdmin.java index 6888eac356..ba04c683a4 100644 --- a/core/src/main/java/com/scalar/db/storage/cosmos/CosmosAdmin.java +++ b/core/src/main/java/com/scalar/db/storage/cosmos/CosmosAdmin.java @@ -26,8 +26,8 @@ import com.scalar.db.api.Scan.Ordering.Order; import com.scalar.db.api.StorageInfo; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.StorageInfoImpl; -import com.scalar.db.common.error.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; diff --git a/core/src/main/java/com/scalar/db/storage/cosmos/CosmosConfig.java b/core/src/main/java/com/scalar/db/storage/cosmos/CosmosConfig.java index b6ce6de2d1..f1794666fe 100644 --- a/core/src/main/java/com/scalar/db/storage/cosmos/CosmosConfig.java +++ b/core/src/main/java/com/scalar/db/storage/cosmos/CosmosConfig.java @@ -2,7 +2,7 @@ import static com.scalar.db.config.ConfigUtils.getString; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.config.DatabaseConfig; import java.util.Optional; import javax.annotation.Nullable; diff --git a/core/src/main/java/com/scalar/db/storage/cosmos/CosmosOperationChecker.java b/core/src/main/java/com/scalar/db/storage/cosmos/CosmosOperationChecker.java index 2c23898506..e4599def60 100644 --- a/core/src/main/java/com/scalar/db/storage/cosmos/CosmosOperationChecker.java 
+++ b/core/src/main/java/com/scalar/db/storage/cosmos/CosmosOperationChecker.java @@ -8,10 +8,10 @@ import com.scalar.db.api.Put; import com.scalar.db.api.Scan; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.StorageInfoProvider; import com.scalar.db.common.TableMetadataManager; import com.scalar.db.common.checker.OperationChecker; -import com.scalar.db.common.error.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.BigIntColumn; diff --git a/core/src/main/java/com/scalar/db/storage/cosmos/CosmosUtils.java b/core/src/main/java/com/scalar/db/storage/cosmos/CosmosUtils.java index 067d0b0c8c..ce5d31e936 100644 --- a/core/src/main/java/com/scalar/db/storage/cosmos/CosmosUtils.java +++ b/core/src/main/java/com/scalar/db/storage/cosmos/CosmosUtils.java @@ -4,7 +4,7 @@ import com.azure.cosmos.CosmosClient; import com.azure.cosmos.CosmosClientBuilder; import com.google.common.annotations.VisibleForTesting; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import java.util.Locale; public final class CosmosUtils { diff --git a/core/src/main/java/com/scalar/db/storage/cosmos/MutateStatementHandler.java b/core/src/main/java/com/scalar/db/storage/cosmos/MutateStatementHandler.java index 0cf7c91709..491a2b56c4 100644 --- a/core/src/main/java/com/scalar/db/storage/cosmos/MutateStatementHandler.java +++ b/core/src/main/java/com/scalar/db/storage/cosmos/MutateStatementHandler.java @@ -4,8 +4,8 @@ import com.azure.cosmos.CosmosException; import com.scalar.db.api.Mutation; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.TableMetadataManager; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.storage.NoMutationException; import com.scalar.db.exception.storage.RetriableExecutionException; diff --git a/core/src/main/java/com/scalar/db/storage/cosmos/SelectStatementHandler.java b/core/src/main/java/com/scalar/db/storage/cosmos/SelectStatementHandler.java index 187dfa924b..0e467c4cf9 100644 --- a/core/src/main/java/com/scalar/db/storage/cosmos/SelectStatementHandler.java +++ b/core/src/main/java/com/scalar/db/storage/cosmos/SelectStatementHandler.java @@ -14,9 +14,9 @@ import com.scalar.db.api.Scanner; import com.scalar.db.api.Selection; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.EmptyScanner; import com.scalar.db.common.TableMetadataManager; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.Column; import com.scalar.db.util.ScalarDbUtils; diff --git a/core/src/main/java/com/scalar/db/storage/dynamo/BatchHandler.java b/core/src/main/java/com/scalar/db/storage/dynamo/BatchHandler.java index 9ea87e1b5a..baf98dc488 100644 --- a/core/src/main/java/com/scalar/db/storage/dynamo/BatchHandler.java +++ b/core/src/main/java/com/scalar/db/storage/dynamo/BatchHandler.java @@ -8,8 +8,8 @@ import com.scalar.db.api.PutIfExists; import com.scalar.db.api.PutIfNotExists; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.TableMetadataManager; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.storage.NoMutationException; import 
com.scalar.db.exception.storage.RetriableExecutionException; diff --git a/core/src/main/java/com/scalar/db/storage/dynamo/DeleteStatementHandler.java b/core/src/main/java/com/scalar/db/storage/dynamo/DeleteStatementHandler.java index 2a18ecbbc8..6483d5a62b 100644 --- a/core/src/main/java/com/scalar/db/storage/dynamo/DeleteStatementHandler.java +++ b/core/src/main/java/com/scalar/db/storage/dynamo/DeleteStatementHandler.java @@ -6,8 +6,8 @@ import com.scalar.db.api.DeleteIf; import com.scalar.db.api.DeleteIfExists; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.TableMetadataManager; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.storage.NoMutationException; import com.scalar.db.exception.storage.RetriableExecutionException; diff --git a/core/src/main/java/com/scalar/db/storage/dynamo/Dynamo.java b/core/src/main/java/com/scalar/db/storage/dynamo/Dynamo.java index 99ecae7c30..8789e2df44 100644 --- a/core/src/main/java/com/scalar/db/storage/dynamo/Dynamo.java +++ b/core/src/main/java/com/scalar/db/storage/dynamo/Dynamo.java @@ -13,11 +13,11 @@ import com.scalar.db.api.Scan; import com.scalar.db.api.Scanner; import com.scalar.db.common.AbstractDistributedStorage; +import com.scalar.db.common.CoreError; import com.scalar.db.common.FilterableScanner; import com.scalar.db.common.StorageInfoProvider; import com.scalar.db.common.TableMetadataManager; import com.scalar.db.common.checker.OperationChecker; -import com.scalar.db.common.error.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import java.io.IOException; diff --git a/core/src/main/java/com/scalar/db/storage/dynamo/DynamoConfig.java b/core/src/main/java/com/scalar/db/storage/dynamo/DynamoConfig.java index d964e0df56..89244cc9b2 100644 --- a/core/src/main/java/com/scalar/db/storage/dynamo/DynamoConfig.java +++ b/core/src/main/java/com/scalar/db/storage/dynamo/DynamoConfig.java @@ -2,7 +2,7 @@ import static com.scalar.db.config.ConfigUtils.getString; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.config.DatabaseConfig; import java.util.Optional; import javax.annotation.Nullable; diff --git a/core/src/main/java/com/scalar/db/storage/dynamo/DynamoOperationChecker.java b/core/src/main/java/com/scalar/db/storage/dynamo/DynamoOperationChecker.java index f8b77b8971..99316fae96 100644 --- a/core/src/main/java/com/scalar/db/storage/dynamo/DynamoOperationChecker.java +++ b/core/src/main/java/com/scalar/db/storage/dynamo/DynamoOperationChecker.java @@ -5,11 +5,11 @@ import com.scalar.db.api.Mutation; import com.scalar.db.api.Put; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.StorageInfoProvider; import com.scalar.db.common.TableMetadataManager; import com.scalar.db.common.checker.ColumnChecker; import com.scalar.db.common.checker.OperationChecker; -import com.scalar.db.common.error.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; diff --git a/core/src/main/java/com/scalar/db/storage/dynamo/PutStatementHandler.java b/core/src/main/java/com/scalar/db/storage/dynamo/PutStatementHandler.java index 19910819f7..1318c40d04 100644 --- a/core/src/main/java/com/scalar/db/storage/dynamo/PutStatementHandler.java +++ 
b/core/src/main/java/com/scalar/db/storage/dynamo/PutStatementHandler.java @@ -7,8 +7,8 @@ import com.scalar.db.api.PutIfExists; import com.scalar.db.api.PutIfNotExists; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.TableMetadataManager; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.storage.NoMutationException; import com.scalar.db.exception.storage.RetriableExecutionException; diff --git a/core/src/main/java/com/scalar/db/storage/dynamo/SelectStatementHandler.java b/core/src/main/java/com/scalar/db/storage/dynamo/SelectStatementHandler.java index 525b34f536..05bc51110a 100644 --- a/core/src/main/java/com/scalar/db/storage/dynamo/SelectStatementHandler.java +++ b/core/src/main/java/com/scalar/db/storage/dynamo/SelectStatementHandler.java @@ -12,9 +12,9 @@ import com.scalar.db.api.Scanner; import com.scalar.db.api.Selection; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.EmptyScanner; import com.scalar.db.common.TableMetadataManager; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.Column; import com.scalar.db.io.Key; diff --git a/core/src/main/java/com/scalar/db/storage/dynamo/bytes/BlobBytesEncoder.java b/core/src/main/java/com/scalar/db/storage/dynamo/bytes/BlobBytesEncoder.java index 066c1e8e2d..d675480f41 100644 --- a/core/src/main/java/com/scalar/db/storage/dynamo/bytes/BlobBytesEncoder.java +++ b/core/src/main/java/com/scalar/db/storage/dynamo/bytes/BlobBytesEncoder.java @@ -3,7 +3,7 @@ import static com.scalar.db.storage.dynamo.bytes.BytesUtils.mask; import com.scalar.db.api.Scan.Ordering.Order; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.io.BlobColumn; import java.nio.ByteBuffer; import javax.annotation.concurrent.ThreadSafe; diff --git a/core/src/main/java/com/scalar/db/storage/dynamo/bytes/TextBytesEncoder.java b/core/src/main/java/com/scalar/db/storage/dynamo/bytes/TextBytesEncoder.java index f99015f673..77e4459c84 100644 --- a/core/src/main/java/com/scalar/db/storage/dynamo/bytes/TextBytesEncoder.java +++ b/core/src/main/java/com/scalar/db/storage/dynamo/bytes/TextBytesEncoder.java @@ -3,7 +3,7 @@ import static com.scalar.db.storage.dynamo.bytes.BytesUtils.mask; import com.scalar.db.api.Scan.Ordering.Order; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.io.TextColumn; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/AbstractRdbEngine.java b/core/src/main/java/com/scalar/db/storage/jdbc/AbstractRdbEngine.java index d52bbd00de..d4e1331af9 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/AbstractRdbEngine.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/AbstractRdbEngine.java @@ -1,6 +1,6 @@ package com.scalar.db.storage.jdbc; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.io.DataType; import java.sql.JDBCType; import javax.annotation.Nullable; diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcAdmin.java b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcAdmin.java index 6400f3f98c..726e8f3665 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcAdmin.java +++ 
b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcAdmin.java @@ -14,8 +14,8 @@ import com.scalar.db.api.Scan.Ordering.Order; import com.scalar.db.api.StorageInfo; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.StorageInfoImpl; -import com.scalar.db.common.error.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcConfig.java b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcConfig.java index 9d0aa3529a..d6c78a8158 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcConfig.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcConfig.java @@ -4,7 +4,7 @@ import static com.scalar.db.config.ConfigUtils.getInt; import static com.scalar.db.config.ConfigUtils.getString; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.config.DatabaseConfig; import java.time.LocalDate; import java.time.format.DateTimeFormatter; diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcDatabase.java b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcDatabase.java index e238a1aa28..ee2d4dbfe9 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcDatabase.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcDatabase.java @@ -11,10 +11,10 @@ import com.scalar.db.api.Scan; import com.scalar.db.api.Scanner; import com.scalar.db.common.AbstractDistributedStorage; +import com.scalar.db.common.CoreError; import com.scalar.db.common.StorageInfoProvider; import com.scalar.db.common.TableMetadataManager; import com.scalar.db.common.checker.OperationChecker; -import com.scalar.db.common.error.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.storage.NoMutationException; diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcService.java b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcService.java index 5e6fd1f2c1..f672a39f8a 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcService.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcService.java @@ -12,9 +12,9 @@ import com.scalar.db.api.ScanAll; import com.scalar.db.api.Scanner; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.TableMetadataManager; import com.scalar.db.common.checker.OperationChecker; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.storage.jdbc.query.DeleteQuery; import com.scalar.db.storage.jdbc.query.QueryBuilder; diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineDb2.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineDb2.java index 5459098e5f..6556fae609 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineDb2.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineDb2.java @@ -7,7 +7,7 @@ import com.ibm.db2.jcc.DB2BaseDataSource; import com.scalar.db.api.LikeExpression; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; import com.scalar.db.io.DateColumn; diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineFactory.java 
b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineFactory.java index 2bb6c31e31..b8588d6e8b 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineFactory.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineFactory.java @@ -1,6 +1,6 @@ package com.scalar.db.storage.jdbc; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; /** Factory class of subclasses of {@link RdbEngineStrategy} */ public final class RdbEngineFactory { diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMysql.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMysql.java index dd1b52f9ed..9a2c2800c0 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMysql.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMysql.java @@ -3,7 +3,7 @@ import com.google.common.annotations.VisibleForTesting; import com.scalar.db.api.LikeExpression; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; import com.scalar.db.io.TimestampTZColumn; diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineOracle.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineOracle.java index 846a7935c2..425697ec4a 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineOracle.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineOracle.java @@ -5,7 +5,7 @@ import com.google.common.annotations.VisibleForTesting; import com.scalar.db.api.LikeExpression; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; import com.scalar.db.storage.jdbc.query.MergeIntoQuery; diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEnginePostgresql.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEnginePostgresql.java index ce296786ff..28b0b806ea 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEnginePostgresql.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEnginePostgresql.java @@ -3,7 +3,7 @@ import static com.scalar.db.util.ScalarDbUtils.getFullTableName; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; import com.scalar.db.storage.jdbc.query.InsertOnConflictDoUpdateQuery; diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlServer.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlServer.java index 2ada283dcf..c90a697b5c 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlServer.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlServer.java @@ -3,7 +3,7 @@ import com.google.common.collect.ImmutableMap; import com.scalar.db.api.LikeExpression; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; import com.scalar.db.storage.jdbc.query.MergeQuery; diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/ScannerImpl.java b/core/src/main/java/com/scalar/db/storage/jdbc/ScannerImpl.java index 44b11c8b0a..18b477bacc 100644 --- 
a/core/src/main/java/com/scalar/db/storage/jdbc/ScannerImpl.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/ScannerImpl.java @@ -2,7 +2,7 @@ import com.scalar.db.api.Result; import com.scalar.db.common.AbstractScanner; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.io.IOException; diff --git a/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorage.java b/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorage.java index 015225b131..9a965e49ee 100644 --- a/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorage.java +++ b/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorage.java @@ -14,7 +14,7 @@ import com.scalar.db.api.Scan; import com.scalar.db.api.Scanner; import com.scalar.db.common.AbstractDistributedStorage; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.service.StorageFactory; diff --git a/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorageConfig.java b/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorageConfig.java index a352b455e7..863759b8f1 100644 --- a/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorageConfig.java +++ b/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorageConfig.java @@ -4,7 +4,7 @@ import static com.scalar.db.config.ConfigUtils.getStringArray; import com.google.common.collect.ImmutableMap; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.config.DatabaseConfig; import java.util.Map; import java.util.Properties; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/CommitHandler.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/CommitHandler.java index f220889e3c..b8a35e9157 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/CommitHandler.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/CommitHandler.java @@ -6,7 +6,7 @@ import com.google.errorprone.annotations.concurrent.LazyInit; import com.scalar.db.api.DistributedStorage; import com.scalar.db.api.TransactionState; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.storage.NoMutationException; import com.scalar.db.exception.storage.RetriableExecutionException; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommit.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommit.java index 20fcbf89c8..a94fb33307 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommit.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommit.java @@ -14,7 +14,7 @@ import com.scalar.db.api.Update; import com.scalar.db.api.Upsert; import com.scalar.db.common.AbstractDistributedTransaction; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.transaction.CommitConflictException; import com.scalar.db.exception.transaction.CommitException; diff --git 
a/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdmin.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdmin.java index 8d86be812e..d9e5d6c313 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdmin.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdmin.java @@ -11,7 +11,7 @@ import com.scalar.db.api.DistributedStorageAdmin; import com.scalar.db.api.DistributedTransactionAdmin; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitMutationOperationChecker.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitMutationOperationChecker.java index 77c1224e3c..935c494048 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitMutationOperationChecker.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitMutationOperationChecker.java @@ -13,8 +13,8 @@ import com.scalar.db.api.PutIfExists; import com.scalar.db.api.PutIfNotExists; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.CoreError; import com.scalar.db.common.checker.ConditionChecker; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import javax.annotation.concurrent.ThreadSafe; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitUtils.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitUtils.java index d4f5d18d7a..4ca0f509cc 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitUtils.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitUtils.java @@ -12,7 +12,7 @@ import com.scalar.db.api.UpdateIf; import com.scalar.db.api.UpdateIfExists; import com.scalar.db.api.Upsert; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.transaction.UnsatisfiedConditionException; import com.scalar.db.io.Column; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/CrudHandler.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/CrudHandler.java index 0dadeaeca4..0c34613e8c 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/CrudHandler.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/CrudHandler.java @@ -23,7 +23,7 @@ import com.scalar.db.api.TableMetadata; import com.scalar.db.api.TransactionCrudOperable; import com.scalar.db.common.AbstractTransactionCrudOperableScanner; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.transaction.CrudConflictException; import com.scalar.db.exception.transaction.CrudException; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/FilteredResult.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/FilteredResult.java index 7cd1adc2a0..b9318180b8 100644 --- 
a/core/src/main/java/com/scalar/db/transaction/consensuscommit/FilteredResult.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/FilteredResult.java @@ -5,7 +5,7 @@ import com.scalar.db.api.Result; import com.scalar.db.api.TableMetadata; import com.scalar.db.common.AbstractResult; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.io.Column; import com.scalar.db.io.Key; import java.nio.ByteBuffer; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/MutationConditionsValidator.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/MutationConditionsValidator.java index 1423d72b45..2d165cf577 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/MutationConditionsValidator.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/MutationConditionsValidator.java @@ -11,7 +11,7 @@ import com.scalar.db.api.PutIf; import com.scalar.db.api.PutIfExists; import com.scalar.db.api.PutIfNotExists; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.transaction.UnsatisfiedConditionException; import com.scalar.db.io.Column; import java.util.List; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/RecoveryExecutor.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/RecoveryExecutor.java index c240b44d41..4c0079b181 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/RecoveryExecutor.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/RecoveryExecutor.java @@ -10,8 +10,8 @@ import com.scalar.db.api.Selection; import com.scalar.db.api.TableMetadata; import com.scalar.db.api.TransactionState; +import com.scalar.db.common.CoreError; import com.scalar.db.common.ResultImpl; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.transaction.CrudException; import com.scalar.db.io.BigIntColumn; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/Snapshot.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/Snapshot.java index f01b5397d3..27c66e7bb4 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/Snapshot.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/Snapshot.java @@ -22,7 +22,7 @@ import com.scalar.db.api.Scanner; import com.scalar.db.api.Selection.Conjunction; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.transaction.CrudException; import com.scalar.db.exception.transaction.PreparationConflictException; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/TransactionTableMetadataManager.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/TransactionTableMetadataManager.java index b77e3a1ee1..1f1c9fb3a9 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/TransactionTableMetadataManager.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/TransactionTableMetadataManager.java @@ -6,7 +6,7 @@ import com.scalar.db.api.DistributedStorageAdmin; import com.scalar.db.api.Operation; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import 
com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.util.ScalarDbUtils; import java.util.Objects; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/TwoPhaseConsensusCommit.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/TwoPhaseConsensusCommit.java index ee36e31488..f7ba7b3ab2 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/TwoPhaseConsensusCommit.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/TwoPhaseConsensusCommit.java @@ -14,7 +14,7 @@ import com.scalar.db.api.Update; import com.scalar.db.api.Upsert; import com.scalar.db.common.AbstractTwoPhaseCommitTransaction; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.transaction.CommitConflictException; import com.scalar.db.exception.transaction.CrudConflictException; diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/TwoPhaseConsensusCommitManager.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/TwoPhaseConsensusCommitManager.java index cb38e72356..4a0ae7cddf 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/TwoPhaseConsensusCommitManager.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/TwoPhaseConsensusCommitManager.java @@ -20,7 +20,7 @@ import com.scalar.db.api.Upsert; import com.scalar.db.common.AbstractTransactionManagerCrudOperableScanner; import com.scalar.db.common.AbstractTwoPhaseCommitTransactionManager; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.transaction.CommitConflictException; import com.scalar.db.exception.transaction.CrudConflictException; diff --git a/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransaction.java b/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransaction.java index b29bf7ae31..b202d1f75b 100644 --- a/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransaction.java +++ b/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransaction.java @@ -21,7 +21,7 @@ import com.scalar.db.api.Upsert; import com.scalar.db.common.AbstractDistributedTransaction; import com.scalar.db.common.AbstractTransactionCrudOperableScanner; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.transaction.CommitConflictException; import com.scalar.db.exception.transaction.CommitException; diff --git a/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransactionManager.java b/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransactionManager.java index 4d0c4b3b71..4031568a83 100644 --- a/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransactionManager.java +++ b/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransactionManager.java @@ -18,11 +18,11 @@ import com.scalar.db.api.Upsert; import com.scalar.db.common.AbstractDistributedTransactionManager; import com.scalar.db.common.AbstractTransactionManagerCrudOperableScanner; +import com.scalar.db.common.CoreError; import com.scalar.db.common.ReadOnlyDistributedTransaction; import com.scalar.db.common.StorageInfoProvider; import com.scalar.db.common.TableMetadataManager; import com.scalar.db.common.checker.OperationChecker; -import com.scalar.db.common.error.CoreError; import 
com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.transaction.CommitConflictException; import com.scalar.db.exception.transaction.CrudConflictException; diff --git a/core/src/main/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionManager.java b/core/src/main/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionManager.java index 573aec61f7..39cc6fdfdd 100644 --- a/core/src/main/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionManager.java +++ b/core/src/main/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionManager.java @@ -28,7 +28,7 @@ import com.scalar.db.api.Upsert; import com.scalar.db.common.AbstractDistributedTransactionManager; import com.scalar.db.common.AbstractTransactionManagerCrudOperableScanner; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.config.DatabaseConfig; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.storage.NoMutationException; diff --git a/core/src/main/java/com/scalar/db/util/ScalarDbUtils.java b/core/src/main/java/com/scalar/db/util/ScalarDbUtils.java index a4261b7517..24eefd6fb9 100644 --- a/core/src/main/java/com/scalar/db/util/ScalarDbUtils.java +++ b/core/src/main/java/com/scalar/db/util/ScalarDbUtils.java @@ -22,7 +22,7 @@ import com.scalar.db.api.UpdateIf; import com.scalar.db.api.UpdateIfExists; import com.scalar.db.api.Upsert; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.common.CoreError; import com.scalar.db.io.BigIntColumn; import com.scalar.db.io.BigIntValue; import com.scalar.db.io.BlobColumn; diff --git a/core/src/test/java/com/scalar/db/common/error/CoreErrorTest.java b/core/src/test/java/com/scalar/db/common/error/CoreErrorTest.java index 5ab0c4397e..53cb827ce2 100644 --- a/core/src/test/java/com/scalar/db/common/error/CoreErrorTest.java +++ b/core/src/test/java/com/scalar/db/common/error/CoreErrorTest.java @@ -1,6 +1,7 @@ package com.scalar.db.common.error; import com.scalar.db.api.Put; +import com.scalar.db.common.CoreError; import com.scalar.db.io.Key; import java.util.Arrays; import org.assertj.core.api.Assertions; diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java index 015366258e..c3aaeee3af 100755 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java @@ -6,12 +6,12 @@ import com.scalar.db.api.DistributedStorage; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; import com.scalar.db.dataloader.cli.exception.DirectoryValidationException; import com.scalar.db.dataloader.cli.util.DirectoryUtils; import com.scalar.db.dataloader.cli.util.FileUtils; import com.scalar.db.dataloader.cli.util.InvalidFilePathException; import com.scalar.db.dataloader.core.ColumnKeyValue; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.FileFormat; import com.scalar.db.dataloader.core.ScanRange; import com.scalar.db.dataloader.core.dataexport.CsvExportManager; @@ -57,9 +57,8 @@ public Integer call() throws Exception { validateOutputDirectory(); FileUtils.validateFilePath(scalarDbPropertiesFilePath); validatePositiveValue( - 
spec.commandLine(), dataChunkSize, CoreError.DATA_LOADER_INVALID_DATA_CHUNK_SIZE); - validatePositiveValue( - spec.commandLine(), maxThreads, CoreError.DATA_LOADER_INVALID_MAX_THREADS); + spec.commandLine(), dataChunkSize, DataLoaderError.INVALID_DATA_CHUNK_SIZE); + validatePositiveValue(spec.commandLine(), maxThreads, DataLoaderError.INVALID_MAX_THREADS); StorageFactory storageFactory = StorageFactory.create(scalarDbPropertiesFilePath); TableMetadataService metaDataService = @@ -110,8 +109,7 @@ public Integer call() throws Exception { private String getScalarDbPropertiesFilePath() { if (StringUtils.isBlank(configFilePath)) { - throw new IllegalArgumentException( - CoreError.DATA_LOADER_CONFIG_FILE_PATH_BLANK.buildMessage()); + throw new IllegalArgumentException(DataLoaderError.CONFIG_FILE_PATH_BLANK.buildMessage()); } return Objects.equals(configFilePath, DEFAULT_CONFIG_FILE_NAME) ? Paths.get("").toAbsolutePath().resolve(DEFAULT_CONFIG_FILE_NAME).toString() diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/MultiColumnKeyValueConverter.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/MultiColumnKeyValueConverter.java index 6479089f18..d5eb25674b 100755 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/MultiColumnKeyValueConverter.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/MultiColumnKeyValueConverter.java @@ -1,8 +1,8 @@ package com.scalar.db.dataloader.cli.command.dataexport; -import com.scalar.db.common.error.CoreError; import com.scalar.db.dataloader.cli.util.CommandLineInputUtils; import com.scalar.db.dataloader.core.ColumnKeyValue; +import com.scalar.db.dataloader.core.DataLoaderError; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; @@ -37,7 +37,7 @@ public class MultiColumnKeyValueConverter public List convert(String keyValue) { if (keyValue == null || keyValue.trim().isEmpty()) { throw new IllegalArgumentException( - CoreError.DATA_LOADER_NULL_OR_EMPTY_KEY_VALUE_INPUT.buildMessage()); + DataLoaderError.NULL_OR_EMPTY_KEY_VALUE_INPUT.buildMessage()); } return Arrays.stream(CommandLineInputUtils.splitByDelimiter(keyValue, ",", 0)) .map(CommandLineInputUtils::parseKeyValue) diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataimport/ImportCommand.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataimport/ImportCommand.java index a505a42ade..bc78514b53 100755 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataimport/ImportCommand.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataimport/ImportCommand.java @@ -5,7 +5,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.scalar.db.api.DistributedStorageAdmin; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.FileFormat; import com.scalar.db.dataloader.core.ScalarDbMode; import com.scalar.db.dataloader.core.dataimport.ImportManager; @@ -55,15 +55,12 @@ public Integer call() throws Exception { validateImportTarget(controlFilePath, namespace, tableName); validateLogDirectory(logDirectory); validatePositiveValue( - spec.commandLine(), dataChunkSize, CoreError.DATA_LOADER_INVALID_DATA_CHUNK_SIZE); + spec.commandLine(), dataChunkSize, DataLoaderError.INVALID_DATA_CHUNK_SIZE); validatePositiveValue( - 
spec.commandLine(), transactionSize, CoreError.DATA_LOADER_INVALID_TRANSACTION_SIZE); + spec.commandLine(), transactionSize, DataLoaderError.INVALID_TRANSACTION_SIZE); + validatePositiveValue(spec.commandLine(), maxThreads, DataLoaderError.INVALID_MAX_THREADS); validatePositiveValue( - spec.commandLine(), maxThreads, CoreError.DATA_LOADER_INVALID_MAX_THREADS); - validatePositiveValue( - spec.commandLine(), - dataChunkQueueSize, - CoreError.DATA_LOADER_INVALID_DATA_CHUNK_QUEUE_SIZE); + spec.commandLine(), dataChunkQueueSize, DataLoaderError.INVALID_DATA_CHUNK_QUEUE_SIZE); ControlFile controlFile = parseControlFileFromPath(controlFilePath).orElse(null); ImportOptions importOptions = createImportOptions(controlFile); ImportLoggerConfig config = @@ -192,7 +189,7 @@ private void validateImportTarget(String controlFilePath, String namespace, Stri if (StringUtils.isBlank(controlFilePath) && (StringUtils.isBlank(namespace) || StringUtils.isBlank(tableName))) { throw new ParameterException( - spec.commandLine(), CoreError.DATA_LOADER_IMPORT_TARGET_MISSING.buildMessage()); + spec.commandLine(), DataLoaderError.IMPORT_TARGET_MISSING.buildMessage()); } // Make sure the control file exists when a path is provided @@ -201,7 +198,7 @@ private void validateImportTarget(String controlFilePath, String namespace, Stri if (!Files.exists(path)) { throw new ParameterException( spec.commandLine(), - CoreError.DATA_LOADER_MISSING_IMPORT_FILE.buildMessage( + DataLoaderError.MISSING_IMPORT_FILE.buildMessage( controlFilePath, FILE_OPTION_NAME_LONG_FORMAT)); } } @@ -224,7 +221,7 @@ private void validateLogDirectory(String logDirectory) throws ParameterException if (!Files.isWritable(logDirectoryPath)) { throw new ParameterException( spec.commandLine(), - CoreError.DATA_LOADER_LOG_DIRECTORY_CREATION_FAILED.buildMessage( + DataLoaderError.LOG_DIRECTORY_CREATION_FAILED.buildMessage( logDirectoryPath.toAbsolutePath())); } } else { @@ -234,7 +231,7 @@ private void validateLogDirectory(String logDirectory) throws ParameterException } catch (IOException e) { throw new ParameterException( spec.commandLine(), - CoreError.DATA_LOADER_LOG_DIRECTORY_CREATION_FAILED.buildMessage( + DataLoaderError.LOG_DIRECTORY_CREATION_FAILED.buildMessage( logDirectoryPath.toAbsolutePath())); } } @@ -248,7 +245,7 @@ private void validateLogDirectory(String logDirectory) throws ParameterException if (!Files.isWritable(logDirectoryPath)) { throw new ParameterException( spec.commandLine(), - CoreError.DATA_LOADER_LOG_DIRECTORY_WRITE_ACCESS_DENIED.buildMessage( + DataLoaderError.LOG_DIRECTORY_WRITE_ACCESS_DENIED.buildMessage( logDirectoryPath.toAbsolutePath())); } } @@ -271,8 +268,7 @@ private Optional parseControlFileFromPath(String controlFilePath) { return Optional.of(controlFile); } catch (IOException e) { throw new ParameterException( - spec.commandLine(), - CoreError.DATA_LOADER_INVALID_CONTROL_FILE.buildMessage(controlFilePath)); + spec.commandLine(), DataLoaderError.INVALID_CONTROL_FILE.buildMessage(controlFilePath)); } } diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java index 4724bf7024..baea5e1970 100644 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtils.java @@ -1,6 +1,6 @@ package com.scalar.db.dataloader.cli.util; -import 
com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import java.util.AbstractMap; import java.util.Map; import java.util.Objects; @@ -19,14 +19,14 @@ public class CommandLineInputUtils { public static Map.Entry<String, String> parseKeyValue(String keyValue) { if (StringUtils.isBlank(keyValue)) { throw new IllegalArgumentException( - CoreError.DATA_LOADER_NULL_OR_EMPTY_KEY_VALUE_INPUT.buildMessage()); + DataLoaderError.NULL_OR_EMPTY_KEY_VALUE_INPUT.buildMessage()); } String[] parts = splitByDelimiter(keyValue, "=", 2); if (parts.length != 2 || parts[0].trim().isEmpty() || parts[1].trim().isEmpty()) { throw new IllegalArgumentException( - CoreError.DATA_LOADER_INVALID_KEY_VALUE_INPUT.buildMessage(keyValue)); + DataLoaderError.INVALID_KEY_VALUE_INPUT.buildMessage(keyValue)); } return new AbstractMap.SimpleEntry<>(parts[0].trim(), parts[1].trim()); } @@ -42,9 +42,8 @@ public static Map.Entry<String, String> parseKeyValue(String keyValue) { * @throws NullPointerException if value or delimiter is null */ public static String[] splitByDelimiter(String value, String delimiter, int limit) { - Objects.requireNonNull(value, CoreError.DATA_LOADER_SPLIT_INPUT_VALUE_NULL.buildMessage()); - Objects.requireNonNull( - delimiter, CoreError.DATA_LOADER_SPLIT_INPUT_DELIMITER_NULL.buildMessage()); + Objects.requireNonNull(value, DataLoaderError.SPLIT_INPUT_VALUE_NULL.buildMessage()); + Objects.requireNonNull(delimiter, DataLoaderError.SPLIT_INPUT_DELIMITER_NULL.buildMessage()); return value.split(delimiter, limit); } @@ -56,7 +55,8 @@ public static String[] splitByDelimiter(String value, String delimiter, int limi * @param value the integer value to validate * @param error the error that is thrown when the value is invalid */ - public static void validatePositiveValue(CommandLine commandLine, int value, CoreError error) { + public static void validatePositiveValue( + CommandLine commandLine, int value, DataLoaderError error) { if (value < 1) { throw new CommandLine.ParameterException(commandLine, error.buildMessage()); } diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/DirectoryUtils.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/DirectoryUtils.java index a9a14ca4d7..48737f97e9 100755 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/DirectoryUtils.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/DirectoryUtils.java @@ -1,7 +1,7 @@ package com.scalar.db.dataloader.cli.util; -import com.scalar.db.common.error.CoreError; import com.scalar.db.dataloader.cli.exception.DirectoryValidationException; +import com.scalar.db.dataloader.core.DataLoaderError; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -26,7 +26,7 @@ public static void validateWorkingDirectory() throws DirectoryValidationExceptio // Check if the current working directory is writable if (!Files.isWritable(workingDirectoryPath)) { throw new DirectoryValidationException( - CoreError.DATA_LOADER_DIRECTORY_WRITE_ACCESS.buildMessage( + DataLoaderError.DIRECTORY_WRITE_ACCESS.buildMessage( workingDirectoryPath.toAbsolutePath())); } } @@ -42,7 +42,7 @@ public static void validateOrCreateTargetDirectory(String directoryPath) throws DirectoryValidationException { if (StringUtils.isBlank(directoryPath)) { throw new IllegalArgumentException( - CoreError.DATA_LOADER_MISSING_DIRECTORY_NOT_ALLOWED.buildMessage()); + DataLoaderError.MISSING_DIRECTORY_NOT_ALLOWED.buildMessage()); } Path path = Paths.get(directoryPath); @@
-50,12 +50,11 @@ public static void validateOrCreateTargetDirectory(String directoryPath) if (Files.exists(path)) { if (!Files.isDirectory(path)) { throw new DirectoryValidationException( - CoreError.DATA_LOADER_PATH_IS_NOT_A_DIRECTORY.buildMessage(path)); + DataLoaderError.PATH_IS_NOT_A_DIRECTORY.buildMessage(path)); } if (!Files.isWritable(path)) { throw new DirectoryValidationException( - CoreError.DATA_LOADER_DIRECTORY_WRITE_ACCESS_NOT_ALLOWED.buildMessage( - path.toAbsolutePath())); + DataLoaderError.DIRECTORY_WRITE_ACCESS_NOT_ALLOWED.buildMessage(path.toAbsolutePath())); } } else { @@ -64,7 +63,7 @@ public static void validateOrCreateTargetDirectory(String directoryPath) Files.createDirectories(path); } catch (IOException e) { throw new DirectoryValidationException( - CoreError.DATA_LOADER_DIRECTORY_CREATE_FAILED.buildMessage( + DataLoaderError.DIRECTORY_CREATE_FAILED.buildMessage( path.toAbsolutePath(), e.getMessage())); } } diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/FileUtils.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/FileUtils.java index ecaf341409..4e54695e20 100755 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/FileUtils.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/util/FileUtils.java @@ -1,6 +1,6 @@ package com.scalar.db.dataloader.cli.util; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import java.nio.file.Path; import java.nio.file.Paths; import org.apache.commons.lang3.StringUtils; @@ -15,13 +15,12 @@ public class FileUtils { */ public static void validateFilePath(String filePath) throws InvalidFilePathException { if (StringUtils.isBlank(filePath)) { - throw new IllegalArgumentException(CoreError.DATA_LOADER_FILE_PATH_IS_BLANK.buildMessage()); + throw new IllegalArgumentException(DataLoaderError.FILE_PATH_IS_BLANK.buildMessage()); } Path pathToCheck = Paths.get(filePath); if (!pathToCheck.toFile().exists()) { - throw new InvalidFilePathException( - CoreError.DATA_LOADER_FILE_NOT_FOUND.buildMessage(pathToCheck)); + throw new InvalidFilePathException(DataLoaderError.FILE_NOT_FOUND.buildMessage(pathToCheck)); } } } diff --git a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommandTest.java b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommandTest.java index a8ba19d494..56518f8e71 100755 --- a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommandTest.java +++ b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommandTest.java @@ -2,7 +2,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.FileFormat; import java.io.File; import java.nio.file.Paths; @@ -39,7 +39,7 @@ void call_withBlankScalarDBConfigurationFile_shouldThrowException() { exportCommand::call, "Expected to throw FileNotFound exception as configuration path is invalid"); Assertions.assertEquals( - CoreError.DATA_LOADER_CONFIG_FILE_PATH_BLANK.buildMessage(), thrown.getMessage()); + DataLoaderError.CONFIG_FILE_PATH_BLANK.buildMessage(), thrown.getMessage()); } @Test diff --git a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/MultiColumnKeyValueConverterTest.java 
b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/MultiColumnKeyValueConverterTest.java index 89adb8f077..149a8acb34 100755 --- a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/MultiColumnKeyValueConverterTest.java +++ b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/MultiColumnKeyValueConverterTest.java @@ -2,8 +2,8 @@ import static org.junit.jupiter.api.Assertions.assertThrows; -import com.scalar.db.common.error.CoreError; import com.scalar.db.dataloader.core.ColumnKeyValue; +import com.scalar.db.dataloader.core.DataLoaderError; import java.util.Collections; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -21,7 +21,7 @@ void convert_withInvalidValue_ShouldThrowError() { () -> multiColumnKeyValueConverter.convert(value), "Expected to throw exception"); Assertions.assertEquals( - CoreError.DATA_LOADER_INVALID_KEY_VALUE_INPUT.buildMessage("id 15"), thrown.getMessage()); + DataLoaderError.INVALID_KEY_VALUE_INPUT.buildMessage("id 15"), thrown.getMessage()); } @Test diff --git a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java index 50167ccd41..a865b6ec72 100755 --- a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java +++ b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/command/dataexport/ScanOrderingConverterTest.java @@ -3,7 +3,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import com.scalar.db.api.Scan; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -22,7 +22,7 @@ void callConvert_withInvalidValue_shouldThrowException() { () -> scanOrderingConverter.convert(value), "Expected to throw exception"); Assertions.assertEquals( - CoreError.DATA_LOADER_INVALID_KEY_VALUE_INPUT.buildMessage(value), thrown.getMessage()); + DataLoaderError.INVALID_KEY_VALUE_INPUT.buildMessage(value), thrown.getMessage()); } @Test diff --git a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtilsTest.java b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtilsTest.java index 49e735b522..3a07c267d5 100644 --- a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtilsTest.java +++ b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/util/CommandLineInputUtilsTest.java @@ -7,7 +7,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import java.util.Map; import org.junit.jupiter.api.Test; import picocli.CommandLine; @@ -84,9 +84,7 @@ void splitByDelimiter_nullValue_shouldThrowException() { assertThrows( NullPointerException.class, () -> CommandLineInputUtils.splitByDelimiter(null, "=", 2)); assertTrue( - exception - .getMessage() - .contains(CoreError.DATA_LOADER_SPLIT_INPUT_VALUE_NULL.buildMessage())); + exception.getMessage().contains(DataLoaderError.SPLIT_INPUT_VALUE_NULL.buildMessage())); } @Test @@ -96,9 +94,7 @@ void splitByDelimiter_nullDelimiter_shouldThrowException() { NullPointerException.class, () -> CommandLineInputUtils.splitByDelimiter("a=b", null, 
2)); assertTrue( - exception - .getMessage() - .contains(CoreError.DATA_LOADER_SPLIT_INPUT_DELIMITER_NULL.buildMessage())); + exception.getMessage().contains(DataLoaderError.SPLIT_INPUT_DELIMITER_NULL.buildMessage())); } @Test @@ -111,7 +107,7 @@ public void validatePositiveValue_positiveValue_shouldNotThrowException() { assertDoesNotThrow( () -> CommandLineInputUtils.validatePositiveValue( - commandLine, positiveValue, CoreError.DATA_LOADER_INVALID_DATA_CHUNK_SIZE)); + commandLine, positiveValue, DataLoaderError.INVALID_DATA_CHUNK_SIZE)); } @Test @@ -124,7 +120,7 @@ public void validatePositiveValue_one_shouldNotThrowException() { assertDoesNotThrow( () -> CommandLineInputUtils.validatePositiveValue( - commandLine, minimumPositiveValue, CoreError.DATA_LOADER_INVALID_DATA_CHUNK_SIZE)); + commandLine, minimumPositiveValue, DataLoaderError.INVALID_DATA_CHUNK_SIZE)); } @Test @@ -132,7 +128,7 @@ public void validatePositiveValue_zero_shouldThrowException() { // Arrange CommandLine commandLine = mock(CommandLine.class); int zeroValue = 0; - CoreError error = CoreError.DATA_LOADER_INVALID_DATA_CHUNK_SIZE; + DataLoaderError error = DataLoaderError.INVALID_DATA_CHUNK_SIZE; // Act & Assert CommandLine.ParameterException exception = @@ -149,7 +145,7 @@ public void validatePositiveValue_negativeValue_shouldThrowException() { // Arrange CommandLine commandLine = mock(CommandLine.class); int negativeValue = -5; - CoreError error = CoreError.DATA_LOADER_INVALID_TRANSACTION_SIZE; + DataLoaderError error = DataLoaderError.INVALID_TRANSACTION_SIZE; // Act & Assert CommandLine.ParameterException exception = @@ -173,9 +169,9 @@ public void validatePositiveValue_differentErrorTypes_shouldUseCorrectErrorMessa CommandLine.ParameterException.class, () -> CommandLineInputUtils.validatePositiveValue( - commandLine, negativeValue, CoreError.DATA_LOADER_INVALID_MAX_THREADS)); + commandLine, negativeValue, DataLoaderError.INVALID_MAX_THREADS)); assertTrue( - exception1.getMessage().contains(CoreError.DATA_LOADER_INVALID_MAX_THREADS.buildMessage())); + exception1.getMessage().contains(DataLoaderError.INVALID_MAX_THREADS.buildMessage())); // Act & Assert for DATA_LOADER_INVALID_DATA_CHUNK_QUEUE_SIZE CommandLine.ParameterException exception2 = @@ -183,12 +179,10 @@ public void validatePositiveValue_differentErrorTypes_shouldUseCorrectErrorMessa CommandLine.ParameterException.class, () -> CommandLineInputUtils.validatePositiveValue( - commandLine, - negativeValue, - CoreError.DATA_LOADER_INVALID_DATA_CHUNK_QUEUE_SIZE)); + commandLine, negativeValue, DataLoaderError.INVALID_DATA_CHUNK_QUEUE_SIZE)); assertTrue( exception2 .getMessage() - .contains(CoreError.DATA_LOADER_INVALID_DATA_CHUNK_QUEUE_SIZE.buildMessage())); + .contains(DataLoaderError.INVALID_DATA_CHUNK_QUEUE_SIZE.buildMessage())); } } diff --git a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/util/FileUtilsTest.java b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/util/FileUtilsTest.java index 550fbd32f9..4095f1bcf3 100755 --- a/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/util/FileUtilsTest.java +++ b/data-loader/cli/src/test/java/com/scalar/db/dataloader/cli/util/FileUtilsTest.java @@ -2,7 +2,7 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import java.nio.file.Paths; import org.junit.jupiter.api.Test; @@ -21,13 +21,13 @@ void validateFilePath_withValidFilePath_shouldNotThrowException() void 
validateFilePath_withInvalidFilePath_shouldThrowException() { assertThatThrownBy(() -> FileUtils.validateFilePath(currentPath + "/demo")) .isInstanceOf(InvalidFilePathException.class) - .hasMessage(CoreError.DATA_LOADER_FILE_NOT_FOUND.buildMessage(currentPath + "/demo")); + .hasMessage(DataLoaderError.FILE_NOT_FOUND.buildMessage(currentPath + "/demo")); } @Test void validateFilePath_withBlankFilePath_shouldThrowException() { assertThatThrownBy(() -> FileUtils.validateFilePath("")) .isInstanceOf(IllegalArgumentException.class) - .hasMessage(CoreError.DATA_LOADER_FILE_PATH_IS_BLANK.buildMessage()); + .hasMessage(DataLoaderError.FILE_PATH_IS_BLANK.buildMessage()); } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DataLoaderError.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DataLoaderError.java new file mode 100644 index 0000000000..d150d4df00 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/DataLoaderError.java @@ -0,0 +1,292 @@ +package com.scalar.db.dataloader.core; + +import com.scalar.db.common.error.Category; +import com.scalar.db.common.error.ScalarDbError; + +public enum DataLoaderError implements ScalarDbError { + + // + // Errors for the user error category + // + INVALID_DATA_CHUNK_QUEUE_SIZE( + Category.USER_ERROR, "0000", "Data chunk queue size must be greater than 0", "", ""), + DIRECTORY_WRITE_ACCESS_NOT_ALLOWED( + Category.USER_ERROR, + "0001", + "The directory '%s' does not have write permissions. Please ensure that the current user has write access to the directory", + "", + ""), + DIRECTORY_CREATE_FAILED( + Category.USER_ERROR, + "0002", + "Failed to create the directory '%s'. Please check if you have sufficient permissions and if there are any file system restrictions. Details: %s", + "", + ""), + MISSING_DIRECTORY_NOT_ALLOWED( + Category.USER_ERROR, "0003", "Directory path cannot be null or empty", "", ""), + MISSING_FILE_EXTENSION( + Category.USER_ERROR, + "0004", + "No file extension was found in the provided file name %s", + "", + ""), + INVALID_FILE_EXTENSION( + Category.USER_ERROR, + "0005", + "Invalid file extension: %s. Allowed extensions are: %s", + "", + ""), + INVALID_COLUMN_NON_EXISTENT( + Category.USER_ERROR, + "0006", + "Invalid key: Column %s does not exist in the table %s in namespace %s", + "", + ""), + INVALID_BASE64_ENCODING_FOR_COLUMN_VALUE( + Category.USER_ERROR, + "0007", + "Invalid base64 encoding for blob value '%s' for column %s in table %s in namespace %s", + "", + ""), + INVALID_NUMBER_FORMAT_FOR_COLUMN_VALUE( + Category.USER_ERROR, + "0008", + "Invalid number '%s' specified for column %s in table %s in namespace %s", + "", + ""), + ERROR_METHOD_NULL_ARGUMENT( + Category.USER_ERROR, "0009", "Method null argument not allowed", "", ""), + CLUSTERING_KEY_NOT_FOUND( + Category.USER_ERROR, "0010", "The provided clustering key %s was not found", "", ""), + INVALID_PROJECTION(Category.USER_ERROR, "0011", "The column '%s' was not found", "", ""), + INCOMPLETE_PARTITION_KEY( + Category.USER_ERROR, + "0012", + "The provided partition key is incomplete. Required key: %s", + "", + ""), + CLUSTERING_KEY_ORDER_MISMATCH( + Category.USER_ERROR, + "0013", + "The provided clustering-key order does not match the table schema. Required order: %s", + "", + ""), + PARTITION_KEY_ORDER_MISMATCH( + Category.USER_ERROR, + "0014", + "The provided partition-key order does not match the table schema. 
Required order: %s", + "", + ""), + MISSING_NAMESPACE_OR_TABLE( + Category.USER_ERROR, "0015", "Missing namespace or table: %s, %s", "", ""), + TABLE_METADATA_RETRIEVAL_FAILED( + Category.USER_ERROR, "0016", "Failed to retrieve table metadata. Details: %s", "", ""), + DUPLICATE_DATA_MAPPINGS( + Category.USER_ERROR, + "0017", + "Duplicate data mappings found for table '%s' in the control file", + "", + ""), + MISSING_COLUMN_MAPPING( + Category.USER_ERROR, + "0018", + "No mapping found for column '%s' in table '%s' in the control file. Control file validation set at 'FULL'. All columns need to be mapped", + "", + ""), + CONTROL_FILE_MISSING_DATA_MAPPINGS( + Category.USER_ERROR, "0019", "The control file is missing data mappings", "", ""), + TARGET_COLUMN_NOT_FOUND( + Category.USER_ERROR, + "0020", + "The target column '%s' for source field '%s' could not be found in table '%s'", + "", + ""), + MISSING_PARTITION_KEY( + Category.USER_ERROR, + "0021", + "The required partition key '%s' is missing in the control file mapping for table '%s'", + "", + ""), + MISSING_CLUSTERING_KEY( + Category.USER_ERROR, + "0022", + "The required clustering key '%s' is missing in the control file mapping for table '%s'", + "", + ""), + MULTIPLE_MAPPINGS_FOR_COLUMN_FOUND( + Category.USER_ERROR, + "0023", + "Duplicated data mappings found for column '%s' in table '%s'", + "", + ""), + MISSING_CLUSTERING_KEY_COLUMN( + Category.USER_ERROR, + "0024", + "Missing required field or column mapping for clustering key %s", + "", + ""), + MISSING_PARTITION_KEY_COLUMN( + Category.USER_ERROR, + "0025", + "Missing required field or column mapping for partition key %s", + "", + ""), + MISSING_COLUMN(Category.USER_ERROR, "0026", "Missing field or column mapping for %s", "", ""), + VALUE_TO_STRING_CONVERSION_FAILED( + Category.USER_ERROR, + "0027", + "Something went wrong while converting the ScalarDB values to strings. The table metadata and Value datatype probably do not match. 
Details: %s", + "", + ""), + FILE_FORMAT_NOT_SUPPORTED( + Category.USER_ERROR, "0028", "The provided file format is not supported : %s", "", ""), + COULD_NOT_FIND_PARTITION_KEY( + Category.USER_ERROR, "0029", "Could not find the partition key", "", ""), + UPSERT_INSERT_MISSING_COLUMNS( + Category.USER_ERROR, + "0030", + "The source record needs to contain all fields if the UPSERT turns into an INSERT", + "", + ""), + DATA_ALREADY_EXISTS(Category.USER_ERROR, "0031", "Record already exists", "", ""), + DATA_NOT_FOUND(Category.USER_ERROR, "0032", "Record was not found", "", ""), + COULD_NOT_FIND_CLUSTERING_KEY( + Category.USER_ERROR, "0033", "Could not find the clustering key", "", ""), + TABLE_METADATA_MISSING(Category.USER_ERROR, "0034", "No table metadata found", "", ""), + MISSING_SOURCE_FIELD( + Category.USER_ERROR, + "0035", + "The data mapping source field '%s' for table '%s' is missing in the JSON data record", + "", + ""), + CSV_DATA_MISMATCH( + Category.USER_ERROR, "0036", "The CSV row: %s does not match header: %s", "", ""), + JSON_CONTENT_START_ERROR( + Category.USER_ERROR, "0037", "Expected JSON file content to be an array", "", ""), + IMPORT_TARGET_MISSING( + Category.USER_ERROR, + "0038", + "Missing option: either the '--namespace' and '--table' options or the '--control-file' option must be specified", + "", + ""), + MISSING_IMPORT_FILE( + Category.USER_ERROR, + "0039", + "The file '%s' specified by the argument '%s' does not exist", + "", + ""), + LOG_DIRECTORY_WRITE_ACCESS_DENIED( + Category.USER_ERROR, "0040", "Cannot write to the log directory: %s", "", ""), + LOG_DIRECTORY_CREATION_FAILED( + Category.USER_ERROR, "0041", "Failed to create the log directory: %s", "", ""), + INVALID_CONTROL_FILE(Category.USER_ERROR, "0042", "Failed to parse the control file: %s", "", ""), + DIRECTORY_WRITE_ACCESS( + Category.USER_ERROR, + "0043", + "No permission to create or write files in the directory: %s", + "", + ""), + DIRECTORY_CREATION_FAILED( + Category.USER_ERROR, "0044", "Failed to create the directory: %s", "", ""), + PATH_IS_NOT_A_DIRECTORY( + Category.USER_ERROR, "0045", "Path exists but is not a directory: %s", "", ""), + FILE_PATH_IS_BLANK(Category.USER_ERROR, "0046", "File path must not be blank", "", ""), + FILE_NOT_FOUND(Category.USER_ERROR, "0047", "File not found: %s", "", ""), + INVALID_DATE_TIME_FOR_COLUMN_VALUE( + Category.USER_ERROR, + "0048", + "Invalid date time value '%s' specified for column %s in table %s in namespace %s", + "", + ""), + NULL_OR_EMPTY_KEY_VALUE_INPUT( + Category.USER_ERROR, "0049", "Key value cannot be null or empty", "", ""), + INVALID_KEY_VALUE_INPUT(Category.USER_ERROR, "0050", "Invalid key-value format: %s", "", ""), + SPLIT_INPUT_VALUE_NULL(Category.USER_ERROR, "0051", "Value must not be null", "", ""), + SPLIT_INPUT_DELIMITER_NULL(Category.USER_ERROR, "0052", "Delimiter must not be null", "", ""), + CONFIG_FILE_PATH_BLANK(Category.USER_ERROR, "0053", "Config file path must not be blank", "", ""), + INVALID_DATA_CHUNK_SIZE( + Category.USER_ERROR, "0054", "Data chunk size must be greater than 0", "", ""), + INVALID_TRANSACTION_SIZE( + Category.USER_ERROR, "0055", "Transaction size must be greater than 0", "", ""), + INVALID_MAX_THREADS( + Category.USER_ERROR, "0056", "Number of max threads must be greater than 0", "", ""), + + // + // Errors for the internal error category + // + ERROR_CRUD_EXCEPTION( + Category.INTERNAL_ERROR, + "0000", + "A problem occurred while trying to save the data. 
Details: %s", + "", + ""), + ERROR_SCAN( + Category.INTERNAL_ERROR, + "0001", + "A problem occurred while scanning. Are you sure you are running in the correct transaction mode? Details: %s", + "", + ""), + CSV_FILE_READ_FAILED( + Category.INTERNAL_ERROR, "0002", "Failed to read CSV file. Details: %s", "", ""), + CSV_FILE_HEADER_READ_FAILED( + Category.INTERNAL_ERROR, "0003", "Failed to CSV read header line. Details: %s", "", ""), + DATA_CHUNK_PROCESS_FAILED( + Category.INTERNAL_ERROR, + "0004", + "Data chunk processing was interrupted. Details: %s", + "", + ""), + JSON_FILE_READ_FAILED( + Category.INTERNAL_ERROR, "0005", "Failed to read JSON file. Details: %s", "", ""), + JSONLINES_FILE_READ_FAILED( + Category.INTERNAL_ERROR, "0006", "Failed to read JSON Lines file. Details: %s", "", ""), + ; + + private static final String COMPONENT_NAME = "DB-DATA-LOADER"; + + private final Category category; + private final String id; + private final String message; + private final String cause; + private final String solution; + + DataLoaderError(Category category, String id, String message, String cause, String solution) { + validate(COMPONENT_NAME, category, id, message, cause, solution); + + this.category = category; + this.id = id; + this.message = message; + this.cause = cause; + this.solution = solution; + } + + @Override + public String getComponentName() { + return COMPONENT_NAME; + } + + @Override + public Category getCategory() { + return category; + } + + @Override + public String getId() { + return id; + } + + @Override + public String getMessage() { + return message; + } + + @Override + public String getCause() { + return cause; + } + + @Override + public String getSolution() { + return solution; + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java index 7ba86c1bef..2a37c7c507 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/CsvProducerTask.java @@ -2,7 +2,7 @@ import com.scalar.db.api.Result; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.util.CsvUtil; import com.scalar.db.dataloader.core.util.DecimalUtil; import com.scalar.db.io.DataType; @@ -108,8 +108,7 @@ private String convertResultToCsv(Result result) { return stringBuilder.toString(); } catch (UnsupportedOperationException e) { - logger.error( - CoreError.DATA_LOADER_VALUE_TO_STRING_CONVERSION_FAILED.buildMessage(e.getMessage())); + logger.error(DataLoaderError.VALUE_TO_STRING_CONVERSION_FAILED.buildMessage(e.getMessage())); } return ""; } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java index 2339ab1366..21268e016d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/producer/ProducerTaskFactory.java @@ -1,7 +1,7 @@ package com.scalar.db.dataloader.core.dataexport.producer; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import 
com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.FileFormat; import com.scalar.db.io.DataType; import java.util.List; @@ -54,7 +54,7 @@ public ProducerTask createProducerTask( break; default: throw new IllegalArgumentException( - CoreError.DATA_LOADER_FILE_FORMAT_NOT_SUPPORTED.buildMessage(fileFormat.toString())); + DataLoaderError.FILE_FORMAT_NOT_SUPPORTED.buildMessage(fileFormat.toString())); } return producerTask; } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java index 1a04071600..f7869ca4c0 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidator.java @@ -2,7 +2,7 @@ import com.scalar.db.api.Scan; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.ScanRange; import com.scalar.db.dataloader.core.dataexport.ExportOptions; import com.scalar.db.io.Column; @@ -61,7 +61,7 @@ private static void validatePartitionKey(LinkedHashSet partitionKeyNames // Make sure that all partition key columns are provided if (partitionKeyNames.size() != key.getColumns().size()) { throw new ExportOptionsValidationException( - CoreError.DATA_LOADER_INCOMPLETE_PARTITION_KEY.buildMessage(partitionKeyNames)); + DataLoaderError.INCOMPLETE_PARTITION_KEY.buildMessage(partitionKeyNames)); } // Check if the order of columns in key.getColumns() matches the order in partitionKeyNames @@ -71,7 +71,7 @@ private static void validatePartitionKey(LinkedHashSet partitionKeyNames if (!partitionKeyIterator.hasNext() || !partitionKeyIterator.next().equals(column.getName())) { throw new ExportOptionsValidationException( - CoreError.DATA_LOADER_PARTITION_KEY_ORDER_MISMATCH.buildMessage(partitionKeyNames)); + DataLoaderError.PARTITION_KEY_ORDER_MISMATCH.buildMessage(partitionKeyNames)); } } } @@ -113,7 +113,7 @@ private static void validateClusteringKey(LinkedHashSet clusteringKeyNam // it indicates a mismatch if (!clusteringKeyIterator.hasNext()) { throw new ExportOptionsValidationException( - CoreError.DATA_LOADER_CLUSTERING_KEY_ORDER_MISMATCH.buildMessage(clusteringKeyNames)); + DataLoaderError.CLUSTERING_KEY_ORDER_MISMATCH.buildMessage(clusteringKeyNames)); } // Get the next expected clustering key name @@ -122,7 +122,7 @@ private static void validateClusteringKey(LinkedHashSet clusteringKeyNam // Check if the current column name matches the expected clustering key name if (!column.getName().equals(expectedKey)) { throw new ExportOptionsValidationException( - CoreError.DATA_LOADER_CLUSTERING_KEY_ORDER_MISMATCH.buildMessage(clusteringKeyNames)); + DataLoaderError.CLUSTERING_KEY_ORDER_MISMATCH.buildMessage(clusteringKeyNames)); } } } @@ -136,7 +136,7 @@ private static void checkIfColumnExistsAsClusteringKey( if (!clusteringKeyNames.contains(columnName)) { throw new ExportOptionsValidationException( - CoreError.DATA_LOADER_CLUSTERING_KEY_NOT_FOUND.buildMessage(columnName)); + DataLoaderError.CLUSTERING_KEY_NOT_FOUND.buildMessage(columnName)); } } @@ -150,7 +150,7 @@ private static void validateProjectionColumns( for (String column : columns) { if (!columnNames.contains(column)) { throw new 
ExportOptionsValidationException( - CoreError.DATA_LOADER_INVALID_PROJECTION.buildMessage(column)); + DataLoaderError.INVALID_PROJECTION.buildMessage(column)); } } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidator.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidator.java index 862ec3015d..be1227439e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidator.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidator.java @@ -1,7 +1,7 @@ package com.scalar.db.dataloader.core.dataimport.controlfile; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.util.RuntimeUtil; import com.scalar.db.dataloader.core.util.TableMetadataUtil; import java.util.HashSet; @@ -40,7 +40,7 @@ public static void validate( // Make sure that multiple table mappings for one table do not exist if (uniqueTables.contains(lookupKey)) { throw new ControlFileValidationException( - CoreError.DATA_LOADER_DUPLICATE_DATA_MAPPINGS.buildMessage(lookupKey)); + DataLoaderError.DUPLICATE_DATA_MAPPINGS.buildMessage(lookupKey)); } uniqueTables.add(lookupKey); @@ -88,7 +88,7 @@ private static void checkIfAllColumnsAreMapped( for (String columnName : columnNames) { if (!mappedTargetColumns.contains(columnName)) { throw new ControlFileValidationException( - CoreError.DATA_LOADER_MISSING_COLUMN_MAPPING.buildMessage( + DataLoaderError.MISSING_COLUMN_MAPPING.buildMessage( columnName, TableMetadataUtil.getTableLookupKey(controlFileTable))); } } @@ -105,7 +105,7 @@ private static void checkEmptyMappings(ControlFile controlFile) // Make sure data mapping for at least one table is provided if (controlFile.getTables().isEmpty()) { throw new ControlFileValidationException( - CoreError.DATA_LOADER_CONTROL_FILE_MISSING_DATA_MAPPINGS.buildMessage()); + DataLoaderError.CONTROL_FILE_MISSING_DATA_MAPPINGS.buildMessage()); } } @@ -124,7 +124,7 @@ private static void checkMultiTableMetadata( String lookupKey = TableMetadataUtil.getTableLookupKey(controlFileTable); if (!tableMetadataMap.containsKey(lookupKey)) { throw new ControlFileValidationException( - CoreError.DATA_LOADER_MISSING_NAMESPACE_OR_TABLE.buildMessage( + DataLoaderError.MISSING_NAMESPACE_OR_TABLE.buildMessage( controlFileTable.getNamespace(), controlFileTable.getTable())); } } @@ -147,7 +147,7 @@ private static void checkIfTargetColumnExist( // Make sure the target fields are found in the table metadata if (!columnNames.contains(mapping.getTargetColumn())) { throw new ControlFileValidationException( - CoreError.DATA_LOADER_TARGET_COLUMN_NOT_FOUND.buildMessage( + DataLoaderError.TARGET_COLUMN_NOT_FOUND.buildMessage( mapping.getTargetColumn(), mapping.getSourceField(), lookupKey)); } } @@ -171,7 +171,7 @@ private static void checkPartitionKeys( for (String partitionKeyName : partitionKeyNames) { if (!mappedTargetColumns.contains(partitionKeyName)) { throw new ControlFileValidationException( - CoreError.DATA_LOADER_MISSING_PARTITION_KEY.buildMessage( + DataLoaderError.MISSING_PARTITION_KEY.buildMessage( partitionKeyName, TableMetadataUtil.getTableLookupKey(controlFileTable))); } } @@ -195,7 +195,7 @@ private static void checkClusteringKeys( for (String clusteringKeyName : clusteringKeyNames) { if 
(!mappedTargetColumns.contains(clusteringKeyName)) { throw new ControlFileValidationException( - CoreError.DATA_LOADER_MISSING_CLUSTERING_KEY.buildMessage( + DataLoaderError.MISSING_CLUSTERING_KEY.buildMessage( clusteringKeyName, TableMetadataUtil.getTableLookupKey(controlFileTable))); } } @@ -215,7 +215,7 @@ private static Set getTargetColumnSet(ControlFileTable controlFileTable) for (ControlFileTableFieldMapping mapping : controlFileTable.getMappings()) { if (!mappedTargetColumns.add(mapping.getTargetColumn())) { throw new ControlFileValidationException( - CoreError.DATA_LOADER_MULTIPLE_MAPPINGS_FOR_COLUMN_FOUND.buildMessage( + DataLoaderError.MULTIPLE_MAPPINGS_FOR_COLUMN_FOUND.buildMessage( mapping.getTargetColumn(), TableMetadataUtil.getTableLookupKey(controlFileTable))); } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java index 8066141ec2..afd7b124af 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java @@ -10,7 +10,7 @@ import com.scalar.db.api.Scan; import com.scalar.db.api.ScanBuilder; import com.scalar.db.api.Scanner; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.ScanRange; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.exception.transaction.CrudException; @@ -110,7 +110,7 @@ public void put( transaction.put(put); } catch (CrudException e) { throw new ScalarDbDaoException( - CoreError.DATA_LOADER_ERROR_CRUD_EXCEPTION.buildMessage(e.getMessage()), e); + DataLoaderError.ERROR_CRUD_EXCEPTION.buildMessage(e.getMessage()), e); } } @@ -138,7 +138,7 @@ public void put( storage.put(put); } catch (ExecutionException e) { throw new ScalarDbDaoException( - CoreError.DATA_LOADER_ERROR_CRUD_EXCEPTION.buildMessage(e.getMessage()), e); + DataLoaderError.ERROR_CRUD_EXCEPTION.buildMessage(e.getMessage()), e); } } @@ -175,8 +175,7 @@ public List scan( return scanner.all(); } } catch (ExecutionException | IOException e) { - throw new ScalarDbDaoException( - CoreError.DATA_LOADER_ERROR_SCAN.buildMessage(e.getMessage()), e); + throw new ScalarDbDaoException(DataLoaderError.ERROR_SCAN.buildMessage(e.getMessage()), e); } } @@ -215,8 +214,7 @@ public List scan( } catch (CrudException | NoSuchElementException e) { // No such element Exception is thrown when the scan is done in transaction mode but // ScalarDB is running in storage mode - throw new ScalarDbDaoException( - CoreError.DATA_LOADER_ERROR_SCAN.buildMessage(e.getMessage()), e); + throw new ScalarDbDaoException(DataLoaderError.ERROR_SCAN.buildMessage(e.getMessage()), e); } } @@ -243,8 +241,7 @@ public Scanner createScanner( try { return storage.scan(scan); } catch (ExecutionException e) { - throw new ScalarDbDaoException( - CoreError.DATA_LOADER_ERROR_SCAN.buildMessage(e.getMessage()), e); + throw new ScalarDbDaoException(DataLoaderError.ERROR_SCAN.buildMessage(e.getMessage()), e); } } @@ -277,8 +274,7 @@ public Scanner createScanner( try { return storage.scan(scan); } catch (ExecutionException e) { - throw new ScalarDbDaoException( - CoreError.DATA_LOADER_ERROR_SCAN.buildMessage(e.getMessage()), e); + throw new ScalarDbDaoException(DataLoaderError.ERROR_SCAN.buildMessage(e.getMessage()), e); } } diff --git 
a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessor.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessor.java index 468465396e..42ac102094 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessor.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessor.java @@ -2,7 +2,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.DataLoaderObjectMapper; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunk; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportRow; @@ -83,7 +83,7 @@ protected void readDataChunks( String[] dataArray = line.split(delimiter); if (headerArray.length != dataArray.length) { throw new IllegalArgumentException( - CoreError.DATA_LOADER_CSV_DATA_MISMATCH.buildMessage(line, header)); + DataLoaderError.CSV_DATA_MISMATCH.buildMessage(line, header)); } JsonNode jsonNode = combineHeaderAndData(headerArray, dataArray); if (jsonNode.isEmpty()) continue; @@ -97,7 +97,7 @@ protected void readDataChunks( if (!currentDataChunk.isEmpty()) enqueueDataChunk(currentDataChunk, dataChunkQueue); } catch (IOException | InterruptedException e) { throw new RuntimeException( - CoreError.DATA_LOADER_CSV_FILE_READ_FAILED.buildMessage(e.getMessage()), e); + DataLoaderError.CSV_FILE_READ_FAILED.buildMessage(e.getMessage()), e); } } @@ -126,7 +126,7 @@ private String safeReadLine(BufferedReader reader) { return reader.readLine(); } catch (IOException e) { throw new UncheckedIOException( - CoreError.DATA_LOADER_CSV_FILE_HEADER_READ_FAILED.buildMessage(e.getMessage()), e); + DataLoaderError.CSV_FILE_HEADER_READ_FAILED.buildMessage(e.getMessage()), e); } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/DefaultImportProcessorFactory.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/DefaultImportProcessorFactory.java index d40222d9a7..098b5aa606 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/DefaultImportProcessorFactory.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/DefaultImportProcessorFactory.java @@ -1,6 +1,6 @@ package com.scalar.db.dataloader.core.dataimport.processor; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; /** * A factory class that creates appropriate ImportProcessor instances based on the input file @@ -39,7 +39,7 @@ public ImportProcessor createImportProcessor(ImportProcessorParams params) { break; default: throw new IllegalArgumentException( - CoreError.DATA_LOADER_FILE_FORMAT_NOT_SUPPORTED.buildMessage( + DataLoaderError.FILE_FORMAT_NOT_SUPPORTED.buildMessage( params.getImportOptions().getFileFormat().toString())); } return importProcessor; diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java index 11b54c93b6..81daf9646e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java +++ 
b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java @@ -1,7 +1,7 @@ package com.scalar.db.dataloader.core.dataimport.processor; import com.scalar.db.api.DistributedTransaction; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.ScalarDbMode; import com.scalar.db.dataloader.core.dataimport.ImportEventListener; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunk; @@ -106,7 +106,7 @@ public void process(int dataChunkSize, int transactionBatchSize, BufferedReader } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException( - CoreError.DATA_LOADER_DATA_CHUNK_PROCESS_FAILED.buildMessage(e.getMessage()), e); + DataLoaderError.DATA_CHUNK_PROCESS_FAILED.buildMessage(e.getMessage()), e); } finally { shutdownExecutorGracefully(dataChunkReaderExecutor); shutdownExecutorGracefully(dataChunkProcessorExecutor); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java index c435f0f13d..1cbaef113f 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessor.java @@ -4,7 +4,7 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import com.fasterxml.jackson.databind.JsonNode; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.DataLoaderObjectMapper; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunk; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportRow; @@ -67,7 +67,7 @@ protected void readDataChunks( BufferedReader reader, int dataChunkSize, BlockingQueue dataChunkQueue) { try (JsonParser jsonParser = new JsonFactory().createParser(reader)) { if (jsonParser.nextToken() != JsonToken.START_ARRAY) { - throw new IOException(CoreError.DATA_LOADER_JSON_CONTENT_START_ERROR.buildMessage()); + throw new IOException(DataLoaderError.JSON_CONTENT_START_ERROR.buildMessage()); } List currentDataChunk = new ArrayList<>(); @@ -85,7 +85,7 @@ protected void readDataChunks( if (!currentDataChunk.isEmpty()) enqueueDataChunk(currentDataChunk, dataChunkQueue); } catch (IOException | InterruptedException e) { throw new RuntimeException( - CoreError.DATA_LOADER_JSON_FILE_READ_FAILED.buildMessage(e.getMessage()), e); + DataLoaderError.JSON_FILE_READ_FAILED.buildMessage(e.getMessage()), e); } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessor.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessor.java index d7ec63004e..6c2d80d991 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessor.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessor.java @@ -1,7 +1,7 @@ package com.scalar.db.dataloader.core.dataimport.processor; import com.fasterxml.jackson.databind.JsonNode; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import 
com.scalar.db.dataloader.core.DataLoaderObjectMapper; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunk; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportRow; @@ -74,7 +74,7 @@ protected void readDataChunks( } catch (IOException | InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException( - CoreError.DATA_LOADER_JSONLINES_FILE_READ_FAILED.buildMessage(e.getMessage()), e); + DataLoaderError.JSONLINES_FILE_READ_FAILED.buildMessage(e.getMessage()), e); } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java index e0b24e5aa4..128854e18b 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java @@ -3,7 +3,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.scalar.db.api.Result; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.dataimport.ImportMode; import com.scalar.db.dataloader.core.dataimport.ImportOptions; import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFile; @@ -117,7 +117,7 @@ private List startMultiTableImportProcess( if (!mutableSourceRecord.has(mapping.getSourceField()) && !mutableSourceRecord.has(mapping.getTargetColumn())) { String errorMessage = - CoreError.DATA_LOADER_MISSING_SOURCE_FIELD.buildMessage( + DataLoaderError.MISSING_SOURCE_FIELD.buildMessage( mapping.getSourceField(), controlFileTable.getTable()); ImportTargetResult targetResult = @@ -182,9 +182,7 @@ private ImportTargetResult importIntoSingleTable( .namespace(namespace) .tableName(table) .status(ImportTargetResultStatus.VALIDATION_FAILED) - .errors( - Collections.singletonList( - CoreError.DATA_LOADER_TABLE_METADATA_MISSING.buildMessage())) + .errors(Collections.singletonList(DataLoaderError.TABLE_METADATA_MISSING.buildMessage())) .build(); } @@ -224,7 +222,7 @@ private ImportTargetResult importIntoSingleTable( .status(ImportTargetResultStatus.VALIDATION_FAILED) .errors( Collections.singletonList( - CoreError.DATA_LOADER_COULD_NOT_FIND_PARTITION_KEY.buildMessage())) + DataLoaderError.COULD_NOT_FIND_PARTITION_KEY.buildMessage())) .build(); } Optional optionalClusteringKey = Optional.empty(); @@ -239,7 +237,7 @@ private ImportTargetResult importIntoSingleTable( .status(ImportTargetResultStatus.VALIDATION_FAILED) .errors( Collections.singletonList( - CoreError.DATA_LOADER_COULD_NOT_FIND_CLUSTERING_KEY.buildMessage())) + DataLoaderError.COULD_NOT_FIND_CLUSTERING_KEY.buildMessage())) .build(); } } @@ -274,7 +272,7 @@ && shouldRevalidateMissingColumns(importOptions, checkForMissingColumns)) { .status(ImportTargetResultStatus.MISSING_COLUMNS) .errors( Collections.singletonList( - CoreError.DATA_LOADER_UPSERT_INSERT_MISSING_COLUMNS.buildMessage())) + DataLoaderError.UPSERT_INSERT_MISSING_COLUMNS.buildMessage())) .build(); } } @@ -286,8 +284,7 @@ && shouldRevalidateMissingColumns(importOptions, checkForMissingColumns)) { .importedRecord(mutableSourceRecord) .importAction(importAction) .status(ImportTargetResultStatus.DATA_ALREADY_EXISTS) - .errors( - Collections.singletonList(CoreError.DATA_LOADER_DATA_ALREADY_EXISTS.buildMessage())) + 
.errors(Collections.singletonList(DataLoaderError.DATA_ALREADY_EXISTS.buildMessage())) .build(); } @@ -298,7 +295,7 @@ && shouldRevalidateMissingColumns(importOptions, checkForMissingColumns)) { .importedRecord(mutableSourceRecord) .importAction(importAction) .status(ImportTargetResultStatus.DATA_NOT_FOUND) - .errors(Collections.singletonList(CoreError.DATA_LOADER_DATA_NOT_FOUND.buildMessage())) + .errors(Collections.singletonList(DataLoaderError.DATA_NOT_FOUND.buildMessage())) .build(); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java index 3fbc5ffc38..d68d2238d5 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java @@ -2,7 +2,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.DatabaseKeyType; import com.scalar.db.transaction.consensuscommit.ConsensusCommitUtils; import java.util.Set; @@ -72,8 +72,8 @@ public static void checkMissingKeys( if (!sourceRecord.has(columnName)) { String errorMessageFormat = keyType == DatabaseKeyType.PARTITION - ? CoreError.DATA_LOADER_MISSING_PARTITION_KEY_COLUMN.buildMessage(columnName) - : CoreError.DATA_LOADER_MISSING_CLUSTERING_KEY_COLUMN.buildMessage(columnName); + ? DataLoaderError.MISSING_PARTITION_KEY_COLUMN.buildMessage(columnName) + : DataLoaderError.MISSING_CLUSTERING_KEY_COLUMN.buildMessage(columnName); validationResult.addErrorMessage(columnName, errorMessageFormat); } } @@ -100,7 +100,7 @@ public static void checkMissingColumns( && !ConsensusCommitUtils.isTransactionMetaColumn(columnName, tableMetadata) && !sourceRecord.has(columnName)) { validationResult.addErrorMessage( - columnName, CoreError.DATA_LOADER_MISSING_COLUMN.buildMessage(columnName)); + columnName, DataLoaderError.MISSING_COLUMN.buildMessage(columnName)); } } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java index f91435fe5f..70d49a51ee 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataService.java @@ -2,7 +2,7 @@ import com.scalar.db.api.DistributedStorageAdmin; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.util.TableMetadataUtil; import com.scalar.db.exception.storage.ExecutionException; import java.util.Collection; @@ -35,12 +35,12 @@ public TableMetadata getTableMetadata(String namespace, String tableName) TableMetadata tableMetadata = storageAdmin.getTableMetadata(namespace, tableName); if (tableMetadata == null) { throw new TableMetadataException( - CoreError.DATA_LOADER_MISSING_NAMESPACE_OR_TABLE.buildMessage(namespace, tableName)); + DataLoaderError.MISSING_NAMESPACE_OR_TABLE.buildMessage(namespace, tableName)); } return tableMetadata; } 
catch (ExecutionException e) { throw new TableMetadataException( - CoreError.DATA_LOADER_TABLE_METADATA_RETRIEVAL_FAILED.buildMessage(e.getMessage()), e); + DataLoaderError.TABLE_METADATA_RETRIEVAL_FAILED.buildMessage(e.getMessage()), e); } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/ColumnUtils.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/ColumnUtils.java index 90001ed062..397d468a61 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/ColumnUtils.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/ColumnUtils.java @@ -3,8 +3,8 @@ import com.fasterxml.jackson.databind.JsonNode; import com.scalar.db.api.Result; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; import com.scalar.db.dataloader.core.ColumnInfo; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.exception.Base64Exception; import com.scalar.db.dataloader.core.exception.ColumnParsingException; import com.scalar.db.io.BigIntColumn; @@ -133,17 +133,17 @@ public static Column createColumnFromValue( } } catch (NumberFormatException e) { throw new ColumnParsingException( - CoreError.DATA_LOADER_INVALID_NUMBER_FORMAT_FOR_COLUMN_VALUE.buildMessage( + DataLoaderError.INVALID_NUMBER_FORMAT_FOR_COLUMN_VALUE.buildMessage( value, columnName, columnInfo.getTableName(), columnInfo.getNamespace()), e); } catch (DateTimeParseException e) { throw new ColumnParsingException( - CoreError.DATA_LOADER_INVALID_DATE_TIME_FOR_COLUMN_VALUE.buildMessage( + DataLoaderError.INVALID_DATE_TIME_FOR_COLUMN_VALUE.buildMessage( value, columnName, columnInfo.getTableName(), columnInfo.getNamespace()), e); } catch (IllegalArgumentException e) { throw new ColumnParsingException( - CoreError.DATA_LOADER_INVALID_BASE64_ENCODING_FOR_COLUMN_VALUE.buildMessage( + DataLoaderError.INVALID_BASE64_ENCODING_FOR_COLUMN_VALUE.buildMessage( value, columnName, columnInfo.getTableName(), columnInfo.getNamespace()), e); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/KeyUtils.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/KeyUtils.java index f20e013052..1f5c66cdb9 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/KeyUtils.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/KeyUtils.java @@ -3,9 +3,9 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; import com.scalar.db.dataloader.core.ColumnInfo; import com.scalar.db.dataloader.core.ColumnKeyValue; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.exception.Base64Exception; import com.scalar.db.dataloader.core.exception.ColumnParsingException; import com.scalar.db.dataloader.core.exception.KeyParsingException; @@ -116,7 +116,7 @@ public static Key parseKeyValue( DataType columnDataType = tableMetadata.getColumnDataType(columnName); if (columnDataType == null) { throw new KeyParsingException( - CoreError.DATA_LOADER_INVALID_COLUMN_NON_EXISTENT.buildMessage( + DataLoaderError.INVALID_COLUMN_NON_EXISTENT.buildMessage( columnName, tableName, namespace)); } ColumnInfo columnInfo = diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/RuntimeUtil.java 
b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/RuntimeUtil.java index 870e70285a..c402e251c3 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/RuntimeUtil.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/RuntimeUtil.java @@ -1,6 +1,6 @@ package com.scalar.db.dataloader.core.util; -import static com.scalar.db.common.error.CoreError.DATA_LOADER_ERROR_METHOD_NULL_ARGUMENT; +import com.scalar.db.dataloader.core.DataLoaderError; /** Utils for runtime checks */ public class RuntimeUtil { @@ -14,7 +14,7 @@ public class RuntimeUtil { public static void checkNotNull(Object... values) { for (Object value : values) { if (value == null) { - throw new NullPointerException(DATA_LOADER_ERROR_METHOD_NULL_ARGUMENT.buildMessage()); + throw new NullPointerException(DataLoaderError.ERROR_METHOD_NULL_ARGUMENT.buildMessage()); } } } diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/DataLoaderErrorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/DataLoaderErrorTest.java new file mode 100644 index 0000000000..c3034e854e --- /dev/null +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/DataLoaderErrorTest.java @@ -0,0 +1,14 @@ +package com.scalar.db.dataloader.core; + +import java.util.Arrays; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +public class DataLoaderErrorTest { + + @Test + public void checkDuplicateErrorCode() { + Assertions.assertThat(Arrays.stream(DataLoaderError.values()).map(DataLoaderError::buildCode)) + .doesNotHaveDuplicates(); + } +} diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidatorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidatorTest.java index b36522a0fc..2335c6bf67 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidatorTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/validation/ExportOptionsValidatorTest.java @@ -3,7 +3,7 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.FileFormat; import com.scalar.db.dataloader.core.ScanRange; import com.scalar.db.dataloader.core.dataexport.ExportOptions; @@ -97,7 +97,7 @@ void validate_withIncompletePartitionKeyForSinglePk_ShouldThrowException() { assertThatThrownBy(() -> ExportOptionsValidator.validate(exportOptions, singlePkCkMetadata)) .isInstanceOf(ExportOptionsValidationException.class) .hasMessage( - CoreError.DATA_LOADER_INCOMPLETE_PARTITION_KEY.buildMessage( + DataLoaderError.INCOMPLETE_PARTITION_KEY.buildMessage( singlePkCkMetadata.getPartitionKeyNames())); } @@ -111,7 +111,7 @@ void validate_withIncompletePartitionKeyForMultiplePks_ShouldThrowException() { assertThatThrownBy(() -> ExportOptionsValidator.validate(exportOptions, multiplePkCkMetadata)) .isInstanceOf(ExportOptionsValidationException.class) .hasMessage( - CoreError.DATA_LOADER_INCOMPLETE_PARTITION_KEY.buildMessage( + DataLoaderError.INCOMPLETE_PARTITION_KEY.buildMessage( multiplePkCkMetadata.getPartitionKeyNames())); } @@ -128,7 +128,7 @@ void validate_withInvalidProjectionColumn_ShouldThrowException() { assertThatThrownBy(() -> ExportOptionsValidator.validate(exportOptions, 
singlePkCkMetadata)) .isInstanceOf(ExportOptionsValidationException.class) - .hasMessage(CoreError.DATA_LOADER_INVALID_PROJECTION.buildMessage("invalid_column")); + .hasMessage(DataLoaderError.INVALID_PROJECTION.buildMessage("invalid_column")); } @Test @@ -147,7 +147,7 @@ void validate_withInvalidClusteringKeyInScanRange_ShouldThrowException() { assertThatThrownBy(() -> ExportOptionsValidator.validate(exportOptions, singlePkCkMetadata)) .isInstanceOf(ExportOptionsValidationException.class) - .hasMessage(CoreError.DATA_LOADER_CLUSTERING_KEY_ORDER_MISMATCH.buildMessage("[ck1]")); + .hasMessage(DataLoaderError.CLUSTERING_KEY_ORDER_MISMATCH.buildMessage("[ck1]")); } @Test @@ -173,8 +173,7 @@ void validate_withInvalidPartitionKeyOrder_ShouldThrowException() { // Verify that the validator throws the correct exception assertThatThrownBy(() -> ExportOptionsValidator.validate(exportOptions, multiplePkCkMetadata)) .isInstanceOf(ExportOptionsValidationException.class) - .hasMessage( - CoreError.DATA_LOADER_PARTITION_KEY_ORDER_MISMATCH.buildMessage(partitionKeyNames)); + .hasMessage(DataLoaderError.PARTITION_KEY_ORDER_MISMATCH.buildMessage(partitionKeyNames)); } private Key createValidPartitionKey() { diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidatorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidatorTest.java index d5dbd654cf..59660c4234 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidatorTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/controlfile/ControlFileValidatorTest.java @@ -3,7 +3,7 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.util.TableMetadataUtil; import com.scalar.db.io.DataType; import java.util.HashMap; @@ -24,7 +24,7 @@ class ControlFileValidatorTest { void validate_nullValuesGiven_shouldThrowNullPointerException() { assertThatThrownBy(() -> ControlFileValidator.validate(null, null, null)) .isExactlyInstanceOf(NullPointerException.class) - .hasMessage(CoreError.DATA_LOADER_ERROR_METHOD_NULL_ARGUMENT.buildMessage()); + .hasMessage(DataLoaderError.ERROR_METHOD_NULL_ARGUMENT.buildMessage()); } @Test @@ -36,7 +36,7 @@ void validate_noTableMappingsGiven_shouldThrowControlFileValidationException() { ControlFileValidator.validate( controlFile, ControlFileValidationLevel.FULL, tableMetadataMap)) .isExactlyInstanceOf(ControlFileValidationException.class) - .hasMessage(CoreError.DATA_LOADER_CONTROL_FILE_MISSING_DATA_MAPPINGS.buildMessage()); + .hasMessage(DataLoaderError.CONTROL_FILE_MISSING_DATA_MAPPINGS.buildMessage()); } @Test @@ -62,7 +62,7 @@ void validate_duplicateTableMappingsGiven_shouldThrowControlFileValidationExcept ControlFileValidator.validate( controlFile, ControlFileValidationLevel.MAPPED, tableMetadataMap)) .isExactlyInstanceOf(ControlFileValidationException.class) - .hasMessage(CoreError.DATA_LOADER_DUPLICATE_DATA_MAPPINGS.buildMessage(lookupKey)); + .hasMessage(DataLoaderError.DUPLICATE_DATA_MAPPINGS.buildMessage(lookupKey)); } @Test @@ -90,8 +90,7 @@ void validate_duplicateTableColumnMappingsGiven_shouldThrowControlFileValidation controlFile, ControlFileValidationLevel.MAPPED, tableMetadataMap)) 
.isExactlyInstanceOf(ControlFileValidationException.class) .hasMessage( - CoreError.DATA_LOADER_MULTIPLE_MAPPINGS_FOR_COLUMN_FOUND.buildMessage( - COLUMN_ONE, lookupKey)); + DataLoaderError.MULTIPLE_MAPPINGS_FOR_COLUMN_FOUND.buildMessage(COLUMN_ONE, lookupKey)); } @Test @@ -109,7 +108,7 @@ void validate_missingTableMetadataGiven_shouldThrowControlFileValidationExceptio controlFile, ControlFileValidationLevel.MAPPED, tableMetadataMap)) .isExactlyInstanceOf(ControlFileValidationException.class) .hasMessage( - CoreError.DATA_LOADER_MISSING_NAMESPACE_OR_TABLE.buildMessage( + DataLoaderError.MISSING_NAMESPACE_OR_TABLE.buildMessage( controlFileTable.getNamespace(), controlFileTable.getTable())); } @@ -136,7 +135,7 @@ void validate_nonExistingTargetColumnGiven_shouldThrowControlFileValidationExcep controlFile, ControlFileValidationLevel.MAPPED, tableMetadataMap)) .isExactlyInstanceOf(ControlFileValidationException.class) .hasMessage( - CoreError.DATA_LOADER_TARGET_COLUMN_NOT_FOUND.buildMessage( + DataLoaderError.TARGET_COLUMN_NOT_FOUND.buildMessage( COLUMN_ONE, COLUMN_ONE, lookupKey)); } @@ -165,8 +164,7 @@ void validate_nonExistingTargetColumnGiven_shouldThrowControlFileValidationExcep controlFile, ControlFileValidationLevel.FULL, tableMetadataMap)) .isExactlyInstanceOf(ControlFileValidationException.class) .hasMessage( - CoreError.DATA_LOADER_MISSING_COLUMN_MAPPING.buildMessage( - COLUMN_PARTITION_KEY, lookupKey)); + DataLoaderError.MISSING_COLUMN_MAPPING.buildMessage(COLUMN_PARTITION_KEY, lookupKey)); } @Test @@ -194,8 +192,7 @@ void validate_nonExistingTargetColumnGiven_shouldThrowControlFileValidationExcep controlFile, ControlFileValidationLevel.KEYS, tableMetadataMap)) .isExactlyInstanceOf(ControlFileValidationException.class) .hasMessage( - CoreError.DATA_LOADER_MISSING_PARTITION_KEY.buildMessage( - COLUMN_PARTITION_KEY, lookupKey)); + DataLoaderError.MISSING_PARTITION_KEY.buildMessage(COLUMN_PARTITION_KEY, lookupKey)); } @Test @@ -228,8 +225,7 @@ void validate_nonExistingTargetColumnGiven_shouldThrowControlFileValidationExcep controlFile, ControlFileValidationLevel.KEYS, tableMetadataMap)) .isExactlyInstanceOf(ControlFileValidationException.class) .hasMessage( - CoreError.DATA_LOADER_MISSING_CLUSTERING_KEY.buildMessage( - COLUMN_CLUSTERING_KEY, lookupKey)); + DataLoaderError.MISSING_CLUSTERING_KEY.buildMessage(COLUMN_CLUSTERING_KEY, lookupKey)); } @Test @@ -357,8 +353,7 @@ void validate_fullValidationAndValidArgumentsGiven_shouldNotThrowException() ControlFileValidator.validate( controlFile, ControlFileValidationLevel.FULL, tableMetadataMap)) .isExactlyInstanceOf(ControlFileValidationException.class) - .hasMessage( - CoreError.DATA_LOADER_MISSING_COLUMN_MAPPING.buildMessage(COLUMN_ONE, lookupKeyTwo)); + .hasMessage(DataLoaderError.MISSING_COLUMN_MAPPING.buildMessage(COLUMN_ONE, lookupKeyTwo)); } @Test @@ -400,7 +395,7 @@ void validate_fullValidationAndValidArgumentsGiven_shouldNotThrowException() controlFile, ControlFileValidationLevel.KEYS, tableMetadataMap)) .isExactlyInstanceOf(ControlFileValidationException.class) .hasMessage( - CoreError.DATA_LOADER_MISSING_CLUSTERING_KEY.buildMessage( + DataLoaderError.MISSING_CLUSTERING_KEY.buildMessage( COLUMN_CLUSTERING_KEY, lookupKeyTwo)); } @@ -440,8 +435,7 @@ void validate_fullValidationAndValidArgumentsGiven_shouldNotThrowException() controlFile, ControlFileValidationLevel.KEYS, tableMetadataMap)) .isExactlyInstanceOf(ControlFileValidationException.class) .hasMessage( - CoreError.DATA_LOADER_MISSING_PARTITION_KEY.buildMessage( - 
COLUMN_PARTITION_KEY, lookupKeyTwo)); + DataLoaderError.MISSING_PARTITION_KEY.buildMessage(COLUMN_PARTITION_KEY, lookupKeyTwo)); } @Test diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java index 65a85b3c3d..f81a815889 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java @@ -2,7 +2,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.UnitTestUtils; import java.util.HashSet; import java.util.Set; @@ -81,7 +81,7 @@ class ImportSourceRecordValidatorTest { partitionKeyNames, clusteringKeyNames, columnNames, sourceRecord, false, mockMetadata); Assertions.assertFalse(result.getColumnsWithErrors().isEmpty()); Assertions.assertEquals( - CoreError.DATA_LOADER_MISSING_CLUSTERING_KEY_COLUMN.buildMessage("id1"), + DataLoaderError.MISSING_CLUSTERING_KEY_COLUMN.buildMessage("id1"), result.getErrorMessages().get(0)); } } diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataServiceTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataServiceTest.java index 9bcd06bf9b..ff9d5d8b0c 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataServiceTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataServiceTest.java @@ -4,7 +4,7 @@ import com.scalar.db.api.DistributedStorageAdmin; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.UnitTestUtils; import com.scalar.db.exception.storage.ExecutionException; import java.util.Collections; @@ -48,6 +48,6 @@ void getTableMetadata_withInvalidNamespaceAndTable_shouldThrowException() { tableMetadataService.getTableMetadata(Collections.singleton(tableMetadataRequest))) .isInstanceOf(TableMetadataException.class) .hasMessage( - CoreError.DATA_LOADER_MISSING_NAMESPACE_OR_TABLE.buildMessage("namespace2", "table2")); + DataLoaderError.MISSING_NAMESPACE_OR_TABLE.buildMessage("namespace2", "table2")); } } diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/ColumnUtilsTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/ColumnUtilsTest.java index 2ecd782fa6..178809eef5 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/ColumnUtilsTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/ColumnUtilsTest.java @@ -7,8 +7,8 @@ import com.scalar.db.api.Result; import com.scalar.db.api.TableMetadata; import com.scalar.db.common.ResultImpl; -import com.scalar.db.common.error.CoreError; import com.scalar.db.dataloader.core.ColumnInfo; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.UnitTestUtils; import com.scalar.db.dataloader.core.exception.Base64Exception; import com.scalar.db.dataloader.core.exception.ColumnParsingException; @@ -160,7 
+160,7 @@ void createColumnFromValue_invalidNumberFormat_throwsNumberFormatException() { ColumnParsingException.class, () -> ColumnUtils.createColumnFromValue(DataType.INT, columnInfo, value)); assertEquals( - CoreError.DATA_LOADER_INVALID_NUMBER_FORMAT_FOR_COLUMN_VALUE.buildMessage( + DataLoaderError.INVALID_NUMBER_FORMAT_FOR_COLUMN_VALUE.buildMessage( value, columnName, "table", "ns"), exception.getMessage()); } @@ -180,7 +180,7 @@ void createColumnFromValue_invalidBase64_throwsBase64Exception() { ColumnParsingException.class, () -> ColumnUtils.createColumnFromValue(DataType.BLOB, columnInfo, value)); assertEquals( - CoreError.DATA_LOADER_INVALID_BASE64_ENCODING_FOR_COLUMN_VALUE.buildMessage( + DataLoaderError.INVALID_BASE64_ENCODING_FOR_COLUMN_VALUE.buildMessage( value, columnName, "table", "ns"), exception.getMessage()); } @@ -199,7 +199,7 @@ void createColumnFromValue_invalidDateTimeFormat_throwsDateTimeParseException() ColumnParsingException.class, () -> ColumnUtils.createColumnFromValue(DataType.TIMESTAMP, columnInfo, value)); assertEquals( - CoreError.DATA_LOADER_INVALID_DATE_TIME_FOR_COLUMN_VALUE.buildMessage( + DataLoaderError.INVALID_DATE_TIME_FOR_COLUMN_VALUE.buildMessage( value, columnName, "table", "ns"), exception.getMessage()); } diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/KeyUtilsTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/KeyUtilsTest.java index 9379349eb4..b8739fda58 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/KeyUtilsTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/KeyUtilsTest.java @@ -7,9 +7,9 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; import com.scalar.db.dataloader.core.ColumnInfo; import com.scalar.db.dataloader.core.ColumnKeyValue; +import com.scalar.db.dataloader.core.DataLoaderError; import com.scalar.db.dataloader.core.UnitTestUtils; import com.scalar.db.dataloader.core.exception.ColumnParsingException; import com.scalar.db.dataloader.core.exception.KeyParsingException; @@ -69,8 +69,7 @@ void parseKeyValue_invalidColumnName_throwsKeyParsingException() { KeyParsingException.class, () -> KeyUtils.parseKeyValue(keyValue, "namespace", "table", tableMetadata)); assertEquals( - CoreError.DATA_LOADER_INVALID_COLUMN_NON_EXISTENT.buildMessage( - columnName, "table", "namespace"), + DataLoaderError.INVALID_COLUMN_NON_EXISTENT.buildMessage(columnName, "table", "namespace"), exception.getMessage()); } diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/RuntimeUtilTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/RuntimeUtilTest.java index 8b03c0c0ab..73b73b353a 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/RuntimeUtilTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/RuntimeUtilTest.java @@ -1,8 +1,8 @@ package com.scalar.db.dataloader.core.util; -import static com.scalar.db.common.error.CoreError.DATA_LOADER_ERROR_METHOD_NULL_ARGUMENT; import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; +import com.scalar.db.dataloader.core.DataLoaderError; import org.junit.jupiter.api.Test; /** RuntimeUtils unit tests */ @@ -12,7 +12,7 @@ class RuntimeUtilTest { void checkNotNull_HasNullValues_ShouldThrowException() { assertThatThrownBy(() -> RuntimeUtil.checkNotNull(null, null)) 
.isExactlyInstanceOf(NullPointerException.class) - .hasMessage(DATA_LOADER_ERROR_METHOD_NULL_ARGUMENT.buildMessage()); + .hasMessage(DataLoaderError.ERROR_METHOD_NULL_ARGUMENT.buildMessage()); } @Test diff --git a/schema-loader/src/main/java/com/scalar/db/schemaloader/ImportTableSchema.java b/schema-loader/src/main/java/com/scalar/db/schemaloader/ImportTableSchema.java index 5c114de096..df70aae313 100644 --- a/schema-loader/src/main/java/com/scalar/db/schemaloader/ImportTableSchema.java +++ b/schema-loader/src/main/java/com/scalar/db/schemaloader/ImportTableSchema.java @@ -4,7 +4,6 @@ import com.google.common.collect.ImmutableSet; import com.google.gson.JsonElement; import com.google.gson.JsonObject; -import com.scalar.db.common.error.CoreError; import com.scalar.db.io.DataType; import java.util.Map; import java.util.Map.Entry; @@ -25,8 +24,8 @@ public ImportTableSchema( String[] fullName = tableFullName.split("\\.", -1); if (fullName.length != 2) { throw new IllegalArgumentException( - CoreError.SCHEMA_LOADER_PARSE_ERROR_TABLE_NAME_MUST_CONTAIN_NAMESPACE_AND_TABLE - .buildMessage(tableFullName)); + SchemaLoaderError.PARSE_ERROR_TABLE_NAME_MUST_CONTAIN_NAMESPACE_AND_TABLE.buildMessage( + tableFullName)); } namespace = fullName[0]; tableName = fullName[1]; @@ -52,7 +51,7 @@ private ImmutableMap parseOverrideColumnsType( DataType dataType = TableSchema.DATA_MAP_TYPE.get(columnDataType.toUpperCase()); if (dataType == null) { throw new IllegalArgumentException( - CoreError.SCHEMA_LOADER_PARSE_ERROR_INVALID_COLUMN_TYPE.buildMessage( + SchemaLoaderError.PARSE_ERROR_INVALID_COLUMN_TYPE.buildMessage( tableFullName, columnName, column.getValue().getAsString())); } columnsBuilder.put(columnName, dataType); diff --git a/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaLoader.java b/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaLoader.java index 36747986de..00b86dff52 100644 --- a/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaLoader.java +++ b/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaLoader.java @@ -3,7 +3,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.gson.JsonParseException; -import com.scalar.db.common.error.CoreError; import com.scalar.db.schemaloader.command.CassandraCommand; import com.scalar.db.schemaloader.command.CosmosCommand; import com.scalar.db.schemaloader.command.DynamoCommand; @@ -903,7 +902,7 @@ static SchemaOperator getSchemaOperator(Either config) return new SchemaOperator(config.getLeft()); } catch (IOException e) { throw new SchemaLoaderException( - CoreError.SCHEMA_LOADER_READING_CONFIG_FILE_FAILED.buildMessage( + SchemaLoaderError.READING_CONFIG_FILE_FAILED.buildMessage( config.getLeft().toAbsolutePath()), e); } @@ -922,7 +921,7 @@ private static List getTableSchemaList( return schemaParser.parse(); } catch (IllegalArgumentException | IllegalStateException | JsonParseException e) { throw new SchemaLoaderException( - CoreError.SCHEMA_LOADER_PARSING_SCHEMA_JSON_FAILED.buildMessage(e.getMessage()), e); + SchemaLoaderError.PARSING_SCHEMA_JSON_FAILED.buildMessage(e.getMessage()), e); } } return Collections.emptyList(); @@ -939,7 +938,7 @@ static SchemaParser getSchemaParser(Either schema, Map getImportTableSchemaList( return schemaParser.parse(); } catch (IllegalArgumentException | IllegalStateException | JsonParseException e) { throw new SchemaLoaderException( - CoreError.SCHEMA_LOADER_PARSING_SCHEMA_JSON_FAILED.buildMessage(e.getMessage()), 
e); + SchemaLoaderError.PARSING_SCHEMA_JSON_FAILED.buildMessage(e.getMessage()), e); } } return Collections.emptyList(); @@ -974,7 +973,7 @@ static ImportSchemaParser getImportSchemaParser( return new ImportSchemaParser(schema.getLeft(), options); } catch (IOException e) { throw new SchemaLoaderException( - CoreError.SCHEMA_LOADER_READING_SCHEMA_FILE_FAILED.buildMessage( + SchemaLoaderError.READING_SCHEMA_FILE_FAILED.buildMessage( schema.getLeft().toAbsolutePath()), e); } diff --git a/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaLoaderError.java b/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaLoaderError.java new file mode 100644 index 0000000000..2f0e5e8dd6 --- /dev/null +++ b/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaLoaderError.java @@ -0,0 +1,141 @@ +package com.scalar.db.schemaloader; + +import com.scalar.db.common.error.Category; +import com.scalar.db.common.error.ScalarDbError; + +public enum SchemaLoaderError implements ScalarDbError { + + // + // Errors for the user error category + // + TABLE_NOT_FOUND(Category.USER_ERROR, "0000", "The table does not exist. Table: %s", "", ""), + ALTERING_PARTITION_KEYS_NOT_SUPPORTED( + Category.USER_ERROR, + "0001", + "The partition keys for the table %s.%s were modified, but altering partition keys is not supported", + "", + ""), + ALTERING_CLUSTERING_KEYS_NOT_SUPPORTED( + Category.USER_ERROR, + "0002", + "The clustering keys for the table %s.%s were modified, but altering clustering keys is not supported", + "", + ""), + ALTERING_CLUSTERING_ORDER_NOT_SUPPORTED( + Category.USER_ERROR, + "0003", + "The clustering order of the table %s.%s was modified, but altering the clustering order is not supported", + "", + ""), + DELETING_COLUMN_NOT_SUPPORTED( + Category.USER_ERROR, + "0004", + "The column %s in the table %s.%s has been deleted. Column deletion is not supported when altering a table", + "", + ""), + ALTERING_COLUMN_DATA_TYPE_NOT_SUPPORTED( + Category.USER_ERROR, + "0005", + "The data type for the column %s in the table %s.%s was modified, but altering data types is not supported", + "", + ""), + SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_REPAIR_ALL( + Category.USER_ERROR, + "0006", + "Specifying the '--schema-file' option is required when using the '--repair-all' option", + "", + ""), + SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_ALTER( + Category.USER_ERROR, + "0007", + "Specifying the '--schema-file' option is required when using the '--alter' option", + "", + ""), + SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_IMPORT( + Category.USER_ERROR, + "0008", + "Specifying the '--schema-file' option is required when using the '--import' option", + "", + ""), + SPECIFYING_COORDINATOR_WITH_IMPORT_NOT_ALLOWED( + Category.USER_ERROR, + "0009", + "Specifying the '--coordinator' option with the '--import' option is not allowed." + + " Create Coordinator tables separately", + "", + ""), + READING_CONFIG_FILE_FAILED( + Category.USER_ERROR, "0010", "Reading the configuration file failed. File: %s", "", ""), + READING_SCHEMA_FILE_FAILED( + Category.USER_ERROR, "0011", "Reading the schema file failed. File: %s", "", ""), + PARSING_SCHEMA_JSON_FAILED( + Category.USER_ERROR, "0012", "Parsing the schema JSON failed. Details: %s", "", ""), + PARSE_ERROR_TABLE_NAME_MUST_CONTAIN_NAMESPACE_AND_TABLE( + Category.USER_ERROR, + "0013", + "The table name must contain the namespace and the table. 
Table: %s", + "", + ""), + PARSE_ERROR_PARTITION_KEY_MUST_BE_SPECIFIED( + Category.USER_ERROR, "0014", "The partition key must be specified. Table: %s", "", ""), + PARSE_ERROR_INVALID_CLUSTERING_KEY_FORMAT( + Category.USER_ERROR, + "0015", + "Invalid clustering-key format. The clustering key must be in the format of 'column_name' or 'column_name ASC/DESC'." + + " Table: %s; Clustering key: %s", + "", + ""), + PARSE_ERROR_COLUMNS_NOT_SPECIFIED( + Category.USER_ERROR, "0016", "Columns must be specified. Table: %s", "", ""), + PARSE_ERROR_INVALID_COLUMN_TYPE( + Category.USER_ERROR, "0017", "Invalid column type. Table: %s; Column: %s; Type: %s", "", ""), + ; + + private static final String COMPONENT_NAME = "DB-SCHEMA-LOADER"; + + private final Category category; + private final String id; + private final String message; + private final String cause; + private final String solution; + + SchemaLoaderError(Category category, String id, String message, String cause, String solution) { + validate(COMPONENT_NAME, category, id, message, cause, solution); + + this.category = category; + this.id = id; + this.message = message; + this.cause = cause; + this.solution = solution; + } + + @Override + public String getComponentName() { + return COMPONENT_NAME; + } + + @Override + public Category getCategory() { + return category; + } + + @Override + public String getId() { + return id; + } + + @Override + public String getMessage() { + return message; + } + + @Override + public String getCause() { + return cause; + } + + @Override + public String getSolution() { + return solution; + } +} diff --git a/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaOperator.java b/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaOperator.java index b586b2b6fa..8ae7a8f31d 100644 --- a/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaOperator.java +++ b/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaOperator.java @@ -5,7 +5,6 @@ import com.scalar.db.api.DistributedStorageAdmin; import com.scalar.db.api.DistributedTransactionAdmin; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; import com.scalar.db.schemaloader.alteration.TableMetadataAlteration; @@ -339,7 +338,7 @@ public void alterTables(List tableSchemaList, Map o try { if (!tableExists(namespace, table, isTransactional)) { throw new IllegalArgumentException( - CoreError.TABLE_NOT_FOUND.buildMessage( + SchemaLoaderError.TABLE_NOT_FOUND.buildMessage( ScalarDbUtils.getFullTableName(namespace, table))); } TableMetadata currentMetadata = getCurrentTableMetadata(namespace, table, isTransactional); diff --git a/schema-loader/src/main/java/com/scalar/db/schemaloader/TableSchema.java b/schema-loader/src/main/java/com/scalar/db/schemaloader/TableSchema.java index 26cfa58af6..775ebdcd4f 100644 --- a/schema-loader/src/main/java/com/scalar/db/schemaloader/TableSchema.java +++ b/schema-loader/src/main/java/com/scalar/db/schemaloader/TableSchema.java @@ -7,7 +7,6 @@ import com.google.gson.JsonObject; import com.scalar.db.api.Scan.Ordering.Order; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; import com.scalar.db.io.DataType; import com.scalar.db.storage.cassandra.CassandraAdmin; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; @@ -65,8 +64,8 @@ public TableSchema( String[] fullName = tableFullName.split("\\.", -1); if (fullName.length < 2) { throw new 
IllegalArgumentException( - CoreError.SCHEMA_LOADER_PARSE_ERROR_TABLE_NAME_MUST_CONTAIN_NAMESPACE_AND_TABLE - .buildMessage(tableFullName)); + SchemaLoaderError.PARSE_ERROR_TABLE_NAME_MUST_CONTAIN_NAMESPACE_AND_TABLE.buildMessage( + tableFullName)); } namespace = fullName[0]; tableName = fullName[1]; @@ -80,7 +79,7 @@ protected TableMetadata buildTableMetadata(String tableFullName, JsonObject tabl // Add partition keys if (!tableDefinition.keySet().contains(PARTITION_KEY)) { throw new IllegalArgumentException( - CoreError.SCHEMA_LOADER_PARSE_ERROR_PARTITION_KEY_MUST_BE_SPECIFIED.buildMessage( + SchemaLoaderError.PARSE_ERROR_PARTITION_KEY_MUST_BE_SPECIFIED.buildMessage( tableFullName)); } JsonArray partitionKeys = tableDefinition.get(PARTITION_KEY).getAsJsonArray(); @@ -108,7 +107,7 @@ protected TableMetadata buildTableMetadata(String tableFullName, JsonObject tabl tableBuilder.addClusteringKey(clusteringKey, ORDER_MAP.get(order.toUpperCase())); } else { throw new IllegalArgumentException( - CoreError.SCHEMA_LOADER_PARSE_ERROR_INVALID_CLUSTERING_KEY_FORMAT.buildMessage( + SchemaLoaderError.PARSE_ERROR_INVALID_CLUSTERING_KEY_FORMAT.buildMessage( tableFullName, clusteringKeyRaw.getAsString())); } } @@ -122,7 +121,7 @@ protected TableMetadata buildTableMetadata(String tableFullName, JsonObject tabl // Add columns if (!tableDefinition.keySet().contains(COLUMNS)) { throw new IllegalArgumentException( - CoreError.SCHEMA_LOADER_PARSE_ERROR_COLUMNS_NOT_SPECIFIED.buildMessage(tableFullName)); + SchemaLoaderError.PARSE_ERROR_COLUMNS_NOT_SPECIFIED.buildMessage(tableFullName)); } JsonObject columns = tableDefinition.get(COLUMNS).getAsJsonObject(); traveledKeys.add(COLUMNS); @@ -138,7 +137,7 @@ protected TableMetadata buildTableMetadata(String tableFullName, JsonObject tabl DataType dataType = DATA_MAP_TYPE.get(columnDataType.toUpperCase()); if (dataType == null) { throw new IllegalArgumentException( - CoreError.SCHEMA_LOADER_PARSE_ERROR_INVALID_COLUMN_TYPE.buildMessage( + SchemaLoaderError.PARSE_ERROR_INVALID_COLUMN_TYPE.buildMessage( tableFullName, columnName, column.getValue().getAsString())); } diff --git a/schema-loader/src/main/java/com/scalar/db/schemaloader/alteration/TableMetadataAlterationProcessor.java b/schema-loader/src/main/java/com/scalar/db/schemaloader/alteration/TableMetadataAlterationProcessor.java index 89a37fa12c..834bb4e990 100644 --- a/schema-loader/src/main/java/com/scalar/db/schemaloader/alteration/TableMetadataAlterationProcessor.java +++ b/schema-loader/src/main/java/com/scalar/db/schemaloader/alteration/TableMetadataAlterationProcessor.java @@ -1,8 +1,8 @@ package com.scalar.db.schemaloader.alteration; import com.scalar.db.api.TableMetadata; -import com.scalar.db.common.error.CoreError; import com.scalar.db.io.DataType; +import com.scalar.db.schemaloader.SchemaLoaderError; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; @@ -48,30 +48,27 @@ private void checkUnsupportedAlteration( String namespace, String table, TableMetadata oldMetadata, TableMetadata newMetadata) { if (!newMetadata.getPartitionKeyNames().equals(oldMetadata.getPartitionKeyNames())) { throw new UnsupportedOperationException( - CoreError.SCHEMA_LOADER_ALTERING_PARTITION_KEYS_NOT_SUPPORTED.buildMessage( - namespace, table)); + SchemaLoaderError.ALTERING_PARTITION_KEYS_NOT_SUPPORTED.buildMessage(namespace, table)); } if (!newMetadata.getClusteringKeyNames().equals(oldMetadata.getClusteringKeyNames())) { throw new UnsupportedOperationException( - 
CoreError.SCHEMA_LOADER_ALTERING_CLUSTERING_KEYS_NOT_SUPPORTED.buildMessage( - namespace, table)); + SchemaLoaderError.ALTERING_CLUSTERING_KEYS_NOT_SUPPORTED.buildMessage(namespace, table)); } if (!newMetadata.getClusteringOrders().equals(oldMetadata.getClusteringOrders())) { throw new UnsupportedOperationException( - CoreError.SCHEMA_LOADER_ALTERING_CLUSTERING_ORDER_NOT_SUPPORTED.buildMessage( - namespace, table)); + SchemaLoaderError.ALTERING_CLUSTERING_ORDER_NOT_SUPPORTED.buildMessage(namespace, table)); } for (String oldColumn : oldMetadata.getColumnNames()) { if (!newMetadata.getColumnNames().contains(oldColumn)) { throw new UnsupportedOperationException( - CoreError.SCHEMA_LOADER_DELETING_COLUMN_NOT_SUPPORTED.buildMessage( + SchemaLoaderError.DELETING_COLUMN_NOT_SUPPORTED.buildMessage( oldColumn, namespace, table)); } } for (String column : oldMetadata.getColumnNames()) { if (!oldMetadata.getColumnDataType(column).equals(newMetadata.getColumnDataType(column))) { throw new UnsupportedOperationException( - CoreError.SCHEMA_LOADER_ALTERING_COLUMN_DATA_TYPE_NOT_SUPPORTED.buildMessage( + SchemaLoaderError.ALTERING_COLUMN_DATA_TYPE_NOT_SUPPORTED.buildMessage( column, namespace, table)); } } diff --git a/schema-loader/src/main/java/com/scalar/db/schemaloader/command/SchemaLoaderCommand.java b/schema-loader/src/main/java/com/scalar/db/schemaloader/command/SchemaLoaderCommand.java index e8293fb35d..91a97c72fc 100644 --- a/schema-loader/src/main/java/com/scalar/db/schemaloader/command/SchemaLoaderCommand.java +++ b/schema-loader/src/main/java/com/scalar/db/schemaloader/command/SchemaLoaderCommand.java @@ -1,8 +1,8 @@ package com.scalar.db.schemaloader.command; import com.google.common.collect.ImmutableMap; -import com.scalar.db.common.error.CoreError; import com.scalar.db.schemaloader.SchemaLoader; +import com.scalar.db.schemaloader.SchemaLoaderError; import com.scalar.db.schemaloader.SchemaLoaderException; import com.scalar.db.storage.cassandra.CassandraAdmin; import com.scalar.db.storage.cassandra.CassandraAdmin.CompactionStrategy; @@ -142,8 +142,7 @@ private void createTables() throws SchemaLoaderException { private void repairAll() throws SchemaLoaderException { if (schemaFile == null) { throw new IllegalArgumentException( - CoreError.SCHEMA_LOADER_SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_REPAIR_ALL - .buildMessage()); + SchemaLoaderError.SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_REPAIR_ALL.buildMessage()); } Map options = prepareAllOptions(); SchemaLoader.repairAll(configPath, schemaFile, options, coordinator, replicationTables); @@ -152,7 +151,7 @@ private void repairAll() throws SchemaLoaderException { private void alterTables() throws SchemaLoaderException { if (schemaFile == null) { throw new IllegalArgumentException( - CoreError.SCHEMA_LOADER_SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_ALTER.buildMessage()); + SchemaLoaderError.SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_ALTER.buildMessage()); } Map options = prepareOptions(DynamoAdmin.NO_SCALING); SchemaLoader.alterTables(configPath, schemaFile, options); @@ -161,12 +160,12 @@ private void alterTables() throws SchemaLoaderException { private void importTables() throws SchemaLoaderException { if (schemaFile == null) { throw new IllegalArgumentException( - CoreError.SCHEMA_LOADER_SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_IMPORT.buildMessage()); + SchemaLoaderError.SPECIFYING_SCHEMA_FILE_REQUIRED_WHEN_USING_IMPORT.buildMessage()); } if (coordinator) { throw new IllegalArgumentException( - 
CoreError.SCHEMA_LOADER_SPECIFYING_COORDINATOR_WITH_IMPORT_NOT_ALLOWED.buildMessage()); + SchemaLoaderError.SPECIFYING_COORDINATOR_WITH_IMPORT_NOT_ALLOWED.buildMessage()); } Map options = prepareAllOptions(); SchemaLoader.importTables(configPath, schemaFile, options); diff --git a/schema-loader/src/test/java/com/scalar/db/schemaloader/SchemaLoaderErrorTest.java b/schema-loader/src/test/java/com/scalar/db/schemaloader/SchemaLoaderErrorTest.java new file mode 100644 index 0000000000..4f3e45c8f6 --- /dev/null +++ b/schema-loader/src/test/java/com/scalar/db/schemaloader/SchemaLoaderErrorTest.java @@ -0,0 +1,15 @@ +package com.scalar.db.schemaloader; + +import java.util.Arrays; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +public class SchemaLoaderErrorTest { + + @Test + public void checkDuplicateErrorCode() { + Assertions.assertThat( + Arrays.stream(SchemaLoaderError.values()).map(SchemaLoaderError::buildCode)) + .doesNotHaveDuplicates(); + } +} diff --git a/schema-loader/src/test/java/com/scalar/db/schemaloader/alteration/TableMetadataAlterationProcessorTest.java b/schema-loader/src/test/java/com/scalar/db/schemaloader/alteration/TableMetadataAlterationProcessorTest.java index cafaaab243..5c9bba98ee 100644 --- a/schema-loader/src/test/java/com/scalar/db/schemaloader/alteration/TableMetadataAlterationProcessorTest.java +++ b/schema-loader/src/test/java/com/scalar/db/schemaloader/alteration/TableMetadataAlterationProcessorTest.java @@ -69,8 +69,7 @@ public void computeAlteration_WithAddedClusteringKey_ShouldThrowIllegalArgumentE } @Test - public void - computeAlteration_WithModifiedClusteringKeySortOrdering_ShouldThrowIllegalArgumentException() { + public void computeAlteration_WithModifiedClusteringOrder_ShouldThrowIllegalArgumentException() { // Arrange TableMetadata oldMetadata = TableMetadata.newBuilder() @@ -91,7 +90,7 @@ public void computeAlteration_WithAddedClusteringKey_ShouldThrowIllegalArgumentE assertThatThrownBy( () -> processor.computeAlteration(NAMESPACE, TABLE, oldMetadata, newMetadata)) .isInstanceOf(UnsupportedOperationException.class) - .hasMessageContaining("clustering ordering"); + .hasMessageContaining("clustering order"); } @Test
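For reference, every call site in this diff consumes the new component-scoped enums the same way: buildMessage() fills the message template and buildCode() produces the code that the new duplicate-code tests check. The short sketch below is illustrative only and is not part of this change; the ErrorEnumUsageSketch class and the sample arguments are made up, and the exact code format returned by buildCode() is whatever ScalarDbError defines rather than something assumed here.

import com.scalar.db.dataloader.core.DataLoaderError;
import com.scalar.db.schemaloader.SchemaLoaderError;

// Illustrative only: exercises the same buildMessage()/buildCode() calls used by the
// call sites and by DataLoaderErrorTest/SchemaLoaderErrorTest above.
public class ErrorEnumUsageSketch {
  public static void main(String[] args) {
    // buildMessage() formats the enum's message template with the given arguments,
    // e.g. the CSV row/header pair that CsvImportProcessor passes on a malformed line.
    // The row and header values here are sample data, not taken from the diff.
    String message = DataLoaderError.CSV_DATA_MISMATCH.buildMessage("1,Alice", "id,name,age");

    // buildCode() returns the component-scoped error code ("DB-DATA-LOADER"/"DB-SCHEMA-LOADER"
    // components) whose uniqueness the new tests assert.
    String dataLoaderCode = DataLoaderError.CSV_DATA_MISMATCH.buildCode();
    String schemaLoaderCode = SchemaLoaderError.TABLE_NOT_FOUND.buildCode();

    System.out.println(dataLoaderCode + ": " + message);
    System.out.println(schemaLoaderCode + ": " + SchemaLoaderError.TABLE_NOT_FOUND.buildMessage("ns.tbl"));
  }
}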