Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
50 commits
Select commit Hold shift + click to select a range
827b522
Merge branch 'feat/data-loader/scaladb-dao' into feat/data-loader/cli…
inv-jishnu Jan 28, 2025
b700ba3
Initial changes
inv-jishnu Jan 28, 2025
09f8bc9
Changes
inv-jishnu Jan 28, 2025
8f974d3
Merge branch 'feat/data-loader/cli-utils' into feat/data-loader/cli-i…
inv-jishnu Feb 4, 2025
e42eac0
Merge branch 'feat/data-loader/cli-utils' into feat/data-loader/cli-i…
inv-jishnu Feb 4, 2025
379bdb0
Changes
inv-jishnu Feb 4, 2025
0989421
Merge branch 'feat/data-loader/cli-utils' into feat/data-loader/cli-i…
inv-jishnu Feb 11, 2025
9ee73b1
Updated test class to be package private [skip ci]
inv-jishnu Feb 12, 2025
067b8ba
Add dtos and other classes for task (#2446)
inv-jishnu Feb 13, 2025
40fde36
Changes from master merged after resolving conflicts
inv-jishnu Feb 13, 2025
0dd2956
Error messages and adding java docs
inv-jishnu Feb 13, 2025
25037cd
Bump the dependencies group with 5 updates (#2535)
dependabot[bot] Feb 18, 2025
0ef70d2
Add workflow_dispatch to release workflows (#2537)
brfrn169 Feb 19, 2025
75f8a9c
Add export tasks (#2450)
inv-jishnu Feb 20, 2025
9d3ffb1
Resolved conflicts and merged latest changes from master
inv-jishnu Feb 24, 2025
868d9b5
Column util correction
inv-jishnu Feb 26, 2025
e4cd7fe
Minor corrections
inv-jishnu Feb 26, 2025
559de5c
Bump the dependencies group across 1 directory with 2 updates (#2552)
dependabot[bot] Feb 27, 2025
74c6925
Add support for new date, time data types (#2550)
inv-jishnu Feb 28, 2025
d0a73a3
Changes
inv-jishnu Mar 4, 2025
bffa85b
gradle change reverted
inv-jishnu Mar 4, 2025
328afe5
Resolved conflicts and merged changes from master
inv-jishnu Mar 4, 2025
adc7e56
Spotless applied
inv-jishnu Mar 4, 2025
5b61876
Fixed unit test
inv-jishnu Mar 4, 2025
b9842be
Reverted try-catch changes
inv-jishnu Mar 5, 2025
16ae46d
Optimizations
inv-jishnu Mar 10, 2025
6b2536e
Error message changes and further optimizations
inv-jishnu Mar 10, 2025
6aea83c
Improve javadocs for the data loader import process
ypeckstadt Mar 17, 2025
5cee332
Bump software.amazon.awssdk:bom from 2.30.2 to 2.31.3 in the dependen…
dependabot[bot] Mar 19, 2025
d18e647
Adjust precision of mapped datatype for ScalarDB BIGINT on Oracle (#2…
Torch3333 Mar 21, 2025
45d66ec
Update ScalarDB dependency version to 3.15.2 in README (#2576)
brfrn169 Mar 25, 2025
851b691
Changes added
inv-jishnu Mar 25, 2025
c835730
Removed unused test util methods [skip ci]
inv-jishnu Mar 25, 2025
ff87a9a
Merge branch 'master' into feat/data-loader/import-process
inv-jishnu Mar 25, 2025
8f7adc8
Fixed spotbugs test issues
inv-jishnu Mar 25, 2025
9358c8b
Bump com.azure:azure-cosmos from 4.67.0 to 4.68.0 in the dependencies…
dependabot[bot] Mar 25, 2025
3aff018
reader data updated [skip ci]
inv-jishnu Mar 25, 2025
35a758f
Merge branch 'master' into feat/data-loader/import-process
inv-jishnu Mar 25, 2025
24bfa37
Changes
inv-jishnu Apr 1, 2025
3054d5a
Bump org.mariadb.jdbc:mariadb-java-client from 3.5.2 to 3.5.3 in the …
dependabot[bot] Apr 2, 2025
05ac8ff
Merge branch 'master' into feat/data-loader/import-process
inv-jishnu Apr 2, 2025
d9f239c
Thread executor changes
inv-jishnu Apr 3, 2025
723bd51
Changed few values to be configurable
inv-jishnu Apr 3, 2025
450aaea
Added new line
inv-jishnu Apr 4, 2025
aeaa08f
reverted config utils and add CLI options
inv-jishnu Apr 6, 2025
44bf503
Updated tests
inv-jishnu Apr 7, 2025
a5c0b91
Removed explicit passing of thread size and use it directly
inv-jishnu Apr 7, 2025
44aa54e
Import command options updated
inv-jishnu Apr 9, 2025
85a81fc
Merge branch 'feat/data-loader/import-process' into feat/data-loader/…
inv-jishnu Apr 9, 2025
1d9cfda
Import command options updated
inv-jishnu Apr 9, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/release-snapshot.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
name: Release SNAPSHOT

on:
workflow_dispatch:
push:
branches:
- master
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/release.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
name: Release

on:
workflow_dispatch:
push:
tags:
- "v[0-9]+.[0-9]+.[0-9]+"
Expand Down
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ You can install it in your application using your build tool such as Gradle and
To add a dependency on ScalarDB using Gradle, use the following:
```gradle
dependencies {
implementation 'com.scalar-labs:scalardb:3.15.1'
implementation 'com.scalar-labs:scalardb:3.15.2'
}
```

Expand All @@ -32,7 +32,7 @@ To add a dependency using Maven:
<dependency>
<groupId>com.scalar-labs</groupId>
<artifactId>scalardb</artifactId>
<version>3.15.1</version>
<version>3.15.2</version>
</dependency>
```

Expand Down
12 changes: 6 additions & 6 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -25,17 +25,17 @@ subprojects {
guavaVersion = '32.1.3-jre'
slf4jVersion = '1.7.36'
cassandraDriverVersion = '3.11.5'
azureCosmosVersion = '4.66.0'
azureCosmosVersion = '4.68.0'
jooqVersion = '3.14.16'
awssdkVersion = '2.30.2'
awssdkVersion = '2.31.3'
commonsDbcp2Version = '2.13.0'
mysqlDriverVersion = '8.4.0'
postgresqlDriverVersion = '42.7.5'
oracleDriverVersion = '23.6.0.24.10'
oracleDriverVersion = '23.7.0.25.01'
sqlserverDriverVersion = '12.8.1.jre8'
sqliteDriverVersion = '3.48.0.0'
yugabyteDriverVersion = '42.7.3-yb-2'
mariadDbDriverVersion = '3.5.1'
sqliteDriverVersion = '3.49.1.0'
yugabyteDriverVersion = '42.7.3-yb-3'
mariadDbDriverVersion = '3.5.3'
picocliVersion = '4.7.6'
commonsTextVersion = '1.13.0'
junitVersion = '5.11.4'
Expand Down
48 changes: 44 additions & 4 deletions core/src/main/java/com/scalar/db/common/error/CoreError.java
Original file line number Diff line number Diff line change
Expand Up @@ -804,24 +804,50 @@ public enum CoreError implements ScalarDbError {
""),
DATA_LOADER_MISSING_CLUSTERING_KEY_COLUMN(
Category.USER_ERROR,
"0175",
"0174",
"Missing required field or column mapping for clustering key %s",
"",
""),
DATA_LOADER_MISSING_PARTITION_KEY_COLUMN(
Category.USER_ERROR,
"0176",
"0175",
"Missing required field or column mapping for partition key %s",
"",
""),
DATA_LOADER_MISSING_COLUMN(
Category.USER_ERROR, "0177", "Missing field or column mapping for %s", "", ""),
Category.USER_ERROR, "0176", "Missing field or column mapping for %s", "", ""),
DATA_LOADER_VALUE_TO_STRING_CONVERSION_FAILED(
Category.USER_ERROR,
"0177",
"Something went wrong while converting the ScalarDB values to strings. The table metadata and Value datatype probably do not match. Details: %s",
"",
""),
DATA_LOADER_FILE_FORMAT_NOT_SUPPORTED(
Category.USER_ERROR, "0178", "The provided file format is not supported : %s", "", ""),
DATA_LOADER_COULD_NOT_FIND_PARTITION_KEY(
Category.USER_ERROR, "0179", "Could not find the partition key", "", ""),
DATA_LOADER_UPSERT_INSERT_MISSING_COLUMNS(
Category.USER_ERROR,
"0180",
"The source record needs to contain all fields if the UPSERT turns into an INSERT",
"",
""),
DATA_LOADER_DATA_ALREADY_EXISTS(Category.USER_ERROR, "0181", "Record already exists", "", ""),
DATA_LOADER_DATA_NOT_FOUND(Category.USER_ERROR, "0182", "Record was not found", "", ""),
DATA_LOADER_COULD_NOT_FIND_CLUSTERING_KEY(
Category.USER_ERROR, "0183", "Could not find the clustering key", "", ""),
DATA_LOADER_TABLE_METADATA_MISSING(
Category.USER_ERROR, "0184", "No table metadata found", "", ""),
DATA_LOADER_MISSING_SOURCE_FIELD(
Category.USER_ERROR,
"0178",
"0185",
"The data mapping source field '%s' for table '%s' is missing in the json data record",
"",
""),
DATA_LOADER_CSV_DATA_MISMATCH(
Category.USER_ERROR, "0186", "The CSV row: %s does not match header: %s.", "", ""),
DATA_LOADER_JSON_CONTENT_START_ERROR(
Category.USER_ERROR, "0187", "Expected JSON file content to be an array", "", ""),

//
// Errors for the concurrency error category
Expand Down Expand Up @@ -1085,6 +1111,20 @@ public enum CoreError implements ScalarDbError {
"Something went wrong while scanning. Are you sure you are running in the correct transaction mode? Details: %s",
"",
""),
DATA_LOADER_CSV_FILE_READ_FAILED(
Category.INTERNAL_ERROR, "0049", "Failed to read CSV file. Details: %s.", "", ""),
DATA_LOADER_CSV_FILE_HEADER_READ_FAILED(
Category.INTERNAL_ERROR, "0050", "Failed to CSV read header line. Details: %s.", "", ""),
DATA_LOADER_DATA_CHUNK_PROCESS_FAILED(
Category.INTERNAL_ERROR,
"0051",
"Data chunk processing was interrupted. Details: %s",
"",
""),
DATA_LOADER_JSON_FILE_READ_FAILED(
Category.INTERNAL_ERROR, "0052", "Failed to read JSON file. Details: %s.", "", ""),
DATA_LOADER_JSONLINES_FILE_READ_FAILED(
Category.INTERNAL_ERROR, "0053", "Failed to read JSON Lines file. Details: %s.", "", ""),

//
// Errors for the unknown transaction status error category
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ public boolean isDuplicateIndexError(SQLException e) {
public String getDataTypeForEngine(DataType scalarDbDataType) {
switch (scalarDbDataType) {
case BIGINT:
return "NUMBER(19)";
return "NUMBER(16)";
case BLOB:
return "RAW(2000)";
case BOOLEAN:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -525,7 +525,7 @@ public void createTableInternal_ForSqlServer_ShouldCreateTableAndIndexes() throw
public void createTableInternal_ForOracle_ShouldCreateTableAndIndexes() throws SQLException {
createTableInternal_ForX_CreateTableAndIndexes(
RdbEngine.ORACLE,
"CREATE TABLE \"my_ns\".\"foo_table\"(\"c3\" NUMBER(1),\"c1\" VARCHAR2(128),\"c4\" RAW(128),\"c2\" NUMBER(19),\"c5\" NUMBER(10),\"c6\" BINARY_DOUBLE,\"c7\" BINARY_FLOAT,\"c8\" DATE,\"c9\" TIMESTAMP(6),\"c10\" TIMESTAMP(3),\"c11\" TIMESTAMP(3) WITH TIME ZONE, PRIMARY KEY (\"c3\",\"c1\",\"c4\")) ROWDEPENDENCIES",
"CREATE TABLE \"my_ns\".\"foo_table\"(\"c3\" NUMBER(1),\"c1\" VARCHAR2(128),\"c4\" RAW(128),\"c2\" NUMBER(16),\"c5\" NUMBER(10),\"c6\" BINARY_DOUBLE,\"c7\" BINARY_FLOAT,\"c8\" DATE,\"c9\" TIMESTAMP(6),\"c10\" TIMESTAMP(3),\"c11\" TIMESTAMP(3) WITH TIME ZONE, PRIMARY KEY (\"c3\",\"c1\",\"c4\")) ROWDEPENDENCIES",
"ALTER TABLE \"my_ns\".\"foo_table\" INITRANS 3 MAXTRANS 255",
"CREATE UNIQUE INDEX \"my_ns.foo_table_clustering_order_idx\" ON \"my_ns\".\"foo_table\" (\"c3\" ASC,\"c1\" DESC,\"c4\" ASC)",
"CREATE INDEX \"index_my_ns_foo_table_c4\" ON \"my_ns\".\"foo_table\" (\"c4\")",
Expand All @@ -539,7 +539,7 @@ public void createTableInternal_ForOracle_ShouldCreateTableAndIndexes() throws S
when(config.getOracleVariableKeyColumnSize()).thenReturn(64);
createTableInternal_ForX_CreateTableAndIndexes(
new RdbEngineOracle(config),
"CREATE TABLE \"my_ns\".\"foo_table\"(\"c3\" NUMBER(1),\"c1\" VARCHAR2(64),\"c4\" RAW(64),\"c2\" NUMBER(19),\"c5\" NUMBER(10),\"c6\" BINARY_DOUBLE,\"c7\" BINARY_FLOAT,\"c8\" DATE,\"c9\" TIMESTAMP(6),\"c10\" TIMESTAMP(3),\"c11\" TIMESTAMP(3) WITH TIME ZONE, PRIMARY KEY (\"c3\",\"c1\",\"c4\")) ROWDEPENDENCIES",
"CREATE TABLE \"my_ns\".\"foo_table\"(\"c3\" NUMBER(1),\"c1\" VARCHAR2(64),\"c4\" RAW(64),\"c2\" NUMBER(16),\"c5\" NUMBER(10),\"c6\" BINARY_DOUBLE,\"c7\" BINARY_FLOAT,\"c8\" DATE,\"c9\" TIMESTAMP(6),\"c10\" TIMESTAMP(3),\"c11\" TIMESTAMP(3) WITH TIME ZONE, PRIMARY KEY (\"c3\",\"c1\",\"c4\")) ROWDEPENDENCIES",
"ALTER TABLE \"my_ns\".\"foo_table\" INITRANS 3 MAXTRANS 255",
"CREATE UNIQUE INDEX \"my_ns.foo_table_clustering_order_idx\" ON \"my_ns\".\"foo_table\" (\"c3\" ASC,\"c1\" DESC,\"c4\" ASC)",
"CREATE INDEX \"index_my_ns_foo_table_c4\" ON \"my_ns\".\"foo_table\" (\"c4\")",
Expand Down Expand Up @@ -643,7 +643,7 @@ public void createTableInternal_IfNotExistsForOracle_ShouldCreateTableAndIndexes
throws SQLException {
createTableInternal_IfNotExistsForX_createTableAndIndexesIfNotExists(
RdbEngine.ORACLE,
"CREATE TABLE \"my_ns\".\"foo_table\"(\"c3\" NUMBER(1),\"c1\" VARCHAR2(128),\"c4\" RAW(128),\"c2\" NUMBER(19),\"c5\" NUMBER(10),\"c6\" BINARY_DOUBLE,\"c7\" BINARY_FLOAT,\"c8\" DATE,\"c9\" TIMESTAMP(6),\"c10\" TIMESTAMP(3),\"c11\" TIMESTAMP(3) WITH TIME ZONE, PRIMARY KEY (\"c3\",\"c1\",\"c4\")) ROWDEPENDENCIES",
"CREATE TABLE \"my_ns\".\"foo_table\"(\"c3\" NUMBER(1),\"c1\" VARCHAR2(128),\"c4\" RAW(128),\"c2\" NUMBER(16),\"c5\" NUMBER(10),\"c6\" BINARY_DOUBLE,\"c7\" BINARY_FLOAT,\"c8\" DATE,\"c9\" TIMESTAMP(6),\"c10\" TIMESTAMP(3),\"c11\" TIMESTAMP(3) WITH TIME ZONE, PRIMARY KEY (\"c3\",\"c1\",\"c4\")) ROWDEPENDENCIES",
"ALTER TABLE \"my_ns\".\"foo_table\" INITRANS 3 MAXTRANS 255",
"CREATE UNIQUE INDEX \"my_ns.foo_table_clustering_order_idx\" ON \"my_ns\".\"foo_table\" (\"c3\" ASC,\"c1\" DESC,\"c4\" ASC)",
"CREATE INDEX \"index_my_ns_foo_table_c4\" ON \"my_ns\".\"foo_table\" (\"c4\")",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
package com.scalar.db.dataloader.cli;

/**
 * Error message templates used by the data loader CLI layer.
 *
 * <p>Messages containing {@code %s} placeholders are meant to be expanded with
 * {@link String#format(String, Object...)} before being shown to the user.
 *
 * <p>NOTE(review): the rest of this PR centralizes data-loader errors in
 * {@code CoreError} (DATA_LOADER_* entries); consider migrating these messages
 * there for consistency — TODO confirm with the module owners.
 */
public final class ErrorMessage {
  /** Shown when neither a namespace/table pair nor a control file was provided. */
  public static final String ERROR_IMPORT_TARGET_MISSING =
      "Missing option: either '--namespace' and '--table' or '--control-file' options must be specified.";
  /** Expanded with (file path, argument name). */
  public static final String ERROR_MISSING_FILE =
      "File '%s' specified by argument '%s' does not exist.";
  /** Expanded with the log directory path. */
  public static final String ERROR_LOG_DIRECTORY_WRITE_ACCESS =
      "Not able to write to the log directory %s";
  /** Expanded with the log directory path. */
  public static final String ERROR_CREATE_LOG_DIRECTORY_FAILED =
      "Failed to create the log directory %s";
  /** Expanded with the control file path. */
  public static final String ERROR_CONTROL_FILE_INVALID_JSON =
      "Not able to parse the %s control file";

  /** Constants-only holder; prevent instantiation. */
  private ErrorMessage() {}
}
Loading
Loading