File tree Expand file tree Collapse file tree 4 files changed +18
-0
lines changed
main/java/com/datastax/cdm
test/java/com/datastax/cdm/schema Expand file tree Collapse file tree 4 files changed +18
-0
lines changed Original file line number Diff line number Diff line change @@ -80,6 +80,7 @@ public enum PropertyType {
80
80
public static final String ORIGIN_WRITETIME_NAMES = "spark.cdm.schema.origin.column.writetime.names" ;
81
81
public static final String ALLOW_COLL_FOR_WRITETIME_TTL_CALC = "spark.cdm.schema.ttlwritetime.calc.useCollections" ;
82
82
83
+ public static final String ORIGIN_COLUMN_NAMES_TO_SKIP = "spark.cdm.schema.origin.column.skip" ;
83
84
public static final String ORIGIN_COLUMN_NAMES_TO_TARGET = "spark.cdm.schema.origin.column.names.to.target" ;
84
85
85
86
static {
@@ -93,6 +94,7 @@ public enum PropertyType {
93
94
defaults .put (ORIGIN_WRITETIME_AUTO , "true" );
94
95
types .put (ALLOW_COLL_FOR_WRITETIME_TTL_CALC , PropertyType .BOOLEAN );
95
96
defaults .put (ALLOW_COLL_FOR_WRITETIME_TTL_CALC , "false" );
97
+ types .put (ORIGIN_COLUMN_NAMES_TO_SKIP , PropertyType .STRING_LIST );
96
98
types .put (ORIGIN_COLUMN_NAMES_TO_TARGET , PropertyType .STRING_LIST );
97
99
}
98
100
Original file line number Diff line number Diff line change 26
26
import java .util .stream .Collectors ;
27
27
import java .util .stream .IntStream ;
28
28
29
+ import org .apache .commons .collections4 .CollectionUtils ;
29
30
import org .apache .commons .lang3 .StringUtils ;
30
31
import org .slf4j .Logger ;
31
32
import org .slf4j .LoggerFactory ;
@@ -470,7 +471,9 @@ private void setCqlMetadata(CqlSession cqlSession) {
470
471
}
471
472
}
472
473
String columnName = extractColumnName ;
474
+ List <String > skipColumns = propertyHelper .getStringList (KnownProperties .ORIGIN_COLUMN_NAMES_TO_SKIP );
473
475
this .cqlAllColumns = tableMetadata .getColumns ().values ().stream ().filter (md -> !this .cqlAllColumns .contains (md ))
476
+ .filter (md -> CollectionUtils .isEmpty (skipColumns ) || !skipColumns .contains (md .getName ().asCql (true )))
474
477
.filter (md -> !extractJsonExclusive || md .getName ().asCql (true ).endsWith (columnName ))
475
478
.collect (Collectors .toCollection (() -> this .cqlAllColumns ));
476
479
Original file line number Diff line number Diff line change @@ -96,6 +96,10 @@ spark.cdm.connect.target.password cassandra
96
96
# Other Parameters:
97
97
# spark.cdm.schema
98
98
# .origin.column
99
+ # .skip : Default is empty. A comma-separated list of origin column names that should be skipped during migration.
100
+ # Only use this property when both the origin and target tables contain the listed columns.
101
+ # If the target table does not contain a listed column, CDM detects this and skips it automatically.
102
+ #
99
103
# .names.to.target : Default is empty. If column names are changed between Origin and Target, then
100
104
# this map-like list provides a mechanism to associate the two. The format is
101
105
# origin_column_name:target_column_name. The list is comma-separated. Only renamed
Original file line number Diff line number Diff line change 16
16
package com .datastax .cdm .schema ;
17
17
18
18
import static org .junit .jupiter .api .Assertions .assertEquals ;
19
+ import static org .junit .jupiter .api .Assertions .assertNull ;
19
20
20
21
import org .junit .jupiter .api .Test ;
21
22
@@ -39,4 +40,12 @@ void testCL() {
39
40
assertEquals (CqlTable .mapToConsistencyLevel ("all" ), ConsistencyLevel .ALL );
40
41
}
41
42
43
+ @ Test
44
+ void testformatName () {
45
+ assertNull (CqlTable .formatName (null ));
46
+ assertEquals ("" , CqlTable .formatName ("" ));
47
+ assertEquals ("\" KS123ks.T123able\" " , CqlTable .formatName ("KS123ks.T123able" ));
48
+ assertEquals ("\" Ks.Table\" " , CqlTable .formatName ("\" Ks.Table\" " ));
49
+ assertEquals ("\" ks.table\" " , CqlTable .formatName ("ks.table" ));
50
+ }
42
51
}
You can’t perform that action at this time.
0 commit comments