import java.time.Duration;
import java.time.Instant;
+import java.util.Arrays;
import java.util.List;
import java.util.Optional;
+import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class AbstractJobSession extends BaseJobSession {
@@ -119,14 +121,8 @@ protected AbstractJobSession(CqlSession sourceSession, CqlSession astraSession,
        if (null == insertCols || insertCols.trim().isEmpty()) {
            insertCols = selectCols;
        }
-       String insertBinds = "";
-       for (String idCol : tableInfo.getKeyColumns()) {
-           if (insertBinds.isEmpty()) {
-               insertBinds = idCol + "= ?";
-           } else {
-               insertBinds += " and " + idCol + "= ?";
-           }
-       }
+       String insertBinds = String.join(" and ",
+               tableInfo.getKeyColumns().stream().map(col -> col + " = ?").collect(Collectors.toList()));

        String originSelectQry;
        if (!isJobMigrateRowsFromFile) {
@@ -144,32 +140,22 @@ protected AbstractJobSession(CqlSession sourceSession, CqlSession astraSession,
        astraSelectStatement = astraSession.prepare(targetSelectQry);

        isCounterTable = tableInfo.isCounterTable();
+       String fullInsertQuery;
        if (isCounterTable) {
-           String updateSelectMappingStr = Util.getSparkPropOr(sc, "spark.counterTable.cql.index", "0");
-           for (String updateSelectIndex : updateSelectMappingStr.split(",")) {
-               updateSelectMapping.add(Integer.parseInt(updateSelectIndex));
-           }
-
-           String counterTableUpdate = Util.getSparkProp(sc, "spark.counterTable.cql");
-           astraInsertStatement = astraSession.prepare(counterTableUpdate);
-           String fullInsertQuery = "update " + astraKeyspaceTable + " set (" + insertCols + ") VALUES (" + insertBinds + ")";
+           String updateCols = String.join(" , ",
+                   tableInfo.getOtherColumns().stream().map(s -> s + " += ?").collect(Collectors.toList()));
+           String updateKeys = String.join(" and ",
+                   tableInfo.getKeyColumns().stream().map(s -> s + " = ?").collect(Collectors.toList()));
+           fullInsertQuery = "update " + astraKeyspaceTable + " set " + updateCols + " where " + updateKeys;
        } else {
-           insertBinds = "";
-           for (String str : insertCols.split(",")) {
-               if (insertBinds.isEmpty()) {
-                   insertBinds += "?";
-               } else {
-                   insertBinds += ", ?";
-               }
-           }
-
-           String fullInsertQuery = "insert into " + astraKeyspaceTable + " (" + insertCols + ") VALUES (" + insertBinds + ")";
+           insertBinds = String.join(" , ", Arrays.stream(insertCols.split(",")).map(col -> " ?").collect(Collectors.toList()));
+           fullInsertQuery = "insert into " + astraKeyspaceTable + " (" + insertCols + ") VALUES (" + insertBinds + ")";
            if (!ttlWTCols.isEmpty()) {
                fullInsertQuery += " USING TTL ? AND TIMESTAMP ?";
            }
-           logger.info("PARAM -- Target insert query: {}", fullInsertQuery);
-           astraInsertStatement = astraSession.prepare(fullInsertQuery);
        }
+       logger.info("PARAM -- Target insert query: {}", fullInsertQuery);
+       astraInsertStatement = astraSession.prepare(fullInsertQuery);

        // Handle rows with blank values for 'timestamp' data-type in primary-key fields
        tsReplaceValStr = Util.getSparkPropOr(sc, "spark.target.replace.blankTimestampKeyUsingEpoch", "");
@@ -182,15 +168,21 @@ public BoundStatement bindInsert(PreparedStatement insertStatement, Row sourceRo
        BoundStatement boundInsertStatement = insertStatement.bind().setConsistencyLevel(writeConsistencyLevel);

        if (isCounterTable) {
-           for (int index = 0; index < tableInfo.getAllColumns().size(); index++) {
-               TypeInfo typeInfo = tableInfo.getColumns().get(index).getTypeInfo();
+           for (int index = 0; index < tableInfo.getNonKeyColumns().size(); index++) {
+               TypeInfo typeInfo = tableInfo.getNonKeyColumns().get(index).getTypeInfo();
+               int colIdx = tableInfo.getIdColumns().size() + index;
                // compute the counter delta if reading from astra for the difference
-               if (astraRow != null && index < (tableInfo.getColumns().size() - tableInfo.getIdColumns().size())) {
-                   boundInsertStatement = boundInsertStatement.set(index, (sourceRow.getLong(updateSelectMapping.get(index)) - astraRow.getLong(updateSelectMapping.get(index))), Long.class);
+               if (astraRow != null) {
+                   boundInsertStatement = boundInsertStatement.set(index, (sourceRow.getLong(colIdx) - astraRow.getLong(colIdx)), Long.class);
                } else {
-                   boundInsertStatement = boundInsertStatement.set(index, getData(typeInfo, updateSelectMapping.get(index), sourceRow), typeInfo.getTypeClass());
+                   boundInsertStatement = boundInsertStatement.set(index, sourceRow.getLong(colIdx), Long.class);
                }
            }
+           for (int index = 0; index < tableInfo.getIdColumns().size(); index++) {
+               TypeInfo typeInfo = tableInfo.getIdColumns().get(index).getTypeInfo();
+               int colIdx = tableInfo.getNonKeyColumns().size() + index;
+               boundInsertStatement = boundInsertStatement.set(colIdx, getData(typeInfo, index, sourceRow), typeInfo.getTypeClass());
+           }
        } else {
            int index = 0;
            for (index = 0; index < tableInfo.getAllColumns().size(); index++) {
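For illustration only, here is a minimal, self-contained sketch of the bind-marker construction this change switches to. The column names and the `ks.tbl` keyspace/table are made up; in the actual code they come from `TableInfo` and the configured source/target keyspace.table.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Sketch of building CQL bind markers with String.join over streamed column names.
public class BindMarkerSketch {
    public static void main(String[] args) {
        List<String> keyColumns = Arrays.asList("partition_key", "cluster_key"); // hypothetical key columns
        List<String> otherColumns = Arrays.asList("count_a", "count_b");         // hypothetical counter columns
        String insertCols = "partition_key,cluster_key,count_a,count_b";         // hypothetical projection list

        // Counter increments for the SET clause: "count_a += ? , count_b += ?"
        String updateCols = String.join(" , ",
                otherColumns.stream().map(c -> c + " += ?").collect(Collectors.toList()));
        // WHERE clause markers: "partition_key = ? and cluster_key = ?"
        String updateKeys = String.join(" and ",
                keyColumns.stream().map(c -> c + " = ?").collect(Collectors.toList()));
        String counterUpdate = "update ks.tbl set " + updateCols + " where " + updateKeys;

        // Plain insert: one "?" per selected column
        String insertBinds = String.join(" , ",
                Arrays.stream(insertCols.split(",")).map(c -> " ?").collect(Collectors.toList()));
        String insert = "insert into ks.tbl (" + insertCols + ") VALUES (" + insertBinds + ")";

        System.out.println(counterUpdate);
        System.out.println(insert);
    }
}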