@@ -30,6 +30,8 @@ public class AbstractJobSession extends BaseJobSession {
     protected List<String> ttlWTCols;
     protected String tsReplaceValStr;
     protected long tsReplaceVal;
+    protected Long customWritetime = 0l;
+    protected Long incrWritetime = 0l;
 
     protected AbstractJobSession(CqlSession sourceSession, CqlSession astraSession, SparkConf sc) {
         this(sourceSession, astraSession, sc, false);
@@ -67,7 +69,6 @@ protected AbstractJobSession(CqlSession sourceSession, CqlSession astraSession,
         logger.info("PARAM -- Destination Table: {}", astraKeyspaceTable.split("\\.")[1]);
         logger.info("PARAM -- ReadRateLimit: {}", readLimiter.getRate());
         logger.info("PARAM -- WriteRateLimit: {}", writeLimiter.getRate());
-        logger.info("PARAM -- WriteTimestampFilter: {}", writeTimeStampFilter);
 
         tableInfo = TableInfo.getInstance(sourceSession, sourceKeyspaceTable.split("\\.")[0],
                 sourceKeyspaceTable.split("\\.")[1], Util.getSparkPropOrEmpty(sc, "spark.query.origin"));
@@ -95,13 +96,21 @@ protected AbstractJobSession(CqlSession sourceSession, CqlSession astraSession,
         }
 
         String customWriteTimeStr =
-                Util.getSparkPropOr(sc, "spark.target.custom.writeTime", "0");
-        if (null != customWriteTimeStr && customWriteTimeStr.trim().length() > 1 && StringUtils.isNumeric(customWriteTimeStr.trim())) {
+                Util.getSparkPropOr(sc, "spark.target.writeTime.fixedValue", "0");
+        if (null != customWriteTimeStr && customWriteTimeStr.trim().length() > 0 && StringUtils.isNumeric(customWriteTimeStr.trim())) {
             customWritetime = Long.parseLong(customWriteTimeStr);
         }
 
+        String incrWriteTimeStr =
+                Util.getSparkPropOr(sc, "spark.target.writeTime.incrementBy", "0");
+        if (null != incrWriteTimeStr && incrWriteTimeStr.trim().length() > 0 && StringUtils.isNumeric(incrWriteTimeStr.trim())) {
+            incrWritetime = Long.parseLong(incrWriteTimeStr);
+        }
+
         logger.info("PARAM -- TTL-WriteTime Columns: {}", ttlWTCols);
-        logger.info("PARAM -- WriteTimestampFilter: {}", writeTimeStampFilter);
+        logger.info("PARAM -- WriteTimes Filter: {}", writeTimeStampFilter);
+        logger.info("PARAM -- WriteTime Custom Value: {}", customWritetime);
+        logger.info("PARAM -- WriteTime increment Value: {}", incrWritetime);
         if (writeTimeStampFilter) {
             logger.info("PARAM -- minWriteTimeStampFilter: {} datetime is {}", minWriteTimeStampFilter,
                     Instant.ofEpochMilli(minWriteTimeStampFilter / 1000));
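Note (not part of the diff): a minimal sketch of how the two write-time properties parsed above might be supplied, assuming they are read from the same SparkConf that is passed into AbstractJobSession; the class name and the sample values are illustrative only.

    import org.apache.spark.SparkConf;

    public class WriteTimeConfSketch {
        public static void main(String[] args) {
            SparkConf sc = new SparkConf()
                    // A value > 0 forces every migrated row to carry this exact writetime
                    // (the same microsecond epoch unit implied by the filter logging above).
                    .set("spark.target.writeTime.fixedValue", "0")
                    // Otherwise the largest source writetime is shifted by this amount;
                    // the default "0" leaves source writetimes unchanged.
                    .set("spark.target.writeTime.incrementBy", "1000");
        }
    }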
@@ -196,7 +205,7 @@ public BoundStatement bindInsert(PreparedStatement insertStatement, Row sourceRo
                 if (customWritetime > 0) {
                     boundInsertStatement = boundInsertStatement.set(index, customWritetime, Long.class);
                 } else {
-                    boundInsertStatement = boundInsertStatement.set(index, getLargestWriteTimeStamp(sourceRow), Long.class);
+                    boundInsertStatement = boundInsertStatement.set(index, getLargestWriteTimeStamp(sourceRow) + incrWritetime, Long.class);
                 }
             }
         }
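Note (not part of the diff): the branch above reduces to a single precedence rule, restated here as a hypothetical standalone helper; the method name and parameter list are illustrative, while customWritetime, incrWritetime and getLargestWriteTimeStamp(sourceRow) are the names used in the diff.

    // A positive fixed value overrides the source writetime entirely; otherwise the
    // largest writetime found on the source row is shifted by the configured increment
    // (0 by default, which keeps the original writetime).
    static long effectiveWriteTime(long customWritetime, long incrWritetime, long largestSourceWriteTime) {
        return (customWritetime > 0) ? customWritetime : largestSourceWriteTime + incrWritetime;
    }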