 
 public class AbstractJobSession extends BaseJobSession {
 
-
+    public Logger logger = LoggerFactory.getLogger(this.getClass().getName());
+
     protected AbstractJobSession(CqlSession sourceSession, CqlSession astraSession, SparkConf sparkConf) {
         this.sourceSession = sourceSession;
         this.astraSession = astraSession;
@@ -68,45 +69,69 @@ protected AbstractJobSession(CqlSession sourceSession, CqlSession astraSession,
             maxWriteTimeStampFilter = Long.parseLong(maxWriteTimeStampFilterStr);
         }
 
-        logger.info(" DEFAULT -- Write Batch Size: " + batchSize);
-        logger.info(" DEFAULT -- Source Keyspace Table: " + sourceKeyspaceTable);
-        logger.info(" DEFAULT -- Destination Keyspace Table: " + astraKeyspaceTable);
-        logger.info(" DEFAULT -- ReadRateLimit: " + readLimiter.getRate());
-        logger.info(" DEFAULT -- WriteRateLimit: " + writeLimiter.getRate());
-        logger.info(" DEFAULT -- WriteTimestampFilter: " + writeTimeStampFilter);
-        logger.info(" DEFAULT -- WriteTimestampFilterCols: " + writeTimeStampCols);
-        logger.info(" DEFAULT -- isPreserveTTLWritetime: " + isPreserveTTLWritetime);
-        logger.info(" DEFAULT -- TTLCols: " + ttlCols);
+        logger.info("PARAM -- Write Batch Size: " + batchSize);
+        logger.info("PARAM -- Source Keyspace Table: " + sourceKeyspaceTable);
+        logger.info("PARAM -- Destination Keyspace Table: " + astraKeyspaceTable);
+        logger.info("PARAM -- ReadRateLimit: " + readLimiter.getRate());
+        logger.info("PARAM -- WriteRateLimit: " + writeLimiter.getRate());
+        logger.info("PARAM -- WriteTimestampFilter: " + writeTimeStampFilter);
+        logger.info("PARAM -- WriteTimestampFilterCols: " + writeTimeStampCols);
+        logger.info("PARAM -- isPreserveTTLWritetime: " + isPreserveTTLWritetime);
+        logger.info("PARAM -- TTLCols: " + ttlCols);
+
+        String selectCols = sparkConf.get("spark.query.source");
+        String partionKey = sparkConf.get("spark.query.source.partitionKey");
+        selectColTypes = getTypes(sparkConf.get("spark.query.types"));
+        String idCols = sparkConf.get("spark.query.destination.id", "");
+        idColTypes = selectColTypes.subList(0, idCols.split(",").length);
+        String sourceSelectCondition = sparkConf.get("spark.query.condition", "");
+        sourceSelectStatement = sourceSession.prepare(
+                "select " + selectCols + " from " + sourceKeyspaceTable + " where token(" + partionKey.trim()
+                        + ") >= ? and token(" + partionKey.trim() + ") <= ? " + sourceSelectCondition + " ALLOW FILTERING");
 
-        hasRandomPartitioner = Boolean.parseBoolean(sparkConf.get("spark.source.hasRandomPartitioner", "false"));
+        String insertCols = sparkConf.get("spark.query.destination", "");
+        if (null == insertCols || insertCols.trim().isEmpty()) {
+            insertCols = selectCols;
+        }
+        String insertBinds = "";
+        for (String str : idCols.split(",")) {
+            if (insertBinds.isEmpty()) {
+                insertBinds = str + "= ?";
+            } else {
+                insertBinds += " and " + str + "= ?";
+            }
+        }
+        astraSelectStatement = astraSession.prepare(
+                "select " + insertCols + " from " + astraKeyspaceTable
+                        + " where " + insertBinds);
 
+        hasRandomPartitioner = Boolean.parseBoolean(sparkConf.get("spark.source.hasRandomPartitioner", "false"));
         isCounterTable = Boolean.parseBoolean(sparkConf.get("spark.counterTable", "false"));
-        selectColTypes = getTypes(sparkConf.get("spark.diff.select.types"));
-        String partionKey = sparkConf.get("spark.query.cols.partitionKey");
-        String idCols = sparkConf.get("spark.query.cols.id");
-        idColTypes = getTypes(sparkConf.get("spark.query.cols.id.types"));
+        if (isCounterTable) {
+            String updateSelectMappingStr = sparkConf.get("spark.counterTable.cql.index", "0");
+            for (String updateSelectIndex : updateSelectMappingStr.split(",")) {
+                updateSelectMapping.add(Integer.parseInt(updateSelectIndex));
+            }
 
-        String selectCols = sparkConf.get("spark.query.cols.select");
+            String counterTableUpdate = sparkConf.get("spark.counterTable.cql");
+            astraInsertStatement = astraSession.prepare(counterTableUpdate);
+        } else {
+            insertBinds = "";
+            for (String str : insertCols.split(",")) {
+                if (insertBinds.isEmpty()) {
+                    insertBinds += "?";
+                } else {
+                    insertBinds += ", ?";
+                }
+            }
 
-        String idBinds = "";
-        int count = 1;
-        for (String str : idCols.split(",")) {
-            if (count > 1) {
-                idBinds = idBinds + " and " + str + "= ?";
+            if (isPreserveTTLWritetime) {
+                astraInsertStatement = astraSession.prepare("insert into " + astraKeyspaceTable + " (" + insertCols + ") VALUES (" + insertBinds + ") using TTL ? and TIMESTAMP ?");
             } else {
-                idBinds = str + "= ?";
+                astraInsertStatement = astraSession.prepare("insert into " + astraKeyspaceTable + " (" + insertCols + ") VALUES (" + insertBinds + ")");
             }
-            count++;
         }
-
-        sourceSelectCondition = sparkConf.get("spark.query.cols.select.condition", "");
-        sourceSelectStatement = sourceSession.prepare(
-                "select " + selectCols + " from " + sourceKeyspaceTable + " where token(" + partionKey.trim()
-                        + ") >= ? and token(" + partionKey.trim() + ") <= ? " + sourceSelectCondition + " ALLOW FILTERING");
-
-        astraSelectStatement = astraSession.prepare(
-                "select " + selectCols + " from " + astraKeyspaceTable
-                        + " where " + idBinds);
     }
 
     public List<MigrateDataType> getTypes(String types) {
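And a rough sketch of how the prepared statements built in this constructor are typically driven. The copyRange name, the two-column table, and the long-valued token bounds (i.e. the non-RandomPartitioner path) are assumptions for illustration, not part of this commit.

// Hypothetical driver loop for one token slice (illustration only; not part of this change).
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.BoundStatement;
import com.datastax.oss.driver.api.core.cql.PreparedStatement;
import com.datastax.oss.driver.api.core.cql.ResultSet;
import com.datastax.oss.driver.api.core.cql.Row;

public class TokenSliceSketch {
    // Assumes Murmur3 token bounds as longs; the hasRandomPartitioner path would bind BigInteger instead.
    static void copyRange(CqlSession sourceSession, CqlSession astraSession,
                          PreparedStatement sourceSelectStatement,
                          PreparedStatement astraInsertStatement,
                          long minToken, long maxToken) {
        // Fill the two placeholders of "token(pk) >= ? and token(pk) <= ?" in the source select.
        BoundStatement select = sourceSelectStatement.bind(minToken, maxToken);
        ResultSet rows = sourceSession.execute(select);
        for (Row row : rows) {
            // Re-bind each source column positionally into the insert (two columns assumed here);
            // the "using TTL ? and TIMESTAMP ?" variant takes two extra trailing binds.
            BoundStatement insert = astraInsertStatement.bind(row.getObject(0), row.getObject(1));
            astraSession.execute(insert);
        }
    }
}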