@@ -200,7 +200,7 @@ spark.cdm.perfops.ratelimit.target 40000
200
200
# constant value to be used in its place, separate from the Constant
201
201
# Values feature.
202
202
# .custom
203
- # .writetime Default is 0 (diabled ). Timestamp value in microseconds to use as the
203
+ # .writetime Default is 0 (disabled). Timestamp value in microseconds to use as the
204
204
# WRITETIME for the target record. This is useful when the WRITETIME of
205
205
# the record in Origin cannot be determined (such as the only non-key
206
206
# columns are collections). This parameter allows a crude constant value
@@ -254,8 +254,10 @@ spark.cdm.perfops.ratelimit.target 40000
254
254
255
255
# ===========================================================================================================
256
256
# Java Filters are applied on the client node. Data must be pulled from the origin cluster and then filtered,
257
- # but this may have a lower impact on the production cluster than the Cassandra Filters.
258
- # node may need to do a lot more work than is normal.
257
+ # but this may have a lower impact on the production cluster than the Cassandra Filters. Java filters put
258
+ # load onto the Cassandra Data Migrator processing node, by sending more data from Cassandra.
259
+ # Cassandra filters put load on the Cassandra nodes, notably because Cassandra Data Migrator specifies
260
+ # ALLOW FILTERING, which could cause the coordinator node to perform a lot more work.
259
261
#
260
262
# spark.cdm.filter.java
261
263
# .token.percent : Percent (between 1 and 100) of the token in each Split that will be migrated.
@@ -299,6 +301,7 @@ spark.cdm.perfops.ratelimit.target 40000
299
301
# because some type values contain commas, e.g. lists, maps, sets, etc.
300
302
# -----------------------------------------------------------------------------------------------------------
301
303
# spark.cdm.feature.constantColumns.names const1,const2
304
+ # spark.cdm.feature.constantColumns.types
302
305
# spark.cdm.feature.constantColumns.values 'abcd',1234
303
306
# spark.cdm.feature.constantColumns.splitRegex ,
304
307
0 commit comments