Commit e748c62

Merge branch 'feature/ssl-support' into feature/percent_random
* feature/ssl-support: Added support for custom cipher algorithms (needed when limited ciphers are allowed)
2 parents: ef4d35a + d004099 · commit e748c62

File tree: 2 files changed (+15, -4 lines)


src/main/scala/datastax/astra/migrate/AbstractJob.scala

Lines changed: 13 additions & 4 deletions

@@ -26,6 +26,7 @@ class AbstractJob extends App {
   val sourceTrustStoreType = sc.getConf.get("spark.migrate.source.trustStore.type", "JKS")
   val sourceKeyStorePath = sc.getConf.get("spark.migrate.source.keyStore.path", "")
   val sourceKeyStorePassword = sc.getConf.get("spark.migrate.source.keyStore.password", "")
+  val sourceEnabledAlgorithms = sc.getConf.get("spark.migrate.source.enabledAlgorithms", "")

   val destinationIsAstra = sc.getConf.get("spark.migrate.destination.isAstra", "true")
   val destinationScbPath = sc.getConf.get("spark.migrate.destination.scb", "")
@@ -38,17 +39,18 @@ class AbstractJob extends App {
   val destinationTrustStoreType = sc.getConf.get("spark.migrate.destination.trustStore.type", "JKS")
   val destinationKeyStorePath = sc.getConf.get("spark.migrate.destination.keyStore.path", "")
   val destinationKeyStorePassword = sc.getConf.get("spark.migrate.destination.keyStore.password", "")
+  val destinationEnabledAlgorithms = sc.getConf.get("spark.migrate.destination.enabledAlgorithms", "")

   val minPartition = new BigInteger(sc.getConf.get("spark.migrate.source.minPartition"))
   val maxPartition = new BigInteger(sc.getConf.get("spark.migrate.source.maxPartition"))

   val splitSize = sc.getConf.get("spark.migrate.splitSize", "10000")

   var sourceConnection = getConnection(true, sourceIsAstra, sourceScbPath, sourceHost, sourceUsername, sourcePassword, sourceReadConsistencyLevel,
-    sourceTrustStorePath, sourceTrustStorePassword, sourceTrustStoreType, sourceKeyStorePath, sourceKeyStorePassword);
+    sourceTrustStorePath, sourceTrustStorePassword, sourceTrustStoreType, sourceKeyStorePath, sourceKeyStorePassword, sourceEnabledAlgorithms);

   var destinationConnection = getConnection(false, destinationIsAstra, destinationScbPath, destinationHost, destinationUsername, destinationPassword, destinationReadConsistencyLevel,
-    destinationTrustStorePath, destinationTrustStorePassword, destinationTrustStoreType, destinationKeyStorePath, destinationKeyStorePassword);
+    destinationTrustStorePath, destinationTrustStorePassword, destinationTrustStoreType, destinationKeyStorePath, destinationKeyStorePassword, destinationEnabledAlgorithms);

   protected def exitSpark() = {
     spark.stop()
@@ -57,7 +59,7 @@ class AbstractJob extends App {

   private def getConnection(isSource: Boolean, isAstra: String, scbPath: String, host: String, username: String, password: String, readConsistencyLevel: String,
                             trustStorePath: String, trustStorePassword: String, trustStoreType: String,
-                            keyStorePath: String, keyStorePassword: String): CassandraConnector = {
+                            keyStorePath: String, keyStorePassword: String, enabledAlgorithms: String): CassandraConnector = {
     var connType: String = "Source"
     if (!isSource) {
       connType = "Destination"
@@ -74,17 +76,24 @@ class AbstractJob extends App {
     } else if (null != trustStorePath && !trustStorePath.trim.isEmpty) {
      abstractLogger.info(connType + ": Connected to Cassandra (or DSE) with SSL!");

+      // Use defaults when not provided
+      var enabledAlgorithmsVar = enabledAlgorithms
+      if (enabledAlgorithms == null || enabledAlgorithms.trim.isEmpty) {
+        enabledAlgorithmsVar = "TLS_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_AES_256_CBC_SHA"
+      }
+
      return CassandraConnector(sc.getConf
        .set("spark.cassandra.auth.username", username)
        .set("spark.cassandra.auth.password", password)
        .set("spark.cassandra.input.consistency.level", readConsistencyLevel)
        .set("spark.cassandra.connection.host", host)
        .set("spark.cassandra.connection.ssl.enabled", "true")
+        .set("spark.cassandra.connection.ssl.enabledAlgorithms", enabledAlgorithmsVar)
        .set("spark.cassandra.connection.ssl.trustStore.password", trustStorePassword)
        .set("spark.cassandra.connection.ssl.trustStore.path", trustStorePath)
        .set("spark.cassandra.connection.ssl.keyStore.password", keyStorePassword)
        .set("spark.cassandra.connection.ssl.keyStore.path", keyStorePath)
-       .set("spark.cassandra.connection.ssl.trustStore.type", trustStoreType)
+        .set("spark.cassandra.connection.ssl.trustStore.type", trustStoreType)
        .set("spark.cassandra.connection.ssl.clientAuth.enabled", "true")
      )
    } else {
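
The last hunk falls back to two default ciphers when the enabledAlgorithms setting is unset or blank. A minimal sketch of that same rule written with Option instead of a mutable var; the config key and default ciphers come from the diff above, while the helper name and standalone SparkConf are illustrative only, not part of the commit:

    import org.apache.spark.SparkConf

    // Illustrative helper: apply the "use defaults when not provided" rule
    // before the conf is handed to CassandraConnector.
    def withEnabledAlgorithms(conf: SparkConf, enabledAlgorithms: String): SparkConf = {
      val algorithms = Option(enabledAlgorithms)
        .map(_.trim)
        .filter(_.nonEmpty)
        .getOrElse("TLS_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_AES_256_CBC_SHA")
      conf.set("spark.cassandra.connection.ssl.enabledAlgorithms", algorithms)
    }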

src/resources/sparkConf.properties

Lines changed: 2 additions & 0 deletions
@@ -49,13 +49,15 @@ spark.migrate.source.maxWriteTimeStampFilter 9223372036854775
 #spark.migrate.source.trustStore.type JKS
 #spark.migrate.source.keyStore.path
 #spark.migrate.source.keyStore.password
+#spark.migrate.source.enabledAlgorithms TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA

 ####################### ONLY USE if SSL clientAuth is enabled on destination Cassandra/DSE #############################
 #spark.migrate.destination.trustStore.path
 #spark.migrate.destination.trustStore.password
 #spark.migrate.destination.trustStore.type JKS
 #spark.migrate.destination.keyStore.path
 #spark.migrate.destination.keyStore.password
+#spark.migrate.destination.enabledAlgorithms TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA

 ########################################################################################################################
 # Following are the supported data types and their corresponding [Cassandra data-types]
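
If a cluster only allows a restricted cipher suite, these keys would be uncommented and set to that list, for example (hypothetical values, reusing one of the default ciphers above):

    spark.migrate.source.enabledAlgorithms TLS_RSA_WITH_AES_256_CBC_SHA
    spark.migrate.destination.enabledAlgorithms TLS_RSA_WITH_AES_256_CBC_SHA

Left commented out (or blank), the job falls back to the two-cipher default applied in AbstractJob.scala.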

0 commit comments
