
Commit ceea02d

Upgraded pom & formatting changes
1 parent 0bc122b commit ceea02d

3 files changed: 4 additions, 9 deletions

pom.xml

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 
 <groupId>datastax.astra.migrate</groupId>
 <artifactId>cassandra-data-migrator</artifactId>
-<version>1.4</version>
+<version>1.5</version>
 <packaging>jar</packaging>
 
 <properties>

src/main/scala/datastax/astra/migrate/AbstractJob.scala

Lines changed: 0 additions & 5 deletions
@@ -1,11 +1,6 @@
 package datastax.astra.migrate
 
 import com.datastax.spark.connector.cql.CassandraConnector
-import org.apache.spark.sql.SparkSession
-import org.slf4j.LoggerFactory
-
-import java.math.BigInteger
-import java.lang.Long
 
 class AbstractJob extends BaseJob {
 

src/main/scala/datastax/astra/migrate/BaseJob.scala

Lines changed: 3 additions & 3 deletions
@@ -41,11 +41,11 @@ class BaseJob extends App {
 val destinationKeyStorePassword = sc.getConf.get("spark.destination.keyStore.password", "")
 val destinationEnabledAlgorithms = sc.getConf.get("spark.destination.enabledAlgorithms", "")
 
-val minPartition = new BigInteger(sc.getConf.get("spark.source.minPartition","-9223372036854775808"))
-val maxPartition = new BigInteger(sc.getConf.get("spark.source.maxPartition","9223372036854775807"))
+val minPartition = new BigInteger(sc.getConf.get("spark.source.minPartition", "-9223372036854775808"))
+val maxPartition = new BigInteger(sc.getConf.get("spark.source.maxPartition", "9223372036854775807"))
 val coveragePercent = sc.getConf.get("spark.coveragePercent", "100")
 val splitSize = sc.getConf.get("spark.splitSize", "10000")
-val partitions = SplitPartitions.getRandomSubPartitions(BigInteger.valueOf(Long.parseLong(splitSize)), minPartition, maxPartition,Integer.parseInt(coveragePercent))
+val partitions = SplitPartitions.getRandomSubPartitions(BigInteger.valueOf(Long.parseLong(splitSize)), minPartition, maxPartition, Integer.parseInt(coveragePercent))
 
 protected def exitSpark() = {
 spark.stop()
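
For context on the lines touched by this whitespace-only formatting change: spark.source.minPartition and spark.source.maxPartition default to the full signed 64-bit token range. Below is a minimal standalone sketch (not project code; it assumes only a plain SparkConf and does not reproduce SplitPartitions) showing how those defaults resolve:

// Standalone sketch: the spark.source.minPartition / spark.source.maxPartition
// keys fall back to Long.MinValue .. Long.MaxValue when not set on the SparkConf.
import java.math.BigInteger
import org.apache.spark.SparkConf

object PartitionBoundsSketch extends App {
  val conf = new SparkConf()

  val minPartition = new BigInteger(conf.get("spark.source.minPartition", "-9223372036854775808"))
  val maxPartition = new BigInteger(conf.get("spark.source.maxPartition", "9223372036854775807"))

  // Sanity check: the string defaults equal Long.MinValue and Long.MaxValue.
  assert(minPartition == BigInteger.valueOf(Long.MinValue))
  assert(maxPartition == BigInteger.valueOf(Long.MaxValue))

  println(s"migrating token range $minPartition .. $maxPartition")
}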
