Skip to content

Commit 3b2bd0a

Browse files
committed
Added UDT support
1 parent 542d1ad commit 3b2bd0a

File tree

6 files changed

+21
-26
lines changed

6 files changed

+21
-26
lines changed

pom.xml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33

44
<groupId>com.datastax.spark.example</groupId>
55
<artifactId>migrate</artifactId>
6-
<version>0.10</version>
6+
<version>0.11</version>
77
<packaging>jar</packaging>
88

99
<properties>
@@ -189,4 +189,4 @@
189189
</plugin>
190190
</plugins>
191191
</build>
192-
</project>
192+
</project>

src/main/java/datastax/astra/migrate/AbstractJobSession.java

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
import com.datastax.oss.driver.api.core.cql.BoundStatement;
55
import com.datastax.oss.driver.api.core.cql.PreparedStatement;
66
import com.datastax.oss.driver.api.core.cql.Row;
7-
import com.datastax.oss.driver.api.core.data.TupleValue;
87
import com.datastax.oss.driver.shaded.guava.common.util.concurrent.RateLimiter;
98
import org.apache.log4j.Logger;
109
import org.apache.spark.SparkConf;
@@ -190,10 +189,6 @@ public Object getData(MigrateDataType dataType, int index, Row sourceRow) {
190189
if (data == null) {
191190
return new Long(0);
192191
}
193-
} else if (dataType.typeClass == TupleValue.class) {
194-
return sourceRow.getTupleValue(index);
195-
} else if (dataType.typeClass == Object.class) {
196-
return sourceRow.getObject(index);
197192
}
198193

199194
return sourceRow.get(index, dataType.typeClass);

src/main/java/datastax/astra/migrate/CopyJobSession.java

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,6 @@
22

33
import com.datastax.oss.driver.api.core.CqlSession;
44
import com.datastax.oss.driver.api.core.cql.*;
5-
import com.datastax.oss.driver.internal.core.metadata.token.Murmur3Token;
6-
import com.datastax.oss.driver.internal.core.metadata.token.RandomToken;
75
import org.apache.log4j.Logger;
86
import org.apache.spark.SparkConf;
97

src/main/java/datastax/astra/migrate/MigrateDataType.java

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
package datastax.astra.migrate;
22

33
import com.datastax.oss.driver.api.core.data.TupleValue;
4+
import com.datastax.oss.driver.api.core.data.UdtValue;
45

56
import java.math.BigDecimal;
67
import java.nio.ByteBuffer;
@@ -75,6 +76,8 @@ private Class getType(int type) {
7576
return BigDecimal.class;
7677
case 15:
7778
return LocalDate.class;
79+
case 16:
80+
return UdtValue.class;
7881
}
7982

8083
return Object.class;

src/main/java/datastax/astra/migrate/SplitPartitions.java

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,6 @@
11
package datastax.astra.migrate;
22

3-
import com.datastax.oss.driver.internal.core.metadata.token.RandomToken;
4-
import com.datastax.spark.connector.rdd.partitioner.dht.TokenFactory;
53
import org.apache.log4j.Logger;
6-
import scala.math.BigInt;
74

85
import java.io.Serializable;
96
import java.math.BigInteger;

src/resources/sparkConf.properties

Lines changed: 16 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -48,21 +48,23 @@ spark.migrate.astra.data.keyspaceTable test.grid_test2
4848

4949
########################################################################################################################
5050
# Following are the supported data types and their corresponding [Cassandra data-types]
51-
# 0: String
52-
# 1: Integer
53-
# 2: Long
54-
# 3: Double
55-
# 4: Instant (datetime)
56-
# 5: Map (separate type by %) - Example: 5%1%0 for map<int, text>
57-
# 6: List (separate type by %) - Example: 6%0 for list<text>
58-
# 7: ByteBuffer (Blob)
59-
# 8: Set (seperate type by %) - Example: 8%0 for set<text>
60-
# 9: UUID
61-
# 10: Boolean
62-
# 11: TupleValue
63-
# 12: Float
64-
# 13: TinyInt
51+
# 0: String [ascii, text, varchar]
52+
# 1: Integer [int, smallint]
53+
# 2: Long [bigint, counter]
54+
# 3: Double [double]
55+
# 4: Instant [time, timestamp]
56+
# 5: Map (separate type by %) [map] - Example: 5%1%0 for map<int, text>
57+
# 6: List (separate type by %) [list] - Example: 6%0 for list<text>
58+
# 7: ByteBuffer [blob]
59+
# 8: Set (separate type by %) [set] - Example: 8%0 for set<text>
60+
# 9: UUID [uuid, timeuuid]
61+
# 10: Boolean [boolean]
62+
# 11: TupleValue [tuple]
63+
# 12: Float [float]
64+
# 13: TinyInt [tinyint]
6565
# 14: BigDecimal [decimal]
66+
# 15: LocalDate [date]
67+
# 16: UDT [any user-defined type created using 'CREATE TYPE']
6668
#
6769
# Note:
6870
# Enable "spark.migrate.preserveTTLWriteTime" only if you want to migrate writetimes and TTLs

0 commit comments

Comments
 (0)