@@ -3,7 +3,7 @@ package com.datastax.spark.connector
 import com.datastax.spark.connector.ccm.CcmBridge
 import com.datastax.spark.connector.cluster.DefaultCluster
 
-import scala.collection.JavaConversions._
+import scala.jdk.CollectionConverters._
 import scala.concurrent.Future
 import org.apache.spark.rdd.RDD
 import com.datastax.spark.connector.cql.CassandraConnector
@@ -117,9 +117,9 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
 
     val rows = results.all()
     assert(rows.size() == 3)
-    assert(rows.exists(row ⇒ row.getString("value") == "one" && row.getInt("key") == 1))
-    assert(rows.exists(row ⇒ row.getString("value") == "two" && row.getInt("key") == 2))
-    assert(rows.exists(row ⇒ row.getString("value") == "three" && row.getInt("key") == 3))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "one" && row.getInt("key") == 1))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "two" && row.getInt("key") == 2))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "three" && row.getInt("key") == 3))
   }
 
 
@@ -140,9 +140,9 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
 
     val rows = results.all()
     assert(rows.size() == 3)
-    assert(rows.exists(row ⇒ row.getString("value") == "one" && row.getInt("key") == 1))
-    assert(rows.exists(row ⇒ row.getString("value") == "two" && row.getInt("key") == 2))
-    assert(rows.exists(row ⇒ row.getString("value") == "three" && row.getInt("key") == 3))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "one" && row.getInt("key") == 1))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "two" && row.getInt("key") == 2))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "three" && row.getInt("key") == 3))
   }
 
   it should "allow to save beans with transient fields to Cassandra" in {
@@ -162,9 +162,9 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
 
     val rows = results.all()
     assert(rows.size() == 3)
-    assert(rows.exists(row ⇒ row.getString("value") == "one" && row.getInt("key") == 1))
-    assert(rows.exists(row ⇒ row.getString("value") == "two" && row.getInt("key") == 2))
-    assert(rows.exists(row ⇒ row.getString("value") == "three" && row.getInt("key") == 3))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "one" && row.getInt("key") == 1))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "two" && row.getInt("key") == 2))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "three" && row.getInt("key") == 3))
   }
 
   it should "allow to save beans with inherited fields to Cassandra" in {
@@ -184,7 +184,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
     val rows = results.all()
 
     rows should have size 3
-    rows.map(row => (row.getString("value"), row.getInt("key"), row.getString("sub_class_field"))).toSet shouldBe Set(
+    rows.asScala.map(row => (row.getString("value"), row.getInt("key"), row.getString("sub_class_field"))).toSet shouldBe Set(
       ("one", 1, "a"),
       ("two", 2, "b"),
       ("three", 3, "c")
@@ -210,9 +210,9 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
 
     val rows = results.all()
     assert(rows.size() == 3)
-    assert(rows.exists(row ⇒ row.getString("value") == "one" && row.getInt("key") == 1))
-    assert(rows.exists(row ⇒ row.getString("value") == "two" && row.getInt("key") == 2))
-    assert(rows.exists(row ⇒ row.getString("value") == "three" && row.getInt("key") == 3))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "one" && row.getInt("key") == 1))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "two" && row.getInt("key") == 2))
+    assert(rows.asScala.exists(row ⇒ row.getString("value") == "three" && row.getInt("key") == 3))
   }
 
   it should "allow to read rows as Tuple1" in {
@@ -222,7 +222,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
     )).select(
       "c1"
     )
-      .collect().head
+      .collect().asScala.head
     tuple shouldBe Tuple1(
       1: Integer
     )
@@ -237,7 +237,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple2(
      1: Integer,
      "2"
@@ -254,7 +254,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple3(
      1: Integer,
      "2",
@@ -273,7 +273,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple4(
      1: Integer,
      "2",
@@ -294,7 +294,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple5(
      1: Integer,
      "2",
@@ -317,7 +317,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple6(
      1: Integer,
      "2",
@@ -342,7 +342,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple7(
      1: Integer,
      "2",
@@ -369,7 +369,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple8(
      1: Integer,
      "2",
@@ -398,7 +398,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple9(
      1: Integer,
      "2",
@@ -429,7 +429,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple10(
      1: Integer,
      "2",
@@ -462,7 +462,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple11(
      1: Integer,
      "2",
@@ -497,7 +497,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11", "c12"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple12(
      1: Integer,
      "2",
@@ -534,7 +534,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11", "c12", "c13"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple13(
      1: Integer,
      "2",
@@ -573,7 +573,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11", "c12", "c13", "c14"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple14(
      1: Integer,
      "2",
@@ -614,7 +614,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11", "c12", "c13", "c14", "c15"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple15(
      1: Integer,
      "2",
@@ -657,7 +657,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11", "c12", "c13", "c14", "c15", "c16"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple16(
      1: Integer,
      "2",
@@ -702,7 +702,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11", "c12", "c13", "c14", "c15", "c16", "c17"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple17(
      1: Integer,
      "2",
@@ -749,7 +749,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11", "c12", "c13", "c14", "c15", "c16", "c17", "c18"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple18(
      1: Integer,
      "2",
@@ -798,7 +798,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11", "c12", "c13", "c14", "c15", "c16", "c17", "c18", "c19"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple19(
      1: Integer,
      "2",
@@ -849,7 +849,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11", "c12", "c13", "c14", "c15", "c16", "c17", "c18", "c19", "c20"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple20(
      1: Integer,
      "2",
@@ -902,7 +902,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11", "c12", "c13", "c14", "c15", "c16", "c17", "c18", "c19", "c20", "c21"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple21(
      1: Integer,
      "2",
@@ -957,7 +957,7 @@ class CassandraJavaUtilSpec extends SparkCassandraITFlatSpecBase with DefaultClu
    )).select(
      "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "c11", "c12", "c13", "c14", "c15", "c16", "c17", "c18", "c19", "c20", "c21", "c22"
    )
-      .collect().head
+      .collect().asScala.head
    tuple shouldBe Tuple22(
      1: Integer,
      "2",