
Commit 792915c

chetkhatri authored and srowen committed
[SPARK-22830] Scala Coding style has been improved in Spark Examples
## What changes were proposed in this pull request?

* Under the Spark Scala examples, some of the syntax was written the Java way; it has been rewritten per the Scala style guide.
* Most of the changes apply to println() statements.

## How was this patch tested?

Since all proposed changes rewrite println statements in the Scala way, manual runs were used to verify the println output.

Author: chetkhatri <[email protected]>

Closes #20016 from chetkhatri/scala-style-spark-examples.
1 parent c89b431 commit 792915c

17 files changed: +54 additions, −58 deletions
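Every hunk below applies the same pattern: Java-style string concatenation with `+` is replaced by Scala's `s"..."` string interpolator. A minimal self-contained sketch of the before/after (the object and values here are illustrative, not taken from the diff):

```scala
object InterpolationSketch {
  def main(args: Array[String]): Unit = {
    val i = 3
    // Before: Java-style concatenation, as the examples were originally written.
    println("Iteration " + i)
    // After: the s-interpolator splices the value of i directly into the literal.
    println(s"Iteration $i")
  }
}
```

Both lines print `Iteration 3`; the interpolated form is the one the Scala style guide prefers, since the string reads left to right without operator noise.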

examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
Lines changed: 1 addition & 1 deletion

@@ -42,7 +42,7 @@ object BroadcastTest {
     val arr1 = (0 until num).toArray

     for (i <- 0 until 3) {
-      println("Iteration " + i)
+      println(s"Iteration $i")
       println("===========")
       val startTime = System.nanoTime
       val barr1 = sc.broadcast(arr1)

examples/src/main/scala/org/apache/spark/examples/DFSReadWriteTest.scala
Lines changed: 11 additions & 13 deletions

@@ -49,12 +49,10 @@ object DFSReadWriteTest {
   }

   private def printUsage(): Unit = {
-    val usage: String = "DFS Read-Write Test\n" +
-      "\n" +
-      "Usage: localFile dfsDir\n" +
-      "\n" +
-      "localFile - (string) local file to use in test\n" +
-      "dfsDir - (string) DFS directory for read/write tests\n"
+    val usage = """DFS Read-Write Test
+    |Usage: localFile dfsDir
+    |localFile - (string) local file to use in test
+    |dfsDir - (string) DFS directory for read/write tests""".stripMargin

     println(usage)
   }
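The new usage string leans on `stripMargin`, which deletes everything up to and including the leading `|` on each line, so the literal can stay indented with the surrounding code. A small sketch of the behavior (hypothetical object name, same text as the diff):

```scala
object StripMarginSketch {
  def main(args: Array[String]): Unit = {
    val usage = """DFS Read-Write Test
      |Usage: localFile dfsDir
      |localFile - (string) local file to use in test
      |dfsDir - (string) DFS directory for read/write tests""".stripMargin
    // Each line prints flush-left; the indentation and the '|' markers are gone.
    println(usage)
  }
}
```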
@@ -69,13 +67,13 @@ object DFSReadWriteTest {

     localFilePath = new File(args(i))
     if (!localFilePath.exists) {
-      System.err.println("Given path (" + args(i) + ") does not exist.\n")
+      System.err.println(s"Given path (${args(i)}) does not exist")
       printUsage()
       System.exit(1)
     }

     if (!localFilePath.isFile) {
-      System.err.println("Given path (" + args(i) + ") is not a file.\n")
+      System.err.println(s"Given path (${args(i)}) is not a file")
       printUsage()
       System.exit(1)
     }
@@ -108,7 +106,7 @@ object DFSReadWriteTest {
       .getOrCreate()

     println("Writing local file to DFS")
-    val dfsFilename = dfsDirPath + "/dfs_read_write_test"
+    val dfsFilename = s"$dfsDirPath/dfs_read_write_test"
     val fileRDD = spark.sparkContext.parallelize(fileContents)
     fileRDD.saveAsTextFile(dfsFilename)
@@ -127,11 +125,11 @@ object DFSReadWriteTest {
     spark.stop()

     if (localWordCount == dfsWordCount) {
-      println(s"Success! Local Word Count ($localWordCount) " +
-        s"and DFS Word Count ($dfsWordCount) agree.")
+      println(s"Success! Local Word Count $localWordCount and " +
+        s"DFS Word Count $dfsWordCount agree.")
     } else {
-      println(s"Failure! Local Word Count ($localWordCount) " +
-        s"and DFS Word Count ($dfsWordCount) disagree.")
+      println(s"Failure! Local Word Count $localWordCount " +
+        s"and DFS Word Count $dfsWordCount disagree.")
     }

   }

examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala
Lines changed: 1 addition & 1 deletion

@@ -39,7 +39,7 @@ object HdfsTest {
       val start = System.currentTimeMillis()
       for (x <- mapped) { x + 2 }
       val end = System.currentTimeMillis()
-      println("Iteration " + iter + " took " + (end-start) + " ms")
+      println(s"Iteration $iter took ${end-start} ms")
     }
     spark.stop()
   }
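Note `${end-start}` above: the `${...}` form interpolates an arbitrary expression, not just an identifier, so the subtraction happens inline when the string is built. A hypothetical standalone check:

```scala
object TimingSketch {
  def main(args: Array[String]): Unit = {
    val start = System.currentTimeMillis()
    Thread.sleep(10) // stand-in for the work being timed
    val end = System.currentTimeMillis()
    // ${end - start} evaluates the expression inside the interpolated string.
    println(s"Iteration took ${end - start} ms")
  }
}
```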

examples/src/main/scala/org/apache/spark/examples/LocalALS.scala
Lines changed: 1 addition & 2 deletions

@@ -129,8 +129,7 @@ object LocalALS {
       println(s"Iteration $iter:")
       ms = (0 until M).map(i => updateMovie(i, ms(i), us, R)).toArray
       us = (0 until U).map(j => updateUser(j, us(j), ms, R)).toArray
-      println("RMSE = " + rmse(R, ms, us))
-      println()
+      println(s"RMSE = ${rmse(R, ms, us)}")
     }
   }

examples/src/main/scala/org/apache/spark/examples/LocalFileLR.scala
Lines changed: 3 additions & 3 deletions

@@ -58,10 +58,10 @@ object LocalFileLR {

     // Initialize w to a random value
     val w = DenseVector.fill(D) {2 * rand.nextDouble - 1}
-    println("Initial w: " + w)
+    println(s"Initial w: $w")

     for (i <- 1 to ITERATIONS) {
-      println("On iteration " + i)
+      println(s"On iteration $i")
       val gradient = DenseVector.zeros[Double](D)
       for (p <- points) {
         val scale = (1 / (1 + math.exp(-p.y * (w.dot(p.x)))) - 1) * p.y
@@ -71,7 +71,7 @@ object LocalFileLR {
     }

     fileSrc.close()
-    println("Final w: " + w)
+    println(s"Final w: $w")
   }
 }
 // scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
Lines changed: 2 additions & 2 deletions

@@ -88,7 +88,7 @@ object LocalKMeans {
       kPoints.put(i, iter.next())
     }

-    println("Initial centers: " + kPoints)
+    println(s"Initial centers: $kPoints")

     while(tempDist > convergeDist) {
       val closest = data.map (p => (closestPoint(p, kPoints), (p, 1)))
@@ -114,7 +114,7 @@ object LocalKMeans {
       }
     }

-    println("Final centers: " + kPoints)
+    println(s"Final centers: $kPoints")
   }
 }
 // scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/LocalLR.scala
Lines changed: 3 additions & 3 deletions

@@ -61,10 +61,10 @@ object LocalLR {
     val data = generateData
     // Initialize w to a random value
     val w = DenseVector.fill(D) {2 * rand.nextDouble - 1}
-    println("Initial w: " + w)
+    println(s"Initial w: $w")

     for (i <- 1 to ITERATIONS) {
-      println("On iteration " + i)
+      println(s"On iteration $i")
       val gradient = DenseVector.zeros[Double](D)
       for (p <- data) {
         val scale = (1 / (1 + math.exp(-p.y * (w.dot(p.x)))) - 1) * p.y
@@ -73,7 +73,7 @@ object LocalLR {
       w -= gradient
     }

-    println("Final w: " + w)
+    println(s"Final w: $w")
   }
 }
 // scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/LocalPi.scala
Lines changed: 1 addition & 1 deletion

@@ -28,7 +28,7 @@ object LocalPi {
       val y = random * 2 - 1
       if (x*x + y*y <= 1) count += 1
     }
-    println("Pi is roughly " + 4 * count / 100000.0)
+    println(s"Pi is roughly ${4 * count / 100000.0}")
   }
 }
 // scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala
Lines changed: 1 addition & 1 deletion

@@ -59,7 +59,7 @@ object SimpleSkewedGroupByTest {
     // Enforce that everything has been calculated and in cache
     pairs1.count

-    println("RESULT: " + pairs1.groupByKey(numReducers).count)
+    println(s"RESULT: ${pairs1.groupByKey(numReducers).count}")
     // Print how many keys each reducer got (for debugging)
     // println("RESULT: " + pairs1.groupByKey(numReducers)
     //   .map{case (k,v) => (k, v.size)}

examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
Lines changed: 1 addition & 3 deletions

@@ -135,10 +135,8 @@ object SparkALS {
         .map(i => update(i, usb.value(i), msb.value, Rc.value.transpose()))
         .collect()
       usb = sc.broadcast(us) // Re-broadcast us because it was updated
-      println("RMSE = " + rmse(R, ms, us))
-      println()
+      println(s"RMSE = ${rmse(R, ms, us)}")
     }
-
     spark.stop()
   }
