
Commit 4cf9d14

panbingkun authored and dongjoon-hyun committed
[SPARK-49806][SQL][CONNECT] Remove redundant blank space after show in Scala and Connect clients
### What changes were proposed in this pull request?

This PR removes the redundant `blank space` after `show` in the `Scala` and `Connect` clients.

### Why are the changes needed?

To make the output of `show` in the `scala` and `connect` clients both end with `1 blank space`, so the style looks more `consistent`.

#### A. Scala (`spark-shell`)

- Before:

  <img width="1400" alt="image" src="https://github.com/user-attachments/assets/cccd947d-176e-49d1-a6e2-9553c267837a">

  **Note: other commands end with `1 blank space`, while `show` ends with `2 blank spaces`.**

- After:

  <img width="1397" alt="image" src="https://github.com/user-attachments/assets/83253433-5339-458f-9f14-5ae7c442e7a3">

#### B. Connect (`spark-connect-scala-client`)

- Before:

  <img width="911" alt="image" src="https://github.com/user-attachments/assets/fc625539-d14c-499f-95cc-7e545a8a1bbe">
  <img width="908" alt="image" src="https://github.com/user-attachments/assets/7fb93845-bbef-4001-9a0c-97a771852c4a">

  **Note: other commands end with `1 blank space`, while `show` ends with `3 blank spaces`.**

- After:

  <img width="914" alt="image" src="https://github.com/user-attachments/assets/c31aa97e-03a1-467e-abb2-5d1c07dcc156">
  <img width="911" alt="image" src="https://github.com/user-attachments/assets/7144a4e1-7552-4700-9b0e-658774236ab4">

### Does this PR introduce _any_ user-facing change?

Yes. Command outputs are now separated by `1 blank space`, and `show` is no longer the odd one out.

### How was this patch tested?

Manually checked.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes apache#48277 from panbingkun/SPARK-49806.

Authored-by: panbingkun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
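For reference, the mechanics behind the extra blank line are easy to reproduce outside Spark: the footer that `showString` builds used to end with `"\n"`, and the caller that prints the rendered table adds its own newline, so the output ended with two line breaks instead of one. The sketch below is a standalone illustration only; `renderTable` and its hard-coded sample table are made up, not Spark code.

```scala
// Standalone illustration (not Spark code): why a footer that already ends in
// "\n" produced an extra blank line once the whole string was printed.
object ExtraNewlineDemo {
  def renderTable(footerEndsWithNewline: Boolean): String = {
    val sb = new StringBuilder
    sb.append("+---+\n|  1|\n+---+\n")
    // Before SPARK-49806 the footer itself ended with "\n"; after, it does not.
    sb.append("only showing top 1 row")
    if (footerEndsWithNewline) sb.append("\n")
    sb.toString()
  }

  def main(args: Array[String]): Unit = {
    // println appends one newline of its own, so the old footer produced
    // "...row\n\n" (a visible blank line), the new one "...row\n".
    println(renderTable(footerEndsWithNewline = true))   // before: trailing blank line
    println(renderTable(footerEndsWithNewline = false))  // after: single trailing newline
  }
}
```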
1 parent 13e5504 commit 4cf9d14

6 files changed (+11, -20 lines)


connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala

Lines changed: 1 addition & 3 deletions
@@ -274,9 +274,7 @@ class Dataset[T] private[sql] (
    df.withResult { result =>
      assert(result.length == 1)
      assert(result.schema.size == 1)
-     // scalastyle:off println
-     println(result.toArray.head)
-     // scalastyle:on println
+     print(result.toArray.head)
    }
  }
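For the Connect client the fix is simply to stop adding a line break of its own: the string returned by the server is already fully rendered, so `print` emits it verbatim where `println` used to stack an extra newline on top. A minimal REPL sketch, with `rendered` as a made-up stand-in for `result.toArray.head`:

```scala
// Made-up stand-in for result.toArray.head, the table string the Connect
// server returns for df.show().
val rendered = "+---+\n|  1|\n+---+\n"

print(rendered)   // emits the table exactly as rendered (this PR)
println(rendered) // what the old code did: the table plus one extra newline
```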

python/pyspark/ml/fpm.py

Lines changed: 0 additions & 2 deletions
@@ -213,7 +213,6 @@ class FPGrowth(
    |      [q]|   2|
    +---------+----+
    only showing top 5 rows
-   ...
    >>> fpm.associationRules.sort("antecedent", "consequent").show(5)
    +----------+----------+----------+----+------------------+
    |antecedent|consequent|confidence|lift|           support|
@@ -225,7 +224,6 @@ class FPGrowth(
    |       [q]|       [t]|       1.0| 2.0|0.3333333333333333|
    +----------+----------+----------+----+------------------+
    only showing top 5 rows
-   ...
    >>> new_data = spark.createDataFrame([(["t", "s"], )], ["items"])
    >>> sorted(fpm.transform(new_data).first().newPrediction)
    ['x', 'y', 'z']

python/pyspark/sql/tests/test_dataframe.py

Lines changed: 1 addition & 2 deletions
@@ -671,8 +671,7 @@ def test_repr_behaviors(self):
            |+---+-----+
            ||  1|    1|
            |+---+-----+
-           |only showing top 1 row
-           |"""
+           |only showing top 1 row"""
            self.assertEqual(re.sub(pattern, "", expected3), df.__repr__())

        # test when eager evaluation is enabled and _repr_html_ will be called

sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala

Lines changed: 2 additions & 2 deletions
@@ -412,11 +412,11 @@ class Dataset[T] private[sql](
    // Print a footer
    if (vertical && rows.tail.isEmpty) {
      // In a vertical mode, print an empty row set explicitly
-     sb.append("(0 rows)\n")
+     sb.append("(0 rows)")
    } else if (hasMoreData) {
      // For Data that has more than "numRows" records
      val rowsString = if (numRows == 1) "row" else "rows"
-     sb.append(s"only showing top $numRows $rowsString\n")
+     sb.append(s"only showing top $numRows $rowsString")
    }

    sb.toString()
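With the footer no longer newline-terminated, `showString` ends flush after `only showing top N rows` (or `(0 rows)`), and the caller that prints the result supplies the single terminating line break. A rough REPL sketch of the resulting output, assuming the caller prints via `println` (the `body`/`footer` strings below are illustrative, not the exact Spark values):

```scala
// Illustrative pieces of a vertical showString result; the footer no longer
// carries its own trailing "\n".
val body   = "-RECORD 0----\n key   | 1 \n value | 1 \n"
val footer = "only showing top 1 row"

// println adds exactly one newline, so the printed output ends with a single
// line break instead of the extra blank line seen before this change.
println(body + footer)
```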

sql/core/src/test/scala/org/apache/spark/sql/DataFrameShowSuite.scala

Lines changed: 7 additions & 10 deletions
@@ -112,13 +112,12 @@ class DataFrameShowSuite extends QueryTest with SharedSparkSession {
        ||key|value|
        |+---+-----+
        |+---+-----+
-       |only showing top 0 rows
-       |""".stripMargin
+       |only showing top 0 rows""".stripMargin
    assert(testData.select($"*").showString(-1) === expectedAnswer)
  }

  test("showString(negative), vertical = true") {
-   val expectedAnswer = "(0 rows)\n"
+   val expectedAnswer = "(0 rows)"
    assert(testData.select($"*").showString(-1, vertical = true) === expectedAnswer)
  }

@@ -127,8 +126,7 @@ class DataFrameShowSuite extends QueryTest with SharedSparkSession {
        ||key|value|
        |+---+-----+
        |+---+-----+
-       |only showing top 0 rows
-       |""".stripMargin
+       |only showing top 0 rows""".stripMargin
    assert(testData.select($"*").showString(0) === expectedAnswer)
  }

@@ -145,7 +143,7 @@ class DataFrameShowSuite extends QueryTest with SharedSparkSession {
  }

  test("showString(0), vertical = true") {
-   val expectedAnswer = "(0 rows)\n"
+   val expectedAnswer = "(0 rows)"
    assert(testData.select($"*").showString(0, vertical = true) === expectedAnswer)
  }

@@ -286,16 +284,15 @@ class DataFrameShowSuite extends QueryTest with SharedSparkSession {
        |+---+-----+
        ||  1|    1|
        |+---+-----+
-       |only showing top 1 row
-       |""".stripMargin
+       |only showing top 1 row""".stripMargin
    assert(testData.select($"*").showString(1) === expectedAnswer)
  }

  test("SPARK-7319 showString, vertical = true") {
    val expectedAnswer = "-RECORD 0----\n" +
      " key   | 1 \n" +
      " value | 1 \n" +
-     "only showing top 1 row\n"
+     "only showing top 1 row"
    assert(testData.select($"*").showString(1, vertical = true) === expectedAnswer)
  }

@@ -337,7 +334,7 @@ class DataFrameShowSuite extends QueryTest with SharedSparkSession {
  }

  test("SPARK-7327 show with empty dataFrame, vertical = true") {
-   assert(testData.select($"*").filter($"key" < 0).showString(1, vertical = true) === "(0 rows)\n")
+   assert(testData.select($"*").filter($"key" < 0).showString(1, vertical = true) === "(0 rows)")
  }

  test("SPARK-18350 show with session local timezone") {

sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ConsoleWriteSupportSuite.scala

Lines changed: 0 additions & 1 deletion
@@ -102,7 +102,6 @@ class ConsoleWriteSupportSuite extends StreamTest {
        ||    2|
        |+-----+
        |only showing top 2 rows
-       |
        |""".stripMargin)
  }
