This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 4cf9d14d84c0 [SPARK-49806][SQL][CONNECT] Remove redundant `blank 
space` after `show` in `Scala` and `Connect` clients
4cf9d14d84c0 is described below

commit 4cf9d14d84c0061ae2476d97256daf0b2062ed54
Author: panbingkun <[email protected]>
AuthorDate: Fri Oct 4 10:08:18 2024 -0700

    [SPARK-49806][SQL][CONNECT] Remove redundant `blank space` after `show` in 
`Scala` and `Connect` clients
    
    ### What changes were proposed in this pull request?
    This PR aims to remove the redundant `blank space` after `show` in the
`Scala` and `Connect` clients.
    
    ### Why are the changes needed?
    Make the output of `show` in the `scala` and `connect` clients both end with
`1 blank space`, so the two look more `consistent` in style.
    
    #### A.Scala (`spark-shell`)
    - Before:
    <img width="1400" alt="image" 
src="https://github.com/user-attachments/assets/cccd947d-176e-49d1-a6e2-9553c267837a";>
    
    **Note: Other commands end with `1 blank space`, while `show` ends with `2
blank spaces`.**
    
    - After:
    <img width="1397" alt="image" 
src="https://github.com/user-attachments/assets/83253433-5339-458f-9f14-5ae7c442e7a3";>
    
    #### B.Connect (`spark-connect-scala-client`)
    - Before:
    <img width="911" alt="image" 
src="https://github.com/user-attachments/assets/fc625539-d14c-499f-95cc-7e545a8a1bbe";>
    <img width="908" alt="image" 
src="https://github.com/user-attachments/assets/7fb93845-bbef-4001-9a0c-97a771852c4a";>
    
    **Note: Other commands end with `1 blank space`, while `show` ends with `3
blank spaces`.**
    
    - After:
    <img width="914" alt="image" 
src="https://github.com/user-attachments/assets/c31aa97e-03a1-467e-abb2-5d1c07dcc156";>
    <img width="911" alt="image" 
src="https://github.com/user-attachments/assets/7144a4e1-7552-4700-9b0e-658774236ab4";>
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, commands are now separated by `1 blank space`, and `show` is
no longer abrupt.
    
    ### How was this patch tested?
    Manually checked.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #48277 from panbingkun/SPARK-49806.
    
    Authored-by: panbingkun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../src/main/scala/org/apache/spark/sql/Dataset.scala   |  4 +---
 python/pyspark/ml/fpm.py                                |  2 --
 python/pyspark/sql/tests/test_dataframe.py              |  3 +--
 .../src/main/scala/org/apache/spark/sql/Dataset.scala   |  4 ++--
 .../scala/org/apache/spark/sql/DataFrameShowSuite.scala | 17 +++++++----------
 .../streaming/sources/ConsoleWriteSupportSuite.scala    |  1 -
 6 files changed, 11 insertions(+), 20 deletions(-)

diff --git 
a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
 
b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
index 6bae04ef8023..a368da2aaee6 100644
--- 
a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ 
b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -274,9 +274,7 @@ class Dataset[T] private[sql] (
     df.withResult { result =>
       assert(result.length == 1)
       assert(result.schema.size == 1)
-      // scalastyle:off println
-      println(result.toArray.head)
-      // scalastyle:on println
+      print(result.toArray.head)
     }
   }
 
diff --git a/python/pyspark/ml/fpm.py b/python/pyspark/ml/fpm.py
index cba4219a0694..72fcfccf19e4 100644
--- a/python/pyspark/ml/fpm.py
+++ b/python/pyspark/ml/fpm.py
@@ -213,7 +213,6 @@ class FPGrowth(
     |      [q]|   2|
     +---------+----+
     only showing top 5 rows
-    ...
     >>> fpm.associationRules.sort("antecedent", "consequent").show(5)
     +----------+----------+----------+----+------------------+
     |antecedent|consequent|confidence|lift|           support|
@@ -225,7 +224,6 @@ class FPGrowth(
     |       [q]|       [t]|       1.0| 2.0|0.3333333333333333|
     +----------+----------+----------+----+------------------+
     only showing top 5 rows
-    ...
     >>> new_data = spark.createDataFrame([(["t", "s"], )], ["items"])
     >>> sorted(fpm.transform(new_data).first().newPrediction)
     ['x', 'y', 'z']
diff --git a/python/pyspark/sql/tests/test_dataframe.py 
b/python/pyspark/sql/tests/test_dataframe.py
index 8ec0839ec1fe..b5af00a4e7b7 100644
--- a/python/pyspark/sql/tests/test_dataframe.py
+++ b/python/pyspark/sql/tests/test_dataframe.py
@@ -671,8 +671,7 @@ class DataFrameTestsMixin:
                     |+---+-----+
                     ||  1|    1|
                     |+---+-----+
-                    |only showing top 1 row
-                    |"""
+                    |only showing top 1 row"""
                     self.assertEqual(re.sub(pattern, "", expected3), 
df.__repr__())
 
         # test when eager evaluation is enabled and _repr_html_ will be called
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 18fc5787a158..58006837a3a6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -412,11 +412,11 @@ class Dataset[T] private[sql](
     // Print a footer
     if (vertical && rows.tail.isEmpty) {
       // In a vertical mode, print an empty row set explicitly
-      sb.append("(0 rows)\n")
+      sb.append("(0 rows)")
     } else if (hasMoreData) {
       // For Data that has more than "numRows" records
       val rowsString = if (numRows == 1) "row" else "rows"
-      sb.append(s"only showing top $numRows $rowsString\n")
+      sb.append(s"only showing top $numRows $rowsString")
     }
 
     sb.toString()
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameShowSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameShowSuite.scala
index d728cc5810a2..86d3ca45fd08 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameShowSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameShowSuite.scala
@@ -112,13 +112,12 @@ class DataFrameShowSuite extends QueryTest with 
SharedSparkSession {
                            ||key|value|
                            |+---+-----+
                            |+---+-----+
-                           |only showing top 0 rows
-                           |""".stripMargin
+                           |only showing top 0 rows""".stripMargin
     assert(testData.select($"*").showString(-1) === expectedAnswer)
   }
 
   test("showString(negative), vertical = true") {
-    val expectedAnswer = "(0 rows)\n"
+    val expectedAnswer = "(0 rows)"
     assert(testData.select($"*").showString(-1, vertical = true) === 
expectedAnswer)
   }
 
@@ -127,8 +126,7 @@ class DataFrameShowSuite extends QueryTest with 
SharedSparkSession {
                            ||key|value|
                            |+---+-----+
                            |+---+-----+
-                           |only showing top 0 rows
-                           |""".stripMargin
+                           |only showing top 0 rows""".stripMargin
     assert(testData.select($"*").showString(0) === expectedAnswer)
   }
 
@@ -145,7 +143,7 @@ class DataFrameShowSuite extends QueryTest with 
SharedSparkSession {
   }
 
   test("showString(0), vertical = true") {
-    val expectedAnswer = "(0 rows)\n"
+    val expectedAnswer = "(0 rows)"
     assert(testData.select($"*").showString(0, vertical = true) === 
expectedAnswer)
   }
 
@@ -286,8 +284,7 @@ class DataFrameShowSuite extends QueryTest with 
SharedSparkSession {
                            |+---+-----+
                            ||  1|    1|
                            |+---+-----+
-                           |only showing top 1 row
-                           |""".stripMargin
+                           |only showing top 1 row""".stripMargin
     assert(testData.select($"*").showString(1) === expectedAnswer)
   }
 
@@ -295,7 +292,7 @@ class DataFrameShowSuite extends QueryTest with 
SharedSparkSession {
     val expectedAnswer = "-RECORD 0----\n" +
                          " key   | 1   \n" +
                          " value | 1   \n" +
-                         "only showing top 1 row\n"
+                         "only showing top 1 row"
     assert(testData.select($"*").showString(1, vertical = true) === 
expectedAnswer)
   }
 
@@ -337,7 +334,7 @@ class DataFrameShowSuite extends QueryTest with 
SharedSparkSession {
   }
 
   test("SPARK-7327 show with empty dataFrame, vertical = true") {
-    assert(testData.select($"*").filter($"key" < 0).showString(1, vertical = 
true) === "(0 rows)\n")
+    assert(testData.select($"*").filter($"key" < 0).showString(1, vertical = 
true) === "(0 rows)")
   }
 
   test("SPARK-18350 show with session local timezone") {
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ConsoleWriteSupportSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ConsoleWriteSupportSuite.scala
index 97b95eb402b7..b5f23853fd5b 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ConsoleWriteSupportSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ConsoleWriteSupportSuite.scala
@@ -102,7 +102,6 @@ class ConsoleWriteSupportSuite extends StreamTest {
         ||    2|
         |+-----+
         |only showing top 2 rows
-        |
         |""".stripMargin)
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to