This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new ac6f8bb280f [MINOR][FOLLOWUP] Remove redundant return
ac6f8bb280f is described below
commit ac6f8bb280f86f4420bbc45b60344b4adbfd7ccd
Author: panbingkun <[email protected]>
AuthorDate: Tue Jul 12 08:42:09 2022 +0900
[MINOR][FOLLOWUP] Remove redundant return
### What changes were proposed in this pull request?
Remove redundant `return` statements in Scala code.
This PR is a follow-up to https://github.com/apache/spark/pull/37148.
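As a minimal illustration (a hypothetical sketch, not code taken from the Spark repository), in Scala the value of the last expression in a method body is the method's result, so an explicit `return` on a trailing branch is redundant:

```scala
// Before: explicit `return` on every branch (compiles, but redundant).
def signBefore(x: Int): Int = {
  if (x < 0) {
    return -1
  } else if (x > 0) {
    return 1
  } else {
    return 0
  }
}

// After: the if/else expression itself is the method's result.
def signAfter(x: Int): Int = {
  if (x < 0) {
    -1
  } else if (x > 0) {
    1
  } else {
    0
  }
}
```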
### Why are the changes needed?
Syntactic simplification.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Pass GA.
Closes #37157 from panbingkun/remove_redundance_return_followup.
Authored-by: panbingkun <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
---
mllib-local/src/main/scala/org/apache/spark/ml/linalg/BLAS.scala | 4 ----
.../spark/sql/catalyst/expressions/conditionalExpressions.scala | 4 ++--
.../src/main/scala/org/apache/spark/sql/types/ArrayType.scala | 6 +++---
.../org/apache/spark/sql/execution/streaming/FileStreamSink.scala | 2 +-
.../sql/execution/streaming/continuous/ContinuousExecution.scala | 1 -
.../execution/streaming/state/SymmetricHashJoinStateManager.scala | 8 ++++----
.../scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala | 6 +++---
.../spark/sql/execution/streaming/state/StateStoreSuite.scala | 4 ++--
.../org/apache/spark/streaming/dstream/FileInputDStream.scala | 2 +-
9 files changed, 16 insertions(+), 21 deletions(-)
diff --git a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/BLAS.scala b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/BLAS.scala
index f93195c4750..c81dee5ef8f 100644
--- a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/BLAS.scala
+++ b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/BLAS.scala
@@ -385,7 +385,6 @@ private[spark] object BLAS extends Serializable {
"The matrix C cannot be the product of a transpose() call.
C.isTransposed must be false.")
if (alpha == 0.0 && beta == 1.0) {
// gemm: alpha is equal to 0 and beta is equal to 1. Returning C.
- return
} else if (alpha == 0.0) {
getBLAS(C.values.length).dscal(C.values.length, beta, C.values, 1)
} else {
@@ -416,7 +415,6 @@ private[spark] object BLAS extends Serializable {
s"the length of CValues must be no less than ${A.numRows} X
${B.numCols}")
if (alpha == 0.0 && beta == 1.0) {
// gemm: alpha is equal to 0 and beta is equal to 1. Returning C.
- return
} else if (alpha == 0.0) {
val n = A.numRows * B.numCols
getBLAS(n).dscal(n, beta, CValues, 1)
@@ -619,7 +617,6 @@ private[spark] object BLAS extends Serializable {
s"The rows of A don't match the number of elements of y. A:
${A.numRows}, y:${y.length}")
if (alpha == 0.0 && beta == 1.0) {
// gemv: alpha is equal to 0 and beta is equal to 1. Returning y.
- return
} else if (alpha == 0.0) {
getBLAS(A.numRows).dscal(A.numRows, beta, y, 1)
} else {
@@ -650,7 +647,6 @@ private[spark] object BLAS extends Serializable {
s"The rows of A don't match the number of elements of y. A:
${A.numRows}, y:${y.size}")
if (alpha == 0.0 && beta == 1.0) {
// gemv: alpha is equal to 0 and beta is equal to 1. Returning y.
- return
} else if (alpha == 0.0) {
scal(beta, y)
} else {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
index 7213440bebe..f506acde7c2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
@@ -226,9 +226,9 @@ case class CaseWhen(
i += 1
}
if (elseValue.isDefined) {
- return elseValue.get.eval(input)
+ elseValue.get.eval(input)
} else {
- return null
+ null
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
index a3a2ccf5ab1..b5708bae923 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
@@ -139,11 +139,11 @@ case class ArrayType(elementType: DataType, containsNull: Boolean) extends DataT
i += 1
}
if (leftArray.numElements() < rightArray.numElements()) {
- return -1
+ -1
} else if (leftArray.numElements() > rightArray.numElements()) {
- return 1
+ 1
} else {
- return 0
+ 0
}
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FileStreamSink.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FileStreamSink.scala
index 5058a1dfc3b..04a1de02ea5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FileStreamSink.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FileStreamSink.scala
@@ -110,7 +110,7 @@ object FileStreamSink extends Logging {
currentPath = currentPath.getParent
}
}
- return false
+ false
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/ContinuousExecution.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/ContinuousExecution.scala
index 09b54889bfb..5b620eec25f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/ContinuousExecution.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/ContinuousExecution.scala
@@ -269,7 +269,6 @@ class ContinuousExecution(
} catch {
case _: InterruptedException =>
// Cleanly stop the query.
- return
}
}
}, s"epoch update thread for $prettyIdString")
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/SymmetricHashJoinStateManager.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/SymmetricHashJoinStateManager.scala
index cd8b65b4855..9e8356d3fdb 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/SymmetricHashJoinStateManager.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/SymmetricHashJoinStateManager.scala
@@ -177,7 +177,7 @@ class SymmetricHashJoinStateManager(
// We only reach here if there were no satisfying keys left, which means we're done.
finished = true
- return null
+ null
}
override def close(): Unit = {}
@@ -261,7 +261,7 @@ class SymmetricHashJoinStateManager(
}
// We tried and failed to find the next value.
- return null
+ null
}
/**
@@ -323,7 +323,7 @@ class SymmetricHashJoinStateManager(
numValues -= 1
valueRemoved = true
- return reusedRet.withNew(currentKey, currentValue.value, currentValue.matched)
+ reusedRet.withNew(currentKey, currentValue.value, currentValue.matched)
}
override def close(): Unit = {}
@@ -629,7 +629,7 @@ class SymmetricHashJoinStateManager(
}
finished = true
- return null
+ null
}
override protected def close(): Unit = {}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
index 1aef458a352..102c971d6fd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
@@ -602,7 +602,7 @@ trait ColumnarExpression extends Expression with Serializable {
if (!super.equals(other)) {
return false
}
- return other.isInstanceOf[ColumnarExpression]
+ other.isInstanceOf[ColumnarExpression]
}
override def hashCode(): Int = super.hashCode()
@@ -718,7 +718,7 @@ class ColumnarProjectExec(projectList: Seq[NamedExpression], child: SparkPlan)
if (!super.equals(other)) {
return false
}
- return other.isInstanceOf[ColumnarProjectExec]
+ other.isInstanceOf[ColumnarProjectExec]
}
override def hashCode(): Int = super.hashCode()
@@ -901,7 +901,7 @@ class ReplacedRowToColumnarExec(override val child: SparkPlan)
if (!super.equals(other)) {
return false
}
- return other.isInstanceOf[ReplacedRowToColumnarExec]
+ other.isInstanceOf[ReplacedRowToColumnarExec]
}
override def hashCode(): Int = super.hashCode()
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreSuite.scala
index f74a7a0aabb..fdb28cc1d72 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreSuite.scala
@@ -1331,9 +1331,9 @@ object StateStoreTestsHelper {
class RenameLikeHDFSFileSystem extends RawLocalFileSystem {
override def rename(src: Path, dst: Path): Boolean = {
if (exists(dst)) {
- return false
+ false
} else {
- return super.rename(src, dst)
+ super.rename(src, dst)
}
}
}
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
index d46c9a22379..850628080ec 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
@@ -271,7 +271,7 @@ class FileInputDStream[K, V, F <: NewInputFormat[K, V]](
return false
}
logDebug(s"$pathStr accepted with mod time $modTime")
- return true
+ true
}
/** Generate one RDD from an array of files */
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]