maropu commented on a change in pull request #30455:
URL: https://github.com/apache/spark/pull/30455#discussion_r529062466
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
##########
@@ -322,7 +322,9 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression {
case (a: Array[Byte], b: Array[Byte]) => util.Arrays.equals(a, b)
case (a: ArrayBasedMapData, b: ArrayBasedMapData) =>
a.keyArray == b.keyArray && a.valueArray == b.valueArray
- case (a, b) => a != null && a.equals(b)
+ case (a: Double, b: Double) if a.isNaN && b.isNaN => true
+ case (a: Float, b: Float) if a.isNaN && b.isNaN => true
+ case (a, b) => a != null && a == b
Review comment:
(Just a question) Are the changes above related to this PR?
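For context, the behavior the two new cases preserve can be seen in a minimal standalone sketch (plain Scala, not Spark's code): primitive `==` follows IEEE 754, so `NaN == NaN` is false, while boxed `equals` treats NaN as equal to itself, which is what the old `a.equals(b)` relied on.
```
object NaNSemantics extends App {
  val a: Double = Double.NaN
  val b: Double = Double.NaN

  // IEEE-754 comparison on primitive doubles: NaN is not equal to anything,
  // including itself.
  println(a == b)  // false

  // Boxed java.lang.Double#equals compares bit patterns, so NaN equals NaN.
  println(java.lang.Double.valueOf(a).equals(java.lang.Double.valueOf(b)))  // true
}
```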
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/GenericArrayData.scala
##########
@@ -120,7 +120,7 @@ class GenericArrayData(val array: Array[Any]) extends ArrayData {
if (!o2.isInstanceOf[Double] || !java.lang.Double.isNaN(o2.asInstanceOf[Double])) {
return false
}
- case _ => if (!o1.equals(o2)) {
+ case _ => if (o1.getClass != o2.getClass || o1 != o2) {
Review comment:
ditto: Is this change above related to this PR?
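One possible reason for the extra `getClass` guard, shown with a standalone sketch (the `strictlyEqual` helper is hypothetical, not Spark's code): on `Any`, Scala's `==` goes through cooperative numeric equality, so differently-typed boxed numbers can compare equal, and the class guard rules that out.
```
object ClassGuard extends App {
  // Hypothetical helper mirroring the guarded comparison in the diff
  // (assumes non-null inputs).
  def strictlyEqual(o1: Any, o2: Any): Boolean =
    o1.getClass == o2.getClass && o1 == o2

  // On Any, Scala's == uses cooperative numeric equality for boxed numbers:
  println((1: Any) == (1L: Any))   // true
  // The class guard rejects values of different runtime classes:
  println(strictlyEqual(1, 1L))    // false
  println(strictlyEqual(1.5, 1.5)) // true
}
```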
##########
File path: streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLog.scala
##########
@@ -293,7 +293,7 @@ private[streaming] object FileBasedWriteAheadLog {
val startTime = startTimeStr.toLong
val stopTime = stopTimeStr.toLong
Some(LogInfo(startTime, stopTime, file.toString))
- case None =>
+ case None | Some(_) =>
Review comment:
Couldn't we just write `case _ =>` here?
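For an `Option` scrutinee the two spellings cover the same remaining inputs; a small standalone sketch (an assumed example, not the write-ahead-log code):
```
object WildcardEquivalence extends App {
  // `toLongOption` is available on StringOps since Scala 2.13.
  def parse(s: String): Option[Long] = s.toLongOption

  def describe(s: String): String = parse(s) match {
    case Some(n) if n > 0 => s"positive: $n"
    case None | Some(_)   => "no positive value"  // same inputs as `case _ =>`
  }

  println(describe("42"))   // positive: 42
  println(describe("nope")) // no positive value
}
```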
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
##########
@@ -362,7 +362,7 @@ case class Join(
left.constraints
case RightOuter =>
right.constraints
- case FullOuter =>
+ case _ =>
Review comment:
nit: for readability, how about leaving a comment like this?
```
case _ => // FullOuter
```
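Presumably the named case became a wildcard to satisfy Scala 2.13.4's stricter exhaustivity checking (an assumption from the PR context, not stated in the diff). A standalone sketch with a stand-in sealed hierarchy, not Spark's `JoinType`, showing how the suggested trailing comment reads in context:
```
// Stand-in sealed hierarchy (not Spark's JoinType), just for illustration.
sealed trait JoinKind
case object Inner extends JoinKind
case object LeftOuter extends JoinKind
case object RightOuter extends JoinKind
case object FullOuter extends JoinKind

object WildcardWithComment extends App {
  def constraintSide(j: JoinKind): String = j match {
    case Inner      => "both"
    case LeftOuter  => "left"
    case RightOuter => "right"
    case _          => "none" // FullOuter
  }
  println(constraintSide(FullOuter)) // none
}
```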
##########
File path: core/src/main/scala/org/apache/spark/util/JsonProtocol.scala
##########
@@ -757,15 +757,15 @@ private[spark] object JsonProtocol {
def taskResourceRequestMapFromJson(json: JValue): Map[String, TaskResourceRequest] = {
val jsonFields = json.asInstanceOf[JObject].obj
- jsonFields.map { case JField(k, v) =>
+ jsonFields.collect { case JField(k, v) =>
Review comment:
Ah, I see. So compiling this with `map` fails on Scala 2.13.4?
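A plausible reading (hedged, since the exact diagnostic depends on the extractor): `map` expects a total `Function1`, so a pattern-match literal that 2.13.4's checker considers non-exhaustive warns, and fails a build with fatal warnings, whereas `collect` takes a `PartialFunction`, where partial coverage is the contract. A standalone sketch:
```
object MapVsCollect extends App {
  val fields: List[(String, Any)] = List(("cores", 2), ("memory", "4g"))

  // `map` expects a total function, so with fatal warnings a non-exhaustive
  // pattern literal can fail the build:
  //   fields.map { case (k, v: Int) => k -> v }  // "match may not be exhaustive"

  // `collect` takes a PartialFunction, so partial coverage is the contract:
  val ints = fields.collect { case (k, v: Int) => k -> v }
  println(ints) // List((cores,2))
}
```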
---------------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]