Github user tdas commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19327#discussion_r140900266
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamingSymmetricHashJoinExec.scala ---
    @@ -157,11 +164,20 @@ case class StreamingSymmetricHashJoinExec(
       override def requiredChildDistribution: Seq[Distribution] =
         ClusteredDistribution(leftKeys) :: ClusteredDistribution(rightKeys) :: Nil
     
    -  override def output: Seq[Attribute] = left.output ++ right.output
    +  override def output: Seq[Attribute] = joinType match {
    +    case _: InnerLike => left.output ++ right.output
    +    case LeftOuter => left.output ++ right.output.map(_.withNullability(true))
    +    case RightOuter => left.output.map(_.withNullability(true)) ++ right.output
    +    case _ =>
    +      throwBadJoinTypeException()
    +      Seq()
    --- End diff ---
    
    I think if you define the return type of `throwBadJoinTypeException` as `Nothing`, then this `Seq()` won't be needed.
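    
    For illustration, here is a minimal, self-contained sketch of that suggestion (the object name, the simplified `JoinType` hierarchy, and the exception message are made up for the example and are not taken from the PR). Because `Nothing` is a subtype of every type in Scala, a branch that ends in a call to a method declared to return `Nothing` already satisfies the match expression's expected result type, so the trailing `Seq()` placeholder becomes unnecessary:
    
    ```scala
    object NothingReturnTypeDemo {
      sealed trait JoinType
      case object Inner extends JoinType
      case object LeftOuter extends JoinType
      case object RightOuter extends JoinType
      case object FullOuter extends JoinType
    
      // Declared return type Nothing: the compiler knows this call never
      // returns normally, and Nothing is a subtype of every type, so the call
      // can sit in a branch whose expected type is Seq[String] with no
      // placeholder value after it.
      private def throwBadJoinTypeException(joinType: JoinType): Nothing =
        throw new IllegalArgumentException(s"Unexpected join type: $joinType")
    
      def output(joinType: JoinType): Seq[String] = joinType match {
        case Inner      => Seq("left", "right")
        case LeftOuter  => Seq("left", "right (nullable)")
        case RightOuter => Seq("left (nullable)", "right")
        case other      => throwBadJoinTypeException(other) // no trailing Seq() needed
      }
    
      def main(args: Array[String]): Unit = {
        println(output(Inner)) // List(left, right)
      }
    }
    ```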

