GitHub user JoshRosen commented on a diff in the pull request:

    https://github.com/apache/spark/pull/7480#discussion_r35151156
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala ---
    @@ -148,3 +148,81 @@ private[joins] object HashedRelation {
         }
       }
     }
    +
    +
    +/**
    + * A HashedRelation for UnsafeRow, currently backed by a JavaHashMap that maps each key
    + * to a sequence of values.
    + *
    + * TODO(davies): use BytesToBytesMap
    + */
    +private[joins] final class UnsafeHashedRelation(
    +    private var hashTable: JavaHashMap[UnsafeRow, CompactBuffer[UnsafeRow]])
    +  extends HashedRelation with Externalizable {
    +
    +  def this() = this(null)  // Needed for serialization
    +
    +  override def get(key: InternalRow): CompactBuffer[InternalRow] = {
    +    val unsafeKey = key.asInstanceOf[UnsafeRow]
    +    // Thanks to type erasure, the CompactBuffer[UnsafeRow] can be returned as a
    +    // CompactBuffer[InternalRow] without copying
    +    hashTable.get(unsafeKey).asInstanceOf[CompactBuffer[InternalRow]]
    +  }
    +
    +  override def writeExternal(out: ObjectOutput): Unit = {
    +    writeBytes(out, SparkSqlSerializer.serialize(hashTable))
    +  }
    +
    +  override def readExternal(in: ObjectInput): Unit = {
    +    hashTable = SparkSqlSerializer.deserialize(readBytes(in))
    +  }
    +}
    +
    +private[joins] object UnsafeHashedRelation {
    +
    +  def apply(
    +      input: Iterator[InternalRow],
    +      buildKeys: Seq[Expression],
    +      buildPlan: SparkPlan,
    +      sizeEstimate: Int = 64): HashedRelation = {
    +    val boundedKeys = buildKeys.map(BindReferences.bindReference(_, buildPlan.output))
    +    apply(input, boundedKeys, buildPlan.schema, sizeEstimate)
    +  }
    +
    +  // Used for tests
    +  def apply(
    +      input: Iterator[InternalRow],
    +      buildKeys: Seq[Expression],
    +      rowSchema: StructType,
    +      sizeEstimate: Int): HashedRelation = {
    +
    +    // TODO: Use BytesToBytesMap.
    +    val hashTable = new JavaHashMap[UnsafeRow, CompactBuffer[UnsafeRow]](sizeEstimate)
    +    var currentRow: InternalRow = null
    --- End diff ---
    
    Minor nit: mind declaring `currentRow` as a `val` inside the `while` loop in order to limit its scope?
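    
    Something like the following rough sketch; the loop body isn't shown in the
    quoted hunk, so the iteration over `input` below is only an illustration:
    
        while (input.hasNext) {
          // `currentRow` is now a `val` whose scope is limited to a single
          // iteration, rather than a `var` declared before the loop
          val currentRow = input.next()
          // ... project the join key and append the row to the matching
          // CompactBuffer in `hashTable`, as in the existing loop body ...
        }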

