zhztheplayer commented on code in PR #8931:
URL: https://github.com/apache/incubator-gluten/pull/8931#discussion_r2906190623
##########
backends-velox/src/main/scala/org/apache/spark/sql/execution/unsafe/UnsafeColumnarBuildSideRelation.scala:
##########
@@ -96,37 +109,128 @@ class UnsafeColumnarBuildSideRelation(
case _ => None
}
+ def isOffload: Boolean = offload
+
/** needed for serialization. */
def this() = {
- this(null, null, null)
+ this(null, null, null, Seq.empty, false)
}
private[unsafe] def getBatches(): Seq[UnsafeByteArray] = {
batches
}
+ private var hashTableData: Long = 0L
+
+  def buildHashTable(broadcastContext: BroadcastHashJoinContext): (Long, BuildSideRelation) =
+ synchronized {
+ if (hashTableData == 0) {
+ val runtime = Runtimes.contextInstance(
+ BackendsApiManager.getBackendName,
+ "UnsafeColumnarBuildSideRelation#buildHashTable")
+ val jniWrapper = ColumnarBatchSerializerJniWrapper.create(runtime)
+ val serializeHandle: Long = {
+ val allocator = ArrowBufferAllocators.contextInstance()
+ val cSchema = ArrowSchema.allocateNew(allocator)
+ val arrowSchema = SparkArrowUtil.toArrowSchema(
+ SparkShimLoader.getSparkShims.structFromAttributes(output),
+ SQLConf.get.sessionLocalTimeZone)
+ ArrowAbiUtil.exportSchema(allocator, arrowSchema, cSchema)
+ val handle = jniWrapper
+ .init(cSchema.memoryAddress())
+ cSchema.close()
+ handle
+ }
+
+ val batchArray = new ArrayBuffer[Long]
+
+ var batchId = 0
+ while (batchId < batches.size) {
+          val (offset, length) = (batches(batchId).address(), batches(batchId).size())
+          batchArray.append(jniWrapper.deserializeDirect(serializeHandle, offset, length.toInt))
+ batchId += 1
+ }
+
+ logDebug(
+ s"BHJ value size: " +
+ s"${broadcastContext.buildHashTableId} = ${batches.size}")
+
+ val (keys, newOutput) = if (newBuildKeys.isEmpty) {
+ (
+ broadcastContext.buildSideJoinKeys.asJava,
+ broadcastContext.buildSideStructure.asJava
+ )
+ } else {
+ (
+ newBuildKeys.asJava,
+ output.asJava
+ )
+ }
+
+ val joinKey = keys.asScala
+ .map {
+ key =>
+ val attr = ConverterUtils.getAttrFromExpr(key)
+ ConverterUtils.genColumnNameWithExprId(attr)
+ }
+ .mkString(",")
+
+ // Build the hash table
+ hashTableData = HashJoinBuilder
+ .nativeBuild(
+ broadcastContext.buildHashTableId,
+ batchArray.toArray,
+ joinKey,
+ broadcastContext.substraitJoinType.ordinal(),
+ broadcastContext.hasMixedFiltCondition,
+ broadcastContext.isExistenceJoin,
+ SubstraitUtil.toNameStruct(newOutput).toByteArray,
+ broadcastContext.isNullAwareAntiJoin,
+ broadcastContext.bloomFilterPushdownSize,
+ broadcastContext.broadcastHashTableBuildThreads
+ )
+
+ jniWrapper.close(serializeHandle)
+ (hashTableData, this)
+ } else {
+ (HashJoinBuilder.cloneHashTable(hashTableData), null)
Review Comment:
Thanks for the explanation.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]