JingsongLi commented on code in PR #1454:
URL: https://github.com/apache/incubator-paimon/pull/1454#discussion_r1251657527


##########
paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/WriteIntoPaimonTable.scala:
##########
@@ -77,45 +77,56 @@ case class WriteIntoPaimonTable(_table: FileStoreTable, saveMode: SaveMode, data
     val toRow = withBucketDataEncoder.createSerializer()
     val fromRow = withBucketDataEncoder.createDeserializer()
 
-    val withAssignedBucket = if (isDynamicBucketTable) {
-      val partitioned = if (primaryKeyCols.nonEmpty) {
-        // Make sure that the records with the same bucket values are within a task.
-        withBucketCol.repartition(primaryKeyCols: _*)
-      } else {
-        withBucketCol
-      }
-      val numSparkPartitions = partitioned.rdd.getNumPartitions
-      val dynamicBucketProcessor =
-        DynamicBucketProcessor(table, rowType, bucketColIdx, numSparkPartitions, toRow, fromRow)
-      partitioned.mapPartitions(dynamicBucketProcessor.processPartition)(withBucketDataEncoder)
-    } else {
+    def commonBucketProcessed = {

Review Comment:
   `def assignFixedBucket`?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to