This is an automated email from the ASF dual-hosted git repository.

huajianlan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new f6f5df3ec33 [fix](planner) Optimize local shuffle with bucket shuffle 
join (#57768)
f6f5df3ec33 is described below

commit f6f5df3ec33d109010565c1440913813ab0e2bca
Author: 924060929 <[email protected]>
AuthorDate: Fri Nov 7 14:56:11 2025 +0800

    [fix](planner) Optimize local shuffle with bucket shuffle join (#57768)
    
    Fix a performance degradation caused by bucket shuffle join assigning
    bucket indexes to backends in a skewed way, introduced by #56201.
    The problem slowed down query09 of TPC-DS sf1000 from 17s to 1 min 9s.
---
 .../job/UnassignedScanBucketOlapTableJob.java      | 62 ++++++++++------
 .../java/org/apache/doris/qe/LocalShuffleTest.java |  2 +-
 .../doris/qe/LocalShuffleWithBucketJoinTest.java   | 86 ++++++++++++++++++++++
 .../apache/doris/utframe/TestWithFeService.java    |  3 +-
 4 files changed, 129 insertions(+), 24 deletions(-)

diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/worker/job/UnassignedScanBucketOlapTableJob.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/worker/job/UnassignedScanBucketOlapTableJob.java
index 8c8c46ede41..472fafe4513 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/worker/job/UnassignedScanBucketOlapTableJob.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/worker/job/UnassignedScanBucketOlapTableJob.java
@@ -204,34 +204,52 @@ public class UnassignedScanBucketOlapTableJob extends 
AbstractUnassignedScanJob
         BucketScanSource serialScanSource = split.key();
         BucketScanSource nonSerialScanSource = split.value();
 
-        List<BucketScanSource> parallelizedSources = (List) 
nonSerialScanSource.parallelize(scanNodes, instanceNum);
-        BucketScanSource firstInstanceScanSource = serialScanSource;
-        if (!parallelizedSources.isEmpty()) {
-            firstInstanceScanSource = 
serialScanSource.newMergeBucketScanSource(parallelizedSources.get(0));
-        }
+        BucketScanSource shareScanSource = (BucketScanSource) scanSource;
+        ScanSource emptyShareScanSource = shareScanSource.newEmpty();
         int shareScanId = shareScanIdGenerator.getAndIncrement();
-        LocalShuffleBucketJoinAssignedJob firstInstance = new 
LocalShuffleBucketJoinAssignedJob(
-                instances.size(), shareScanId, context.nextInstanceId(),
-                this, worker,
-                firstInstanceScanSource,
-                
Utils.fastToImmutableSet(firstInstanceScanSource.bucketIndexToScanNodeToTablets.keySet())
-        );
-        instances.add(firstInstance);
-
-        for (int i = 1; i < parallelizedSources.size(); i++) {
-            BucketScanSource nonFirstInstanceScanSource = 
parallelizedSources.get(i);
-            LocalShuffleBucketJoinAssignedJob instance = new 
LocalShuffleBucketJoinAssignedJob(
+        int existsInstanceNum = instances.size();
+        if (nonSerialScanSource.isEmpty()) {
+            List<BucketScanSource> assignedJoinBuckets
+                    = (List) serialScanSource.parallelize(scanNodes, 
instanceNum);
+            for (int i = 0; i < assignedJoinBuckets.size(); i++) {
+                BucketScanSource assignedJoinBucket = 
assignedJoinBuckets.get(i);
+                LocalShuffleBucketJoinAssignedJob instance = new 
LocalShuffleBucketJoinAssignedJob(
+                        instances.size(), shareScanId, 
context.nextInstanceId(),
+                        this, worker,
+                        // only first instance to scan all data
+                        i == 0 ? serialScanSource : emptyShareScanSource,
+                        // but join can assign to multiple instances
+                        
Utils.fastToImmutableSet(assignedJoinBucket.bucketIndexToScanNodeToTablets.keySet())
+                );
+                instances.add(instance);
+            }
+        } else {
+            List<BucketScanSource> parallelizedSources
+                    = (List) nonSerialScanSource.parallelize(scanNodes, 
instanceNum);
+            BucketScanSource firstInstanceScanSource
+                    = 
serialScanSource.newMergeBucketScanSource(parallelizedSources.get(0));
+            LocalShuffleBucketJoinAssignedJob firstInstance = new 
LocalShuffleBucketJoinAssignedJob(
                     instances.size(), shareScanId, context.nextInstanceId(),
                     this, worker,
-                    nonFirstInstanceScanSource,
-                    
Utils.fastToImmutableSet(nonFirstInstanceScanSource.bucketIndexToScanNodeToTablets.keySet())
+                    firstInstanceScanSource,
+                    
Utils.fastToImmutableSet(parallelizedSources.get(0).bucketIndexToScanNodeToTablets.keySet())
             );
-            instances.add(instance);
+            instances.add(firstInstance);
+
+            for (int i = 1; i < parallelizedSources.size(); i++) {
+                BucketScanSource nonFirstInstanceScanSource = 
parallelizedSources.get(i);
+                LocalShuffleBucketJoinAssignedJob instance = new 
LocalShuffleBucketJoinAssignedJob(
+                        instances.size(), shareScanId, 
context.nextInstanceId(),
+                        this, worker,
+                        nonFirstInstanceScanSource,
+                        
Utils.fastToImmutableSet(nonFirstInstanceScanSource.bucketIndexToScanNodeToTablets.keySet())
+                );
+                instances.add(instance);
+            }
         }
 
-        BucketScanSource shareScanSource = (BucketScanSource) scanSource;
-        ScanSource emptyShareScanSource = shareScanSource.newEmpty();
-        for (int i = instances.size(); i < instanceNum; ++i) {
+        int thisWorkerInstanceNum = instances.size() - existsInstanceNum;
+        for (int i = thisWorkerInstanceNum; i < instanceNum; ++i) {
             LocalShuffleBucketJoinAssignedJob instance = new 
LocalShuffleBucketJoinAssignedJob(
                     instances.size(), shareScanId, context.nextInstanceId(),
                     this, worker, emptyShareScanSource,
diff --git a/fe/fe-core/src/test/java/org/apache/doris/qe/LocalShuffleTest.java 
b/fe/fe-core/src/test/java/org/apache/doris/qe/LocalShuffleTest.java
index 725cba2376f..10617bb1240 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/qe/LocalShuffleTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/qe/LocalShuffleTest.java
@@ -34,7 +34,7 @@ import java.util.Collection;
 import java.util.Map;
 import java.util.Map.Entry;
 
-public class LocalShuffleTest extends TestWithFeService  {
+public class LocalShuffleTest extends TestWithFeService {
     @Override
     protected int backendNum() {
         return 2;
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/qe/LocalShuffleWithBucketJoinTest.java
 
b/fe/fe-core/src/test/java/org/apache/doris/qe/LocalShuffleWithBucketJoinTest.java
new file mode 100644
index 00000000000..0587f8ba61e
--- /dev/null
+++ 
b/fe/fe-core/src/test/java/org/apache/doris/qe/LocalShuffleWithBucketJoinTest.java
@@ -0,0 +1,86 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.qe;
+
+import org.apache.doris.nereids.NereidsPlanner;
+import org.apache.doris.nereids.trees.plans.distribute.DistributedPlan;
+import org.apache.doris.nereids.trees.plans.distribute.PipelineDistributedPlan;
+import 
org.apache.doris.nereids.trees.plans.distribute.worker.DistributedPlanWorker;
+import org.apache.doris.nereids.trees.plans.distribute.worker.job.AssignedJob;
+import 
org.apache.doris.nereids.trees.plans.distribute.worker.job.LocalShuffleBucketJoinAssignedJob;
+import org.apache.doris.utframe.TestWithFeService;
+
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.Multimap;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+import java.util.Collection;
+import java.util.List;
+
+public class LocalShuffleWithBucketJoinTest extends TestWithFeService {
+    private static final int beNum = 3;
+    private static final int bucketNum = 20;
+
+    @Override
+    protected int backendNum() {
+        return beNum;
+    }
+
+    @Override
+    protected void runBeforeAll() throws Exception {
+        createDatabase("test");
+        useDatabase("test");
+        createTable("CREATE TABLE `test_tbl` (\n"
+                + "  `id` int\n"
+                + ") ENGINE=OLAP\n"
+                + "distributed by hash(id) buckets " + bucketNum + "\n"
+                + "PROPERTIES (\n"
+                + "\"replication_allocation\" = \"tag.location.default: 1\"\n"
+                + ")");
+    }
+
+    @Test
+    public void test() throws Exception {
+        int parallelPipelineTaskNum = 10;
+        executeNereidsSql("set parallel_pipeline_task_num=" + 
parallelPipelineTaskNum);
+        StmtExecutor stmtExecutor = executeNereidsSql(
+                "explain distributed plan select * from test_tbl a join 
test_tbl b on a.id = b.id");
+        NereidsPlanner planner = (NereidsPlanner) stmtExecutor.planner();
+        List<DistributedPlan> distributedPlans = 
planner.getDistributedPlans().valueList();
+        Assertions.assertEquals(1, distributedPlans.size());
+        PipelineDistributedPlan distributedPlan
+                = (PipelineDistributedPlan) distributedPlans.get(0);
+        List<AssignedJob> instances = distributedPlan.getInstanceJobs();
+        Assertions.assertEquals(beNum * parallelPipelineTaskNum, 
instances.size());
+        
Assertions.assertTrue(instances.stream().allMatch(LocalShuffleBucketJoinAssignedJob.class::isInstance));
+
+        long assignedBucketInstanceNum = 
instances.stream().map(LocalShuffleBucketJoinAssignedJob.class::cast)
+                .filter(a -> !a.getAssignedJoinBucketIndexes().isEmpty())
+                .count();
+        Assertions.assertEquals(bucketNum, assignedBucketInstanceNum);
+
+        Multimap<DistributedPlanWorker, AssignedJob> workerToInstances = 
ArrayListMultimap.create();
+        for (AssignedJob instance : instances) {
+            workerToInstances.put(instance.getAssignedWorker(), instance);
+        }
+        for (Collection<AssignedJob> instancePerBe : 
workerToInstances.asMap().values()) {
+            Assertions.assertEquals(parallelPipelineTaskNum, 
instancePerBe.size());
+        }
+    }
+}
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/utframe/TestWithFeService.java 
b/fe/fe-core/src/test/java/org/apache/doris/utframe/TestWithFeService.java
index 21c45c7135c..fae9de25205 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/utframe/TestWithFeService.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/utframe/TestWithFeService.java
@@ -574,7 +574,7 @@ public abstract class TestWithFeService {
         }
     }
 
-    public void executeNereidsSql(String queryStr) throws Exception {
+    public StmtExecutor executeNereidsSql(String queryStr) throws Exception {
         connectContext.getState().reset();
 
         StatementContext statementContext = new 
StatementContext(connectContext, new OriginStatement(queryStr, 0));
@@ -590,6 +590,7 @@ public abstract class TestWithFeService {
                 || connectContext.getState().getErrorCode() != null) {
             throw new 
IllegalStateException(connectContext.getState().getErrorMessage());
         }
+        return stmtExecutor;
     }
 
     public void createDatabase(String db) throws Exception {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to