This is an automated email from the ASF dual-hosted git repository.

alsuliman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git


The following commit(s) were added to refs/heads/master by this push:
     new 129186c8f2 [ASTERIXDB-3144][RT] Pass partitions map to hash partitioner
129186c8f2 is described below

commit 129186c8f23b66f1eff10f9bd472a2a3e1854d6e
Author: Ali Alsuliman <[email protected]>
AuthorDate: Tue May 9 14:48:49 2023 -0700

    [ASTERIXDB-3144][RT] Pass partitions map to hash partitioner
    
    - user model changes: no
    - storage format changes: no
    - interface changes: no
    
    Details:
    Pass the compute-to-storage partitions map to the
    hash partitioner that is used when rebalancing datasets.
    
    Change-Id: I9a66d37e7f239bd34e224550fb097a43d5da0a55
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/17522
    Integration-Tests: Jenkins <[email protected]>
    Reviewed-by: Murtadha Hubail <[email protected]>
    Tested-by: Jenkins <[email protected]>
---
 .../asterix/app/function/QueryIndexRewriter.java   |  3 ---
 .../org/apache/asterix/utils/RebalanceUtil.java    |  6 ++++--
 .../apache/asterix/metadata/utils/DatasetUtil.java |  6 ++----
 .../physical/BroadcastExchangePOperator.java       |  5 ++---
 .../physical/HashPartitionExchangePOperator.java   |  8 ++++++--
 .../HashPartitionMergeExchangePOperator.java       | 14 ++++++++-----
 .../physical/RandomMergeExchangePOperator.java     |  7 +++----
 .../physical/SortMergeExchangePOperator.java       | 14 ++++++-------
 .../FieldHashPartitionComputerFactory.java         | 14 ++++++++-----
 .../common/data/partition/HashPartitioner.java     |  5 ++++-
 ...tyAwareMToNPartitioningConnectorDescriptor.java | 21 ++-----------------
 .../join/InMemoryHashJoinOperatorDescriptor.java   |  4 ++--
 .../btree/client/InsertPipelineExample.java        |  2 +-
 .../btree/client/PrimaryIndexBulkLoadExample.java  |  2 +-
 .../hyracks/tests/integration/AggregationTest.java | 24 +++++++++++-----------
 .../hyracks/tests/integration/CancelJobTest.java   |  2 +-
 .../tests/integration/CountOfCountsTest.java       | 12 +++++------
 .../tests/integration/HeapSortMergeTest.java       |  4 ++--
 .../integration/LocalityAwareConnectorTest.java    |  4 ++--
 .../hyracks/tests/integration/ScanPrintTest.java   |  8 ++++----
 .../hyracks/tests/integration/SortMergeTest.java   |  4 ++--
 .../integration/TPCHCustomerOrderHashJoinTest.java | 16 +++++++--------
 .../tests/integration/VSizeFrameSortMergeTest.java |  2 +-
 .../examples/text/client/WordCountMain.java        |  4 ++--
 .../hyracks/examples/tpch/client/Groupby.java      |  2 +-
 .../apache/hyracks/examples/tpch/client/Join.java  |  6 +++---
 .../apache/hyracks/examples/tpch/client/Sort.java  |  2 +-
 27 files changed, 96 insertions(+), 105 deletions(-)

diff --git 
a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/QueryIndexRewriter.java
 
b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/QueryIndexRewriter.java
index 3d96595ec4..0aa66ddbbd 100644
--- 
a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/QueryIndexRewriter.java
+++ 
b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/QueryIndexRewriter.java
@@ -63,7 +63,6 @@ import 
org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceSc
 import 
org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
 import org.apache.hyracks.algebricks.core.algebra.properties.INodeDomain;
 import org.apache.hyracks.api.exceptions.SourceLocation;
-import 
org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory;
 import org.apache.hyracks.util.LogRedactionUtil;
 
 public class QueryIndexRewriter extends FunctionRewriter implements 
IResultTypeComputer {
@@ -122,8 +121,6 @@ public class QueryIndexRewriter extends FunctionRewriter 
implements IResultTypeC
             SourceLocation loc, AbstractFunctionCallExpression f) throws 
AlgebricksException {
         ISecondaryIndexOperationsHelper secIdxHelper =
                 SecondaryIndexOperationsHelper.createIndexOperationsHelper(ds, 
idx, mp, loc);
-        new 
IndexDataflowHelperFactory(mp.getStorageComponentProvider().getStorageManager(),
-                secIdxHelper.getSecondaryFileSplitProvider());
         AlgebricksAbsolutePartitionConstraint secPartitionConstraint =
                 (AlgebricksAbsolutePartitionConstraint) 
secIdxHelper.getSecondaryPartitionConstraint();
         INodeDomain domain = mp.findNodeDomain(ds.getNodeGroupName());
diff --git 
a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
 
b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
index 619c2cc891..f315f186a4 100644
--- 
a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
+++ 
b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
@@ -318,8 +318,10 @@ public class RebalanceUtil {
         // Connects scan and upsert.
         int numKeys = target.getPrimaryKeys().size();
         int[] keys = IntStream.range(0, numKeys).toArray();
-        IConnectorDescriptor connectorDescriptor = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keys, 
target.getPrimaryHashFunctionFactories(metadataProvider)));
+        int[][] partitionsMap = 
metadataProvider.getPartitioningProperties(target).getComputeStorageMap();
+        IConnectorDescriptor connectorDescriptor =
+                new MToNPartitioningConnectorDescriptor(spec, 
FieldHashPartitionComputerFactory.withMap(keys,
+                        
target.getPrimaryHashFunctionFactories(metadataProvider), partitionsMap));
         spec.connect(connectorDescriptor, primaryScanOp, 0, upsertOp, 0);
 
         // Connects upsert and sink.
diff --git 
a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
 
b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index e882129823..2ce94355bc 100644
--- 
a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ 
b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -337,8 +337,8 @@ public class DatasetUtil {
         PartitioningProperties partitioningProperties = 
metadataProvider.getPartitioningProperties(dataset);
         FileSplit[] fs = 
partitioningProperties.getSpiltsProvider().getFileSplits();
         StringBuilder sb = new StringBuilder();
-        for (int i = 0; i < fs.length; i++) {
-            sb.append(fs[i] + " ");
+        for (FileSplit f : fs) {
+            sb.append(f).append(" ");
         }
         LOGGER.info("CREATING File Splits: {}", sb);
         Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
@@ -374,8 +374,6 @@ public class DatasetUtil {
                 indexHelperFactory, 
partitioningProperties.getComputeStorageMap());
         
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, 
compactOp,
                 partitioningProperties.getConstraints());
-        
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, 
compactOp,
-                partitioningProperties.getConstraints());
         spec.addRoot(compactOp);
         return spec;
     }
diff --git 
a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BroadcastExchangePOperator.java
 
b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BroadcastExchangePOperator.java
index 835cc14e6c..eef2de25ba 100644
--- 
a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BroadcastExchangePOperator.java
+++ 
b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BroadcastExchangePOperator.java
@@ -28,7 +28,6 @@ import 
org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
 import org.apache.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
 import 
org.apache.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
 import 
org.apache.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty;
-import 
org.apache.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
 import org.apache.hyracks.algebricks.core.algebra.properties.INodeDomain;
 import 
org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
 import 
org.apache.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
@@ -41,7 +40,7 @@ import 
org.apache.hyracks.dataflow.std.connectors.MToNBroadcastConnectorDescript
 
 public class BroadcastExchangePOperator extends AbstractExchangePOperator {
 
-    private INodeDomain domain;
+    private final INodeDomain domain;
 
     public BroadcastExchangePOperator(INodeDomain domain) {
         this.domain = domain;
@@ -56,7 +55,7 @@ public class BroadcastExchangePOperator extends 
AbstractExchangePOperator {
     public void computeDeliveredProperties(ILogicalOperator op, 
IOptimizationContext context) {
         IPartitioningProperty pp = new BroadcastPartitioningProperty(domain);
         // Broadcasts will destroy input local properties.
-        this.deliveredProperties = new StructuralPropertiesVector(pp, new 
ArrayList<ILocalStructuralProperty>());
+        this.deliveredProperties = new StructuralPropertiesVector(pp, new 
ArrayList<>());
     }
 
     @Override
diff --git 
a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionExchangePOperator.java
 
b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionExchangePOperator.java
index 55b40b4151..e57af36652 100644
--- 
a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionExchangePOperator.java
+++ 
b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionExchangePOperator.java
@@ -105,8 +105,12 @@ public class HashPartitionExchangePOperator extends 
AbstractExchangePOperator {
             hashFunctionFactories[i] = 
hashFunProvider.getBinaryHashFunctionFactory(env.getVarType(v));
             ++i;
         }
-        ITuplePartitionComputerFactory tpcf =
-                new FieldHashPartitionComputerFactory(keys, 
hashFunctionFactories, partitionsMap);
+        ITuplePartitionComputerFactory tpcf;
+        if (partitionsMap == null) {
+            tpcf = FieldHashPartitionComputerFactory.of(keys, 
hashFunctionFactories);
+        } else {
+            tpcf = FieldHashPartitionComputerFactory.withMap(keys, 
hashFunctionFactories, partitionsMap);
+        }
         IConnectorDescriptor conn = new 
MToNPartitioningConnectorDescriptor(spec, tpcf);
         return new Pair<>(conn, null);
     }
diff --git 
a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
 
b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
index 58614647fc..2fdf54e809 100644
--- 
a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
+++ 
b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
@@ -107,8 +107,8 @@ public class HashPartitionMergeExchangePOperator extends 
AbstractExchangePOperat
     @Override
     public PhysicalRequirements 
getRequiredPropertiesForChildren(ILogicalOperator op,
             IPhysicalPropertiesVector reqdByParent, IOptimizationContext 
context) {
-        List<ILocalStructuralProperty> orderProps = new 
LinkedList<ILocalStructuralProperty>();
-        List<OrderColumn> columns = new ArrayList<OrderColumn>();
+        List<ILocalStructuralProperty> orderProps = new LinkedList<>();
+        List<OrderColumn> columns = new ArrayList<>();
         for (OrderColumn oc : orderColumns) {
             LogicalVariable var = oc.getColumn();
             columns.add(new OrderColumn(var, oc.getOrder()));
@@ -139,8 +139,12 @@ public class HashPartitionMergeExchangePOperator extends 
AbstractExchangePOperat
                 ++i;
             }
         }
-        ITuplePartitionComputerFactory tpcf =
-                new FieldHashPartitionComputerFactory(keys, 
hashFunctionFactories, partitionsMap);
+        ITuplePartitionComputerFactory tpcf;
+        if (partitionsMap == null) {
+            tpcf = FieldHashPartitionComputerFactory.of(keys, 
hashFunctionFactories);
+        } else {
+            tpcf = FieldHashPartitionComputerFactory.withMap(keys, 
hashFunctionFactories, partitionsMap);
+        }
 
         int n = orderColumns.size();
         int[] sortFields = new int[n];
@@ -164,7 +168,7 @@ public class HashPartitionMergeExchangePOperator extends 
AbstractExchangePOperat
 
         IConnectorDescriptor conn =
                 new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, 
sortFields, comparatorFactories, nkcf);
-        return new Pair<IConnectorDescriptor, TargetConstraint>(conn, null);
+        return new Pair<>(conn, null);
     }
 
     public List<LogicalVariable> getPartitionFields() {
diff --git 
a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomMergeExchangePOperator.java
 
b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomMergeExchangePOperator.java
index aedf04646a..c304fd6e7f 100644
--- 
a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomMergeExchangePOperator.java
+++ 
b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomMergeExchangePOperator.java
@@ -26,7 +26,6 @@ import 
org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
 import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
 import org.apache.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
 import 
org.apache.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import 
org.apache.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
 import 
org.apache.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
 import 
org.apache.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
 import 
org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
@@ -45,8 +44,8 @@ public class RandomMergeExchangePOperator extends 
AbstractExchangePOperator {
 
     @Override
     public void computeDeliveredProperties(ILogicalOperator op, 
IOptimizationContext context) {
-        this.deliveredProperties = new 
StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED,
-                new ArrayList<ILocalStructuralProperty>(0));
+        this.deliveredProperties =
+                new 
StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED, new 
ArrayList<>(0));
     }
 
     @Override
@@ -59,6 +58,6 @@ public class RandomMergeExchangePOperator extends 
AbstractExchangePOperator {
     public Pair<IConnectorDescriptor, TargetConstraint> 
createConnectorDescriptor(IConnectorDescriptorRegistry spec,
             ILogicalOperator op, IOperatorSchema opSchema, JobGenContext 
context) {
         IConnectorDescriptor conn = new MToNBroadcastConnectorDescriptor(spec);
-        return new Pair<IConnectorDescriptor, TargetConstraint>(conn, 
TargetConstraint.ONE);
+        return new Pair<>(conn, TargetConstraint.ONE);
     }
 }
diff --git 
a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
 
b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
index 6c02dca365..49d678a416 100644
--- 
a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
+++ 
b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
@@ -78,7 +78,7 @@ public class SortMergeExchangePOperator extends 
AbstractExchangePOperator {
         sb.append(" [");
         sb.append(sortColumns[0]);
         for (int i = 1; i < sortColumns.length; i++) {
-            sb.append(", " + sortColumns[i]);
+            sb.append(", ").append(sortColumns[i]);
         }
         sb.append(" ]");
         return sb.toString();
@@ -94,8 +94,8 @@ public class SortMergeExchangePOperator extends 
AbstractExchangePOperator {
             pv1 = inp1.getDeliveredPhysicalProperties();
         }
 
-        List<OrderColumn> orderColumns = new ArrayList<OrderColumn>();
-        List<ILocalStructuralProperty> localProps = new 
ArrayList<ILocalStructuralProperty>(sortColumns.length);
+        List<OrderColumn> orderColumns = new ArrayList<>();
+        List<ILocalStructuralProperty> localProps = new 
ArrayList<>(sortColumns.length);
         for (ILocalStructuralProperty prop : pv1.getLocalProperties()) {
             if (prop.getPropertyType() == PropertyType.LOCAL_ORDER_PROPERTY) {
                 LocalOrderProperty lop = (LocalOrderProperty) prop;
@@ -109,8 +109,6 @@ public class SortMergeExchangePOperator extends 
AbstractExchangePOperator {
                         break;
                     }
                 }
-            } else {
-                continue;
             }
         }
         if (orderColumns.size() > 0) {
@@ -122,7 +120,7 @@ public class SortMergeExchangePOperator extends 
AbstractExchangePOperator {
     @Override
     public PhysicalRequirements 
getRequiredPropertiesForChildren(ILogicalOperator op,
             IPhysicalPropertiesVector reqdByParent, IOptimizationContext 
context) {
-        List<ILocalStructuralProperty> localProps = new 
ArrayList<ILocalStructuralProperty>(sortColumns.length);
+        List<ILocalStructuralProperty> localProps = new 
ArrayList<>(sortColumns.length);
         localProps.add(new LocalOrderProperty(Arrays.asList(sortColumns)));
         StructuralPropertiesVector[] r =
                 new StructuralPropertiesVector[] { new 
StructuralPropertiesVector(null, localProps) };
@@ -152,9 +150,9 @@ public class SortMergeExchangePOperator extends 
AbstractExchangePOperator {
                 nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, 
sortColumns[i].getOrder() == OrderKind.ASC);
             }
         }
-        ITuplePartitionComputerFactory tpcf = new 
FieldHashPartitionComputerFactory(sortFields, hashFuns);
+        ITuplePartitionComputerFactory tpcf = 
FieldHashPartitionComputerFactory.of(sortFields, hashFuns);
         IConnectorDescriptor conn = new 
MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields, comps, nkcf);
-        return new Pair<IConnectorDescriptor, TargetConstraint>(conn, 
TargetConstraint.ONE);
+        return new Pair<>(conn, TargetConstraint.ONE);
     }
 
 }
diff --git 
a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
 
b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
index c91a0ea9a2..9ee21050d3 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
@@ -34,13 +34,17 @@ public class FieldHashPartitionComputerFactory implements 
ITuplePartitionCompute
     private final IBinaryHashFunctionFactory[] hashFunctionFactories;
     private final int[][] partitionsMap;
 
-    public FieldHashPartitionComputerFactory(int[] hashFields, 
IBinaryHashFunctionFactory[] hashFunctionFactories) {
-        this.hashFields = hashFields;
-        this.hashFunctionFactories = hashFunctionFactories;
-        this.partitionsMap = null;
+    public static FieldHashPartitionComputerFactory of(int[] hashFields,
+            IBinaryHashFunctionFactory[] hashFunctionFactories) {
+        return new FieldHashPartitionComputerFactory(hashFields, 
hashFunctionFactories, null);
+    }
+
+    public static FieldHashPartitionComputerFactory withMap(int[] hashFields,
+            IBinaryHashFunctionFactory[] hashFunctionFactories, int[][] 
partitionsMap) {
+        return new FieldHashPartitionComputerFactory(hashFields, 
hashFunctionFactories, partitionsMap);
     }
 
-    public FieldHashPartitionComputerFactory(int[] hashFields, 
IBinaryHashFunctionFactory[] hashFunctionFactories,
+    private FieldHashPartitionComputerFactory(int[] hashFields, 
IBinaryHashFunctionFactory[] hashFunctionFactories,
             int[][] partitionsMap) {
         this.hashFields = hashFields;
         this.hashFunctionFactories = hashFunctionFactories;
diff --git 
a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/HashPartitioner.java
 
b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/HashPartitioner.java
index cb97d1d393..a25217e8ba 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/HashPartitioner.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/HashPartitioner.java
@@ -60,7 +60,10 @@ class HashPartitioner {
             int storagePartition = h % storagePartition2Compute.size();
             int computePartition = 
storagePartition2Compute.getOrDefault(storagePartition, Integer.MIN_VALUE);
             if (computePartition < 0 || computePartition >= nParts) {
-                throw new IllegalStateException("couldn't resolve storage 
partition to compute partition");
+                throw new IllegalStateException(
+                        "couldn't resolve storage partition " + 
storagePartition + " to compute partition "
+                                + computePartition + ". num_storage=" + 
storagePartition2Compute.size() + ", nParts="
+                                + nParts + ",storagePartition2Compute=" + 
storagePartition2Compute);
             }
             return computePartition;
         }
diff --git 
a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
 
b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
index 0b6e40e8f9..887052c0fd 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
@@ -37,9 +37,9 @@ public class LocalityAwareMToNPartitioningConnectorDescriptor 
extends AbstractMT
 
     private static final long serialVersionUID = 1L;
 
-    private ILocalityMap localityMap;
+    private final ILocalityMap localityMap;
 
-    private ITuplePartitionComputerFactory tpcf;
+    private final ITuplePartitionComputerFactory tpcf;
 
     public 
LocalityAwareMToNPartitioningConnectorDescriptor(IConnectorDescriptorRegistry 
spec,
             ITuplePartitionComputerFactory tpcf, ILocalityMap localityMap) {
@@ -48,15 +48,6 @@ public class 
LocalityAwareMToNPartitioningConnectorDescriptor extends AbstractMT
         this.tpcf = tpcf;
     }
 
-    /*
-     * (non-Javadoc)
-     *
-     * @see
-     * org.apache.hyracks.api.dataflow.IConnectorDescriptor#createPartitioner
-     * (org.apache.hyracks.api.context.IHyracksTaskContext,
-     * org.apache.hyracks.api.dataflow.value.RecordDescriptor,
-     * org.apache.hyracks.api.comm.IPartitionWriterFactory, int, int, int)
-     */
     @Override
     public IFrameWriter createPartitioner(IHyracksTaskContext ctx, 
RecordDescriptor recordDesc,
             IPartitionWriterFactory edwFactory, int index, int 
nProducerPartitions, int nConsumerPartitions)
@@ -65,14 +56,6 @@ public class 
LocalityAwareMToNPartitioningConnectorDescriptor extends AbstractMT
                 nConsumerPartitions, localityMap, index);
     }
 
-    /*
-     * (non-Javadoc)
-     *
-     * @see org.apache.hyracks.api.dataflow.IConnectorDescriptor#
-     * createPartitionCollector
-     * (org.apache.hyracks.api.context.IHyracksTaskContext,
-     * org.apache.hyracks.api.dataflow.value.RecordDescriptor, int, int, int)
-     */
     @Override
     public IPartitionCollector createPartitionCollector(IHyracksTaskContext 
ctx, RecordDescriptor recordDesc,
             int receiverIndex, int nProducerPartitions, int 
nConsumerPartitions) throws HyracksDataException {
diff --git 
a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
 
b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
index f89ccb00ae..345952ac97 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
@@ -165,9 +165,9 @@ public class InMemoryHashJoinOperatorDescriptor extends 
AbstractOperatorDescript
                 @Override
                 public void open() throws HyracksDataException {
                     ITuplePartitionComputer hpc0 =
-                            new FieldHashPartitionComputerFactory(keys0, 
hashFunctionFactories0).createPartitioner(ctx);
+                            FieldHashPartitionComputerFactory.of(keys0, 
hashFunctionFactories0).createPartitioner(ctx);
                     ITuplePartitionComputer hpc1 =
-                            new FieldHashPartitionComputerFactory(keys1, 
hashFunctionFactories1).createPartitioner(ctx);
+                            FieldHashPartitionComputerFactory.of(keys1, 
hashFunctionFactories1).createPartitioner(ctx);
                     state = new HashBuildTaskState(jobletCtx.getJobId(), new 
TaskId(getActivityId(), partition));
                     ISerializableTable table = new 
SerializableHashTable(tableSize, jobletCtx, bufferManager);
                     state.joiner = new InMemoryHashJoin(jobletCtx, new 
FrameTupleAccessor(rd0), hpc0,
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
index ccd563a664..8d5f834db3 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -202,7 +202,7 @@ public class InsertPipelineExample {
         IBinaryHashFunctionFactory[] hashFactories = new 
IBinaryHashFunctionFactory[1];
         hashFactories[0] = 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY);
         IConnectorDescriptor hashConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, 
hashFactories));
+                FieldHashPartitionComputerFactory.withMap(new int[] { 0 }, 
hashFactories, partitionsMap));
 
         // connect the ops
 
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
index 1495e602aa..425f639909 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -166,7 +166,7 @@ public class PrimaryIndexBulkLoadExample {
         IBinaryHashFunctionFactory[] hashFactories = new 
IBinaryHashFunctionFactory[1];
         hashFactories[0] = 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY);
         IConnectorDescriptor hashConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, 
hashFactories));
+                FieldHashPartitionComputerFactory.withMap(new int[] { 0 }, 
hashFactories, partitionsMap));
         NullSinkOperatorDescriptor nsOpDesc = new 
NullSinkOperatorDescriptor(spec);
         JobHelper.createPartitionConstraint(spec, nsOpDesc, splitNCs);
 
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
index ae718bb3d8..3ed60fbf16 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
@@ -130,7 +130,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(keyFields, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
@@ -179,7 +179,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(keyFields, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
@@ -220,7 +220,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(keyFields, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
@@ -269,7 +269,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(keyFields, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
@@ -309,7 +309,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(keyFields, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
@@ -358,7 +358,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(keyFields, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
@@ -398,7 +398,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields,
+                FieldHashPartitionComputerFactory.of(keyFields,
                         new IBinaryHashFunctionFactory[] {
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
@@ -448,7 +448,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields,
+                FieldHashPartitionComputerFactory.of(keyFields,
                         new IBinaryHashFunctionFactory[] {
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
@@ -492,7 +492,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields,
+                FieldHashPartitionComputerFactory.of(keyFields,
                         new IBinaryHashFunctionFactory[] {
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
@@ -547,7 +547,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields,
+                FieldHashPartitionComputerFactory.of(keyFields,
                         new IBinaryHashFunctionFactory[] {
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
@@ -590,7 +590,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields,
+                FieldHashPartitionComputerFactory.of(keyFields,
                         new IBinaryHashFunctionFactory[] {
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
@@ -642,7 +642,7 @@ public class AggregationTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields,
+                FieldHashPartitionComputerFactory.of(keyFields,
                         new IBinaryHashFunctionFactory[] {
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java
index dc5d01737b..330aee8273 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java
@@ -257,7 +257,7 @@ public class CancelJobTest extends 
AbstractMultiNCIntegrationTest {
 
         // Hash-repartitioning connector.
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, scanOp, 0, sleepOp, 0);
 
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
index fd236bae25..89d985d3c0 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
@@ -105,7 +105,7 @@ public class CountOfCountsTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, 
NC2_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, sorter, 0);
 
@@ -113,7 +113,7 @@ public class CountOfCountsTest extends 
AbstractIntegrationTest {
         spec.connect(conn2, sorter, 0, group, 0);
 
         IConnectorDescriptor conn3 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn3, group, 0, sorter2, 0);
 
@@ -175,7 +175,7 @@ public class CountOfCountsTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, 
NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, sorter, 0);
 
@@ -183,7 +183,7 @@ public class CountOfCountsTest extends 
AbstractIntegrationTest {
         spec.connect(conn2, sorter, 0, group, 0);
 
         IConnectorDescriptor conn3 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn3, group, 0, sorter2, 0);
 
@@ -245,7 +245,7 @@ public class CountOfCountsTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, 
NC1_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, sorter, 0);
 
@@ -253,7 +253,7 @@ public class CountOfCountsTest extends 
AbstractIntegrationTest {
         spec.connect(conn2, sorter, 0, group, 0);
 
         IConnectorDescriptor conn3 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn3, group, 0, sorter2, 0);
 
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
index 2593e1e862..1030fa2c15 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
@@ -99,7 +99,7 @@ public class HeapSortMergeTest extends 
AbstractIntegrationTest {
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, 
sorter, 0);
 
-        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new 
FieldHashPartitionComputerFactory(
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, 
FieldHashPartitionComputerFactory.of(
                 new int[] { 1, 0 },
                 new IBinaryHashFunctionFactory[] { 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
@@ -162,7 +162,7 @@ public class HeapSortMergeTest extends 
AbstractIntegrationTest {
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, 
sorter, 0);
 
-        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new 
FieldHashPartitionComputerFactory(
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, 
FieldHashPartitionComputerFactory.of(
                 new int[] { 1, 0 },
                 new IBinaryHashFunctionFactory[] { 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
index 5a8ec34aab..7e586ad9ab 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
@@ -148,7 +148,7 @@ public class LocalityAwareConnectorTest extends 
AbstractMultiNCIntegrationTest {
         nodemap.set(7);
 
         IConnectorDescriptor conn1 = new 
LocalityAwareMToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields,
+                FieldHashPartitionComputerFactory.of(keyFields,
                         new IBinaryHashFunctionFactory[] {
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
                 new HashtableLocalityMap(nodemap));
@@ -206,7 +206,7 @@ public class LocalityAwareConnectorTest extends 
AbstractMultiNCIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, 
"asterix-005", "asterix-006");
 
         IConnectorDescriptor conn1 = new 
LocalityAwareMToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keyFields,
+                FieldHashPartitionComputerFactory.of(keyFields,
                         new IBinaryHashFunctionFactory[] {
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
                 new GlobalHashingLocalityMap());
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
index 66a2cbd9bb..0f00f50ea2 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
@@ -111,7 +111,7 @@ public class ScanPrintTest extends AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, 
NC2_ID);
 
         IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, ordScanner, 0, printer, 0);
 
@@ -149,9 +149,9 @@ public class ScanPrintTest extends AbstractIntegrationTest {
 
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, 
NC2_ID);
 
-        IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
-                        
PointableBinaryHashFunctionFactory.of(IntegerPointable.FACTORY) }));
+        IConnectorDescriptor conn1 = new 
MToNPartitioningConnectorDescriptor(spec, FieldHashPartitionComputerFactory.of(
+                new int[] { 0 },
+                new IBinaryHashFunctionFactory[] { 
PointableBinaryHashFunctionFactory.of(IntegerPointable.FACTORY) }));
         spec.connect(conn1, ordScanner, 0, printer, 0);
 
         spec.addRoot(printer);
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
index 0c2934a8a1..af18dc44be 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
@@ -91,7 +91,7 @@ public class SortMergeTest extends AbstractIntegrationTest {
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, 
sorter, 0);
 
         spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 },
+                FieldHashPartitionComputerFactory.of(new int[] { 1 },
                         new IBinaryHashFunctionFactory[] {
                                 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
                 new int[] { 1 }, new IBinaryComparatorFactory[] { 
UTF8StringBinaryComparatorFactory.INSTANCE },
@@ -141,7 +141,7 @@ public class SortMergeTest extends AbstractIntegrationTest {
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, 
sorter, 0);
 
-        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new 
FieldHashPartitionComputerFactory(
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, 
FieldHashPartitionComputerFactory.of(
                 new int[] { 1, 0 },
                 new IBinaryHashFunctionFactory[] { 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
index 296b6823ad..1e08c2efe7 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
@@ -354,12 +354,12 @@ public class TPCHCustomerOrderHashJoinTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, 
NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(ordJoinConn, ordScanner, 0, join, 0);
 
         IConnectorDescriptor custJoinConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(custJoinConn, custScanner, 0, join, 1);
 
@@ -413,12 +413,12 @@ public class TPCHCustomerOrderHashJoinTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, 
NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(ordJoinConn, ordScanner, 0, join, 0);
 
         IConnectorDescriptor custJoinConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(custJoinConn, custScanner, 0, join, 1);
 
@@ -471,12 +471,12 @@ public class TPCHCustomerOrderHashJoinTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, 
NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(ordJoinConn, ordScanner, 0, join, 0);
 
         IConnectorDescriptor custJoinConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(custJoinConn, custScanner, 0, join, 1);
 
@@ -535,12 +535,12 @@ public class TPCHCustomerOrderHashJoinTest extends 
AbstractIntegrationTest {
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, 
NC1_ID);
 
         IConnectorDescriptor ordPartConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(ordPartConn, ordScanner, 0, ordMat, 0);
 
         IConnectorDescriptor custPartConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(custPartConn, custScanner, 0, custMat, 0);
 
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
index 804b59f8f1..eafcebaeb6 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
@@ -105,7 +105,7 @@ public class VSizeFrameSortMergeTest extends 
AbstractIntegrationTest {
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, 
sorter, 0);
 
-        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new 
FieldHashPartitionComputerFactory(
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, 
FieldHashPartitionComputerFactory.of(
                 new int[] { 1, 0 },
                 new IBinaryHashFunctionFactory[] { 
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/src/main/java/org/apache/hyracks/examples/text/client/WordCountMain.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/src/main/java/org/apache/hyracks/examples/text/client/WordCountMain.java
index 2bc742a759..d05e353ddc 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/src/main/java/org/apache/hyracks/examples/text/client/WordCountMain.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/src/main/java/org/apache/hyracks/examples/text/client/WordCountMain.java
@@ -167,7 +167,7 @@ public class WordCountMain {
 
             createPartitionConstraint(spec, gBy, outSplits);
             IConnectorDescriptor scanGroupConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                    new FieldHashPartitionComputerFactory(keys, new 
IBinaryHashFunctionFactory[] {
+                    FieldHashPartitionComputerFactory.of(keys, new 
IBinaryHashFunctionFactory[] {
                             
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
             spec.connect(scanGroupConn, wordScanner, 0, gBy, 0);
         } else {
@@ -184,7 +184,7 @@ public class WordCountMain {
             createPartitionConstraint(spec, sorter, outSplits);
 
             IConnectorDescriptor scanSortConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                    new FieldHashPartitionComputerFactory(keys, new 
IBinaryHashFunctionFactory[] {
+                    FieldHashPartitionComputerFactory.of(keys, new 
IBinaryHashFunctionFactory[] {
                             
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
             spec.connect(scanSortConn, wordScanner, 0, sorter, 0);
 
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
index e66529ce91..0b26f16293 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
@@ -176,7 +176,7 @@ public class Groupby {
         }
         // Connect scanner with the grouper
         IConnectorDescriptor scanGroupConnDef2 = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(keys,
+                FieldHashPartitionComputerFactory.of(keys,
                         new IBinaryHashFunctionFactory[] {
                                 // 
PointableBinaryHashFunctionFactory.of(IntegerPointable.FACTORY),
                                 
PointableBinaryHashFunctionFactory.of(IntegerPointable.FACTORY) }));
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Join.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Join.java
index a351c85f74..14ac8e11b1 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Join.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Join.java
@@ -204,12 +204,12 @@ public class Join {
         PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 
numJoinPartitions);
 
         IConnectorDescriptor ordJoinConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 1 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(ordJoinConn, ordScanner, 0, join, 1);
 
         IConnectorDescriptor custJoinConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
+                FieldHashPartitionComputerFactory.of(new int[] { 0 }, new 
IBinaryHashFunctionFactory[] {
                         
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(custJoinConn, custScanner, 0, join, 0);
 
@@ -236,7 +236,7 @@ public class Join {
             createPartitionConstraint(spec, gby, resultSplits);
 
             IConnectorDescriptor joinGroupConn = new 
MToNPartitioningConnectorDescriptor(spec,
-                    new FieldHashPartitionComputerFactory(new int[] { 6 }, new 
IBinaryHashFunctionFactory[] {
+                    FieldHashPartitionComputerFactory.of(new int[] { 6 }, new 
IBinaryHashFunctionFactory[] {
                             
PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
             spec.connect(joinGroupConn, join, 0, gby, 0);
 
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
index 2e57ed8588..bebc7c7dac 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
@@ -158,7 +158,7 @@ public class Sort {
 
         spec.connect(
                 new MToNPartitioningMergingConnectorDescriptor(spec,
-                        new FieldHashPartitionComputerFactory(SortFields, 
orderBinaryHashFunctionFactories), SortFields,
+                        FieldHashPartitionComputerFactory.of(SortFields, 
orderBinaryHashFunctionFactories), SortFields,
                         SortFieldsComparatorFactories, new 
UTF8StringNormalizedKeyComputerFactory()),
                 sorter, 0, printer, 0);
 

Reply via email to