Repository: systemml
Updated Branches:
  refs/heads/master c33e066ac -> eb852482b
[MINOR] Remove redundancy from builtin function hop construction

Project: http://git-wip-us.apache.org/repos/asf/systemml/repo
Commit: http://git-wip-us.apache.org/repos/asf/systemml/commit/eb852482
Tree: http://git-wip-us.apache.org/repos/asf/systemml/tree/eb852482
Diff: http://git-wip-us.apache.org/repos/asf/systemml/diff/eb852482

Branch: refs/heads/master
Commit: eb852482b359dd3405fe0bd3dcd678715a503f43
Parents: c33e066
Author: Matthias Boehm <[email protected]>
Authored: Tue Apr 3 20:41:02 2018 -0700
Committer: Matthias Boehm <[email protected]>
Committed: Tue Apr 3 21:59:40 2018 -0700

----------------------------------------------------------------------
 .../java/org/apache/sysml/hops/AggBinaryOp.java |   5 +-
 .../java/org/apache/sysml/hops/BinaryOp.java    |   8 +-
 .../org/apache/sysml/hops/ConvolutionOp.java    |  86 ++--
 src/main/java/org/apache/sysml/hops/Hop.java    |  36 +-
 .../org/apache/sysml/hops/QuaternaryOp.java     |   2 +-
 .../java/org/apache/sysml/hops/ReorgOp.java     |  10 +-
 .../java/org/apache/sysml/hops/TernaryOp.java   |  16 +-
 .../codegen/opt/PlanSelectionFuseCostBased.java |   8 +-
 .../opt/PlanSelectionFuseCostBasedV2.java       |  11 +-
 .../sysml/hops/rewrite/HopRewriteUtils.java     |   4 +-
 .../RewriteAlgebraicSimplificationDynamic.java  |   4 +-
 .../RewriteAlgebraicSimplificationStatic.java   |   6 +-
 .../apache/sysml/lops/ConvolutionTransform.java |  22 +-
 .../org/apache/sysml/parser/DMLTranslator.java  | 448 ++++---------
 .../parfor/opt/OptimizerRuleBased.java          |   2 +-
 15 files changed, 184 insertions(+), 484 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/AggBinaryOp.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/AggBinaryOp.java b/src/main/java/org/apache/sysml/hops/AggBinaryOp.java
index fbbb2dd..03a1bb6 100644
--- a/src/main/java/org/apache/sysml/hops/AggBinaryOp.java
+++ b/src/main/java/org/apache/sysml/hops/AggBinaryOp.java
@@ -1321,13 +1321,10 @@ public class AggBinaryOp extends Hop implements MultiThreadedHop
 		}
 		else //MR
 		{
-			if( h1 instanceof ReorgOp && ((ReorgOp)h1).getOp()==ReOrgOp.TRANSPOSE )
-			{
+			if( HopRewriteUtils.isTransposeOperation(h1) ) {
 				long m = h1.getDim1();
 				long cd = h1.getDim2();
 				long n = h2.getDim2();
-
-				//note: output size constraint for mapmult already checked by optfindmmultmethod
 				if( m>0 && cd>0 && n>0 && (m*cd > (cd*n + m*n)) &&
 					2 * OptimizerUtils.estimateSizeExactSparsity(cd, n, 1.0) < OptimizerUtils.getLocalMemBudget() &&

http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/BinaryOp.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/BinaryOp.java b/src/main/java/org/apache/sysml/hops/BinaryOp.java
index 35d40c5..d66ac12 100644
--- a/src/main/java/org/apache/sysml/hops/BinaryOp.java
+++ b/src/main/java/org/apache/sysml/hops/BinaryOp.java
@@ -139,8 +139,8 @@ public class BinaryOp extends Hop
 		switch(op) {
 			case IQM:
-			case CENTRALMOMENT:
-			case COVARIANCE:
+			case MOMENT:
+			case COV:
 			case QUANTILE:
 			case INTERQUANTILE:
 			case MEDIAN:
@@ -197,11 +197,11 @@ public class BinaryOp extends Hop
 				constructLopsIQM(et);
 				break;
 			}
-			case CENTRALMOMENT: {
+			case MOMENT: {
 				constructLopsCentralMoment(et);
 				break;
 			}
-			case COVARIANCE: {
+			case COV: {
 				constructLopsCovariance(et);
 				break;
 			}
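The AggBinaryOp and BinaryOp hunks above are representative of the two kinds of cleanup in this
commit: open-coded checks such as "h1 instanceof ReorgOp && getOp()==TRANSPOSE" are routed through
HopRewriteUtils.isTransposeOperation, and hop-side enum constants (CENTRALMOMENT, COVARIANCE,
TRANSPOSE, ...) are shortened to match the parser opcode names (MOMENT, COV, TRANS, ...). The
following minimal sketch uses hypothetical stand-in classes rather than the real SystemML types,
and only illustrates the helper-predicate pattern:

    import java.util.Arrays;
    import java.util.List;

    public class TransposeCheckSketch {
        // hypothetical stand-ins, just enough structure to show the call shape
        enum ReOrgOp { TRANS, DIAG, RESHAPE, SORT, REV }
        static class Hop { }
        static class ReorgOp extends Hop {
            private final ReOrgOp op;
            ReorgOp(ReOrgOp op) { this.op = op; }
            ReOrgOp getOp() { return op; }
        }

        // analogous to HopRewriteUtils.isReorg(...) / isTransposeOperation(...)
        static boolean isReorg(Hop hop, ReOrgOp type) {
            return hop instanceof ReorgOp && ((ReorgOp) hop).getOp() == type;
        }
        static boolean isTransposeOperation(Hop hop) {
            return isReorg(hop, ReOrgOp.TRANS);
        }

        public static void main(String[] args) {
            List<Hop> hops = Arrays.asList(new ReorgOp(ReOrgOp.TRANS), new ReorgOp(ReOrgOp.REV), new Hop());
            for (Hop h : hops)
                System.out.println(isTransposeOperation(h));  // true, false, false
        }
    }

Callers that go through the predicate are unaffected by the TRANSPOSE -> TRANS rename itself,
which keeps the remaining direct enum references (e.g. in ReorgOp's switches) the only places
that had to change.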
http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/ConvolutionOp.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/hops/ConvolutionOp.java b/src/main/java/org/apache/sysml/hops/ConvolutionOp.java index 299f7c9..4224474 100644 --- a/src/main/java/org/apache/sysml/hops/ConvolutionOp.java +++ b/src/main/java/org/apache/sysml/hops/ConvolutionOp.java @@ -116,13 +116,13 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop ArrayList<Hop> inputs = getInput(); switch( op ) { - case MAX_POOLING: - case MAX_POOLING_BACKWARD: - case AVG_POOLING: - case AVG_POOLING_BACKWARD: - case DIRECT_CONV2D: - case DIRECT_CONV2D_BACKWARD_DATA: - case DIRECT_CONV2D_BACKWARD_FILTER: + case MAX_POOL: + case MAX_POOL_BACKWARD: + case AVG_POOL: + case AVG_POOL_BACKWARD: + case CONV2D: + case CONV2D_BACKWARD_DATA: + case CONV2D_BACKWARD_FILTER: case BIAS_ADD: case BIAS_MULTIPLY: { @@ -151,11 +151,11 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop private int getNumExpectedInputs() { switch(op) { - case MAX_POOLING_BACKWARD: - case AVG_POOLING_BACKWARD: - case DIRECT_CONV2D: - case DIRECT_CONV2D_BACKWARD_FILTER: - case DIRECT_CONV2D_BACKWARD_DATA: + case MAX_POOL_BACKWARD: + case AVG_POOL_BACKWARD: + case CONV2D: + case CONV2D_BACKWARD_FILTER: + case CONV2D_BACKWARD_DATA: return 14; case BIAS_ADD: case BIAS_MULTIPLY: @@ -186,7 +186,7 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop } private static boolean isInputConv2d(Hop input) { - return input instanceof ConvolutionOp && ((ConvolutionOp) input).getOp() == ConvOp.DIRECT_CONV2D; + return input instanceof ConvolutionOp && ((ConvolutionOp) input).getOp() == ConvOp.CONV2D; } /** @@ -213,8 +213,8 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop * @return output lop of max_pool/avg_pool operation with same parameters as this hop */ private Lop getMaxPoolOutputLop() { - if(op == ConvOp.MAX_POOLING_BACKWARD || op == ConvOp.AVG_POOLING_BACKWARD) { - ConvOp opType = (op == ConvOp.MAX_POOLING_BACKWARD) ? ConvOp.MAX_POOLING : ConvOp.AVG_POOLING; + if(op == ConvOp.MAX_POOL_BACKWARD || op == ConvOp.AVG_POOL_BACKWARD) { + ConvOp opType = (op == ConvOp.MAX_POOL_BACKWARD) ? ConvOp.MAX_POOL : ConvOp.AVG_POOL; Hop inputImage = getInput().get(0); for(Hop tmpParent : inputImage.getParent()) { if(!(tmpParent instanceof ConvolutionOp)) @@ -242,16 +242,16 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop // by reducing unnecessary sparse-to-dense-to-sparse conversion. // For other backends, this operators is not necessary as it reduces an additional relu operator. 
Hop parentReLU = isInputReLU(inputs.get(0)); - if(OptimizerUtils.ALLOW_OPERATOR_FUSION && et == ExecType.CP && op == ConvOp.MAX_POOLING && parentReLU != null) { + if(OptimizerUtils.ALLOW_OPERATOR_FUSION && et == ExecType.CP && op == ConvOp.MAX_POOL && parentReLU != null) { lhsInputLop = parentReLU.constructLops(); lopOp = OperationTypes.RELU_MAX_POOLING; } - else if(OptimizerUtils.ALLOW_OPERATOR_FUSION && et == ExecType.CP && op == ConvOp.MAX_POOLING_BACKWARD && parentReLU != null) { + else if(OptimizerUtils.ALLOW_OPERATOR_FUSION && et == ExecType.CP && op == ConvOp.MAX_POOL_BACKWARD && parentReLU != null) { lhsInputLop = parentReLU.constructLops(); lopOp = OperationTypes.RELU_MAX_POOLING_BACKWARD; } else if(OptimizerUtils.ALLOW_OPERATOR_FUSION && op == ConvOp.BIAS_ADD && isInputConv2d(inputs.get(0))) { - lopOp = OperationTypes.DIRECT_CONV2D_BIAS_ADD; + lopOp = OperationTypes.CONV2D_BIAS_ADD; // the first lop is image lhsInputLop = inputs.get(0).getInput().get(0).constructLops(); @@ -436,7 +436,7 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop { ArrayList<IntermediateDimensions> gpuIntermediates = new ArrayList<>(); ArrayList<IntermediateDimensions> cpIntermediates = new ArrayList<>(); - if(getOp() == ConvOp.DIRECT_CONV2D) { + if(getOp() == ConvOp.CONV2D) { // Assumption: To compile a GPU conv2d operator, following should fit on the GPU: // 1. output in dense format (i.e. computeOutputMemEstimate) // 2. input in any format @@ -450,7 +450,7 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop // im2col operation preserves the worst-case sparsity of the input. cpIntermediates.add(new IntermediateDimensions(this, "CRS", "PQ", getInput().get(0).getSparsity())); } - else if(getOp() == ConvOp.DIRECT_CONV2D_BACKWARD_DATA) { + else if(getOp() == ConvOp.CONV2D_BACKWARD_DATA) { // Assumption: To compile a GPU conv2d_backward_data operator, following should fit on the GPU: // 1. output in dense format (i.e. computeOutputMemEstimate) // 2. dout in any format @@ -467,7 +467,7 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop // Note: worst-case sparsity for the input of col2im (of size NPQ x CRS where N is determined by degree of parallelism) cpIntermediates.add(new IntermediateDimensions(this, "PQ", "CRS")); } - else if(getOp() == ConvOp.DIRECT_CONV2D_BACKWARD_FILTER) { + else if(getOp() == ConvOp.CONV2D_BACKWARD_FILTER) { // Assumption: To compile a GPU conv2d_backward_filter operator, following should fit on the GPU: // 1. output in dense format (i.e. computeOutputMemEstimate) // 2. dout in any format @@ -483,11 +483,11 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop // im2col operation preserves the worst-case sparsity of the input. 
cpIntermediates.add(new IntermediateDimensions(this, "CRS", "PQ", getInput().get(0).getSparsity())); } - else if(getOp() == ConvOp.MAX_POOLING || getOp() == ConvOp.AVG_POOLING) { + else if(getOp() == ConvOp.MAX_POOL || getOp() == ConvOp.AVG_POOL) { // Account for potential sparse-to-dense conversion of atleast 1 input row gpuIntermediates.add(new IntermediateDimensions(this, 1, "CHW")); } - else if(getOp() == ConvOp.MAX_POOLING_BACKWARD || getOp() == ConvOp.AVG_POOLING_BACKWARD) { + else if(getOp() == ConvOp.MAX_POOL_BACKWARD || getOp() == ConvOp.AVG_POOL_BACKWARD) { // Account for potential sparse-to-dense conversion of atleast 1 input + dout row gpuIntermediates.add(new IntermediateDimensions(this, 1, "CHW")); gpuIntermediates.add(new IntermediateDimensions(this, 1, "CPQ")); @@ -568,10 +568,10 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop ConvolutionParameters parseInput() { Hop imageHeightHop = null; Hop filterHeightHop = null; - if(op == ConvOp.MAX_POOLING_BACKWARD || op == ConvOp.AVG_POOLING_BACKWARD - || op == ConvOp.DIRECT_CONV2D - || op == ConvOp.DIRECT_CONV2D_BACKWARD_FILTER - || op == ConvOp.DIRECT_CONV2D_BACKWARD_DATA) { + if(op == ConvOp.MAX_POOL_BACKWARD || op == ConvOp.AVG_POOL_BACKWARD + || op == ConvOp.CONV2D + || op == ConvOp.CONV2D_BACKWARD_FILTER + || op == ConvOp.CONV2D_BACKWARD_DATA) { imageHeightHop = getInput().get(8); filterHeightHop = getInput().get(12); _cachedParams.setIfUnknown( @@ -605,8 +605,8 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop } if(INFER_TENSOR_SHAPE_FROM_PARENT_CONV_OP) { - boolean isPool = (getOp() == ConvOp.MAX_POOLING || getOp() == ConvOp.AVG_POOLING); - boolean isConv = getOp() == ConvOp.DIRECT_CONV2D; + boolean isPool = (getOp() == ConvOp.MAX_POOL || getOp() == ConvOp.AVG_POOL); + boolean isConv = getOp() == ConvOp.CONV2D; boolean unknownCHWPQ = _cachedParams.C < 0 || _cachedParams.H < 0 || _cachedParams.W < 0 || _cachedParams.P < 0 || _cachedParams.Q < 0; if((isPool || isConv) && unknownCHWPQ) { // Only infer input shape for convolution and maxpool @@ -675,7 +675,7 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop if(parentOp == null) return; - else if(parentOp.getOp() == ConvOp.MAX_POOLING || parentOp.getOp() == ConvOp.AVG_POOLING) { + else if(parentOp.getOp() == ConvOp.MAX_POOL || parentOp.getOp() == ConvOp.AVG_POOL) { ConvolutionParameters parentParam = parentOp.parseInput(); int prevC = _cachedParams.C; int prevH = _cachedParams.H; int prevW = _cachedParams.W; // [C, P, Q] from maxpool becomes [C, H, W] of next op @@ -691,7 +691,7 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop throwExceptionIfNotEqual(prevW, _cachedParams.W, "W"); } } - else if(parentOp.getOp() == ConvOp.DIRECT_CONV2D) { + else if(parentOp.getOp() == ConvOp.CONV2D) { ConvolutionParameters parentParam = parentOp.parseInput(); int prevC = _cachedParams.C; int prevH = _cachedParams.H; int prevW = _cachedParams.W; // [K, P, Q] from convolution becomes [C, H, W] of next op @@ -725,37 +725,37 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop switch(op) { - case MAX_POOLING: - case AVG_POOLING: + case MAX_POOL: + case AVG_POOL: { _dim1 = getDim("N"); _dim2 = getDim("CPQ"); _nnz = -1; // cannot infer stats break; } - case MAX_POOLING_BACKWARD: - case AVG_POOLING_BACKWARD: + case MAX_POOL_BACKWARD: + case AVG_POOL_BACKWARD: { _dim1 = getDim("N"); _dim2 = getDim("CHW"); _nnz = -1; break; } - case DIRECT_CONV2D: + case CONV2D: { _dim1 = getDim("N"); _dim2 = getDim("KPQ"); 
_nnz = -1; // cannot infer stats break; } - case DIRECT_CONV2D_BACKWARD_DATA: + case CONV2D_BACKWARD_DATA: { _dim1 = getDim("N"); _dim2 = getDim("CHW"); _nnz = -1; // cannot infer stats break; } - case DIRECT_CONV2D_BACKWARD_FILTER: + case CONV2D_BACKWARD_FILTER: { _dim1 = getDim("K"); _dim2 = getDim("CRS"); @@ -835,22 +835,22 @@ public class ConvolutionOp extends Hop implements MultiThreadedHop Hop dout = null; // shape: N x KPQ Hop dout1 = null; // shape: N x CPQ - if(getOp() == ConvOp.DIRECT_CONV2D) { + if(getOp() == ConvOp.CONV2D) { input = getInput().get(0); filter = getInput().get(1); } - else if(getOp() == ConvOp.DIRECT_CONV2D_BACKWARD_DATA) { + else if(getOp() == ConvOp.CONV2D_BACKWARD_DATA) { filter = getInput().get(0); dout = getInput().get(1); } - else if(getOp() == ConvOp.DIRECT_CONV2D_BACKWARD_FILTER) { + else if(getOp() == ConvOp.CONV2D_BACKWARD_FILTER) { input = getInput().get(0); dout = getInput().get(1); } - else if(getOp() == ConvOp.MAX_POOLING || getOp() == ConvOp.AVG_POOLING) { + else if(getOp() == ConvOp.MAX_POOL || getOp() == ConvOp.AVG_POOL) { input = getInput().get(0); } - else if(getOp() == ConvOp.MAX_POOLING_BACKWARD || getOp() == ConvOp.AVG_POOLING_BACKWARD) { + else if(getOp() == ConvOp.MAX_POOL_BACKWARD || getOp() == ConvOp.AVG_POOL_BACKWARD) { input = getInput().get(0); dout1 = getInput().get(1); } http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/Hop.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/hops/Hop.java b/src/main/java/org/apache/sysml/hops/Hop.java index 1a93a27..9445220 100644 --- a/src/main/java/org/apache/sysml/hops/Hop.java +++ b/src/main/java/org/apache/sysml/hops/Hop.java @@ -1064,7 +1064,7 @@ public abstract class Hop implements ParseInfo public enum OpOp2 { PLUS, MINUS, MULT, DIV, MODULUS, INTDIV, LESS, LESSEQUAL, GREATER, GREATEREQUAL, EQUAL, NOTEQUAL, MIN, MAX, AND, OR, XOR, LOG, POW, PRINT, CONCAT, QUANTILE, INTERQUANTILE, IQM, - CENTRALMOMENT, COVARIANCE, CBIND, RBIND, SOLVE, MEDIAN, INVALID, + MOMENT, COV, CBIND, RBIND, SOLVE, MEDIAN, INVALID, //fused ML-specific operators for performance MINUS_NZ, //sparse-safe minus: X-(mean*ppred(X,0,!=)) LOG_NZ, //sparse-safe log; ppred(X,0,"!=")*log(X,0.5) @@ -1074,7 +1074,7 @@ public abstract class Hop implements ParseInfo // Operations that require 3 operands public enum OpOp3 { - QUANTILE, INTERQUANTILE, CTABLE, CENTRALMOMENT, COVARIANCE, PLUS_MULT, MINUS_MULT, IFELSE + QUANTILE, INTERQUANTILE, CTABLE, MOMENT, COV, PLUS_MULT, MINUS_MULT, IFELSE } // Operations that require 4 operands @@ -1096,7 +1096,7 @@ public abstract class Hop implements ParseInfo } public enum ReOrgOp { - TRANSPOSE, DIAG, RESHAPE, SORT, REV + TRANS, DIAG, RESHAPE, SORT, REV //Note: Diag types are invalid because for unknown sizes this would //create incorrect plans (now we try to infer it for memory estimates //and rewrites but the final choice is made during runtime) @@ -1104,8 +1104,8 @@ public abstract class Hop implements ParseInfo } public enum ConvOp { - MAX_POOLING, MAX_POOLING_BACKWARD, AVG_POOLING, AVG_POOLING_BACKWARD, - DIRECT_CONV2D, DIRECT_CONV2D_BACKWARD_FILTER, DIRECT_CONV2D_BACKWARD_DATA, + MAX_POOL, MAX_POOL_BACKWARD, AVG_POOL, AVG_POOL_BACKWARD, + CONV2D, CONV2D_BACKWARD_FILTER, CONV2D_BACKWARD_DATA, BIAS_ADD, BIAS_MULTIPLY } @@ -1159,7 +1159,7 @@ public abstract class Hop implements ParseInfo protected static final HashMap<ReOrgOp, org.apache.sysml.lops.Transform.OperationTypes> 
HopsTransf2Lops; static { HopsTransf2Lops = new HashMap<>(); - HopsTransf2Lops.put(ReOrgOp.TRANSPOSE, org.apache.sysml.lops.Transform.OperationTypes.Transpose); + HopsTransf2Lops.put(ReOrgOp.TRANS, org.apache.sysml.lops.Transform.OperationTypes.Transpose); HopsTransf2Lops.put(ReOrgOp.REV, org.apache.sysml.lops.Transform.OperationTypes.Rev); HopsTransf2Lops.put(ReOrgOp.DIAG, org.apache.sysml.lops.Transform.OperationTypes.Diag); HopsTransf2Lops.put(ReOrgOp.RESHAPE, org.apache.sysml.lops.Transform.OperationTypes.Reshape); @@ -1170,15 +1170,15 @@ public abstract class Hop implements ParseInfo protected static final HashMap<ConvOp, org.apache.sysml.lops.ConvolutionTransform.OperationTypes> HopsConv2Lops; static { HopsConv2Lops = new HashMap<>(); - HopsConv2Lops.put(ConvOp.MAX_POOLING, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.MAX_POOLING); - HopsConv2Lops.put(ConvOp.MAX_POOLING_BACKWARD, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.MAX_POOLING_BACKWARD); - HopsConv2Lops.put(ConvOp.AVG_POOLING, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.AVG_POOLING); - HopsConv2Lops.put(ConvOp.AVG_POOLING_BACKWARD, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.AVG_POOLING_BACKWARD); - HopsConv2Lops.put(ConvOp.DIRECT_CONV2D, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.DIRECT_CONV2D); + HopsConv2Lops.put(ConvOp.MAX_POOL, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.MAX_POOL); + HopsConv2Lops.put(ConvOp.MAX_POOL_BACKWARD, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.MAX_POOL_BACKWARD); + HopsConv2Lops.put(ConvOp.AVG_POOL, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.AVG_POOL); + HopsConv2Lops.put(ConvOp.AVG_POOL_BACKWARD, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.AVG_POOL_BACKWARD); + HopsConv2Lops.put(ConvOp.CONV2D, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.CONV2D); HopsConv2Lops.put(ConvOp.BIAS_ADD, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.BIAS_ADD); HopsConv2Lops.put(ConvOp.BIAS_MULTIPLY, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.BIAS_MULTIPLY); - HopsConv2Lops.put(ConvOp.DIRECT_CONV2D_BACKWARD_FILTER, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.DIRECT_CONV2D_BACKWARD_FILTER); - HopsConv2Lops.put(ConvOp.DIRECT_CONV2D_BACKWARD_DATA, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.DIRECT_CONV2D_BACKWARD_DATA); + HopsConv2Lops.put(ConvOp.CONV2D_BACKWARD_FILTER, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.CONV2D_BACKWARD_FILTER); + HopsConv2Lops.put(ConvOp.CONV2D_BACKWARD_DATA, org.apache.sysml.lops.ConvolutionTransform.OperationTypes.CONV2D_BACKWARD_DATA); } protected static final HashMap<Hop.Direction, org.apache.sysml.lops.PartialAggregate.DirectionTypes> HopsDirection2Lops; @@ -1458,8 +1458,8 @@ public abstract class Hop implements ParseInfo HopsOpOp2String.put(OpOp2.INTERQUANTILE, "interquantile"); HopsOpOp2String.put(OpOp2.IQM, "IQM"); HopsOpOp2String.put(OpOp2.MEDIAN, "median"); - HopsOpOp2String.put(OpOp2.CENTRALMOMENT, "cm"); - HopsOpOp2String.put(OpOp2.COVARIANCE, "cov"); + HopsOpOp2String.put(OpOp2.MOMENT, "cm"); + HopsOpOp2String.put(OpOp2.COV, "cov"); HopsOpOp2String.put(OpOp2.CBIND, "cbind"); HopsOpOp2String.put(OpOp2.RBIND, "rbind"); HopsOpOp2String.put(OpOp2.SOLVE, "solve"); @@ -1481,8 +1481,8 @@ public abstract class Hop implements ParseInfo HopsOpOp3String.put(OpOp3.QUANTILE, "quantile"); HopsOpOp3String.put(OpOp3.INTERQUANTILE, "interquantile"); HopsOpOp3String.put(OpOp3.CTABLE, "ctable"); 
- HopsOpOp3String.put(OpOp3.CENTRALMOMENT, "cm"); - HopsOpOp3String.put(OpOp3.COVARIANCE, "cov"); + HopsOpOp3String.put(OpOp3.MOMENT, "cm"); + HopsOpOp3String.put(OpOp3.COV, "cov"); HopsOpOp3String.put(OpOp3.PLUS_MULT, "+*"); HopsOpOp3String.put(OpOp3.MINUS_MULT, "-*"); HopsOpOp3String.put(OpOp3.IFELSE, "ifelse"); @@ -1524,7 +1524,7 @@ public abstract class Hop implements ParseInfo protected static final HashMap<Hop.ReOrgOp, String> HopsTransf2String; static { HopsTransf2String = new HashMap<>(); - HopsTransf2String.put(ReOrgOp.TRANSPOSE, "t"); + HopsTransf2String.put(ReOrgOp.TRANS, "t"); HopsTransf2String.put(ReOrgOp.DIAG, "diag"); HopsTransf2String.put(ReOrgOp.RESHAPE, "rshape"); HopsTransf2String.put(ReOrgOp.SORT, "sort"); http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/QuaternaryOp.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/hops/QuaternaryOp.java b/src/main/java/org/apache/sysml/hops/QuaternaryOp.java index 947f2ba..90cfc98 100644 --- a/src/main/java/org/apache/sysml/hops/QuaternaryOp.java +++ b/src/main/java/org/apache/sysml/hops/QuaternaryOp.java @@ -363,7 +363,7 @@ public class QuaternaryOp extends Hop implements MultiThreadedHop } else { // replication of t(V) for shuffle to target block - Transform ltV = new Transform(V.constructLops(), HopsTransf2Lops.get(ReOrgOp.TRANSPOSE), getDataType(), + Transform ltV = new Transform(V.constructLops(), HopsTransf2Lops.get(ReOrgOp.TRANS), getDataType(), getValueType(), ExecType.MR); ltV.getOutputParameters().setDimensions(V.getDim2(), V.getDim1(), V.getColsInBlock(), V.getRowsInBlock(), V.getNnz()); setLineNumbers(ltV); http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/ReorgOp.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/hops/ReorgOp.java b/src/main/java/org/apache/sysml/hops/ReorgOp.java index 0d3863b..70fb797 100644 --- a/src/main/java/org/apache/sysml/hops/ReorgOp.java +++ b/src/main/java/org/apache/sysml/hops/ReorgOp.java @@ -90,7 +90,7 @@ public class ReorgOp extends Hop implements MultiThreadedHop public void checkArity() { int sz = _input.size(); switch( op ) { - case TRANSPOSE: + case TRANS: case DIAG: case REV: HopsException.check(sz == 1, this, "should have arity 1 for op %s but has arity %d", op, sz); @@ -131,7 +131,7 @@ public class ReorgOp extends Hop implements MultiThreadedHop if(!DMLScript.USE_ACCELERATOR) return false; switch( op ) { - case TRANSPOSE: { + case TRANS: { Lop lin; try { lin = getInput().get(0).constructLops(); @@ -166,7 +166,7 @@ public class ReorgOp extends Hop implements MultiThreadedHop switch( op ) { - case TRANSPOSE: + case TRANS: { Lop lin = getInput().get(0).constructLops(); if( lin instanceof Transform && ((Transform)lin).getOperationType()==OperationTypes.Transpose ) @@ -437,7 +437,7 @@ public class ReorgOp extends Hop implements MultiThreadedHop switch(op) { - case TRANSPOSE: + case TRANS: { // input is a [k1,k2] matrix and output is a [k2,k1] matrix // #nnz in output is exactly the same as in input @@ -556,7 +556,7 @@ public class ReorgOp extends Hop implements MultiThreadedHop switch(op) { - case TRANSPOSE: + case TRANS: { // input is a [k1,k2] matrix and output is a [k2,k1] matrix // #nnz in output is exactly the same as in input http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/TernaryOp.java 
---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/hops/TernaryOp.java b/src/main/java/org/apache/sysml/hops/TernaryOp.java index 1e7876a..c7c7832 100644 --- a/src/main/java/org/apache/sysml/hops/TernaryOp.java +++ b/src/main/java/org/apache/sysml/hops/TernaryOp.java @@ -130,8 +130,8 @@ public class TernaryOp extends Hop if(!DMLScript.USE_ACCELERATOR) return false; switch( _op ) { - case CENTRALMOMENT: - case COVARIANCE: + case MOMENT: + case COV: case CTABLE: case INTERQUANTILE: case QUANTILE: @@ -155,11 +155,11 @@ public class TernaryOp extends Hop try { switch( _op ) { - case CENTRALMOMENT: + case MOMENT: constructLopsCentralMoment(); break; - case COVARIANCE: + case COV: constructLopsCovariance(); break; @@ -198,8 +198,8 @@ public class TernaryOp extends Hop */ private void constructLopsCentralMoment() { - if ( _op != OpOp3.CENTRALMOMENT ) - throw new HopsException("Unexpected operation: " + _op + ", expecting " + OpOp3.CENTRALMOMENT ); + if ( _op != OpOp3.MOMENT ) + throw new HopsException("Unexpected operation: " + _op + ", expecting " + OpOp3.MOMENT ); ExecType et = optFindExecType(); @@ -247,8 +247,8 @@ public class TernaryOp extends Hop * Method to construct LOPs when op = COVARIANCE. */ private void constructLopsCovariance() { - if ( _op != OpOp3.COVARIANCE ) - throw new HopsException("Unexpected operation: " + _op + ", expecting " + OpOp3.COVARIANCE ); + if ( _op != OpOp3.COV ) + throw new HopsException("Unexpected operation: " + _op + ", expecting " + OpOp3.COV ); ExecType et = optFindExecType(); http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBased.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBased.java b/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBased.java index 0dd9480..a4afa6d 100644 --- a/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBased.java +++ b/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBased.java @@ -696,7 +696,7 @@ public class PlanSelectionFuseCostBased extends PlanSelection current.getInput().get(1), 2) ? 1 : 16); break; case MINUS_NZ: case MINUS1_MULT: costs = 2; break; - case CENTRALMOMENT: + case MOMENT: int type = (int) (current.getInput().get(1) instanceof LiteralOp ? HopRewriteUtils.getIntValueSafe((LiteralOp)current.getInput().get(1)) : 2); switch( type ) { @@ -708,7 +708,7 @@ public class PlanSelectionFuseCostBased extends PlanSelection case 5: costs = 16; break; //variance } break; - case COVARIANCE: costs = 23; break; + case COV: costs = 23; break; default: LOG.warn("Cost model not " + "implemented yet for: "+((BinaryOp)current).getOp()); @@ -719,7 +719,7 @@ public class PlanSelectionFuseCostBased extends PlanSelection case PLUS_MULT: case MINUS_MULT: costs = 2; break; case CTABLE: costs = 3; break; - case CENTRALMOMENT: + case MOMENT: int type = (int) (current.getInput().get(1) instanceof LiteralOp ? 
HopRewriteUtils.getIntValueSafe((LiteralOp)current.getInput().get(1)) : 2); switch( type ) { @@ -731,7 +731,7 @@ public class PlanSelectionFuseCostBased extends PlanSelection case 5: costs = 17; break; //variance } break; - case COVARIANCE: costs = 23; break; + case COV: costs = 23; break; default: LOG.warn("Cost model not " + "implemented yet for: "+((TernaryOp)current).getOp()); http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBasedV2.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBasedV2.java b/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBasedV2.java index 76735ea..1e5bcf3 100644 --- a/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBasedV2.java +++ b/src/main/java/org/apache/sysml/hops/codegen/opt/PlanSelectionFuseCostBasedV2.java @@ -48,7 +48,6 @@ import org.apache.sysml.hops.Hop.DataOpTypes; import org.apache.sysml.hops.Hop.Direction; import org.apache.sysml.hops.Hop.OpOp2; import org.apache.sysml.hops.Hop.OpOpN; -import org.apache.sysml.hops.Hop.ReOrgOp; import org.apache.sysml.hops.IndexingOp; import org.apache.sysml.hops.LiteralOp; import org.apache.sysml.hops.OptimizerUtils; @@ -718,7 +717,7 @@ public class PlanSelectionFuseCostBasedV2 extends PlanSelection || HopRewriteUtils.isNary(hop, OpOpN.CBIND) || (hop instanceof AggBinaryOp && (inRow || !hop.dimsKnown() || (hop.getDim1()!=1 && hop.getDim2()!=1))) - || (HopRewriteUtils.isReorg(hop, ReOrgOp.TRANSPOSE) + || (HopRewriteUtils.isTransposeOperation(hop) && (hop.getDim1()!=1 && hop.getDim2()!=1)) || (hop instanceof AggUnaryOp && inRow); } @@ -1075,7 +1074,7 @@ public class PlanSelectionFuseCostBasedV2 extends PlanSelection current.getInput().get(1), 2) ? 1 : 16); break; case MINUS_NZ: case MINUS1_MULT: costs = 2; break; - case CENTRALMOMENT: + case MOMENT: int type = (int) (current.getInput().get(1) instanceof LiteralOp ? HopRewriteUtils.getIntValueSafe((LiteralOp)current.getInput().get(1)) : 2); switch( type ) { @@ -1087,7 +1086,7 @@ public class PlanSelectionFuseCostBasedV2 extends PlanSelection case 5: costs = 16; break; //variance } break; - case COVARIANCE: costs = 23; break; + case COV: costs = 23; break; default: LOG.warn("Cost model not " + "implemented yet for: "+((BinaryOp)current).getOp()); @@ -1099,7 +1098,7 @@ public class PlanSelectionFuseCostBasedV2 extends PlanSelection case PLUS_MULT: case MINUS_MULT: costs = 2; break; case CTABLE: costs = 3; break; - case CENTRALMOMENT: + case MOMENT: int type = (int) (current.getInput().get(1) instanceof LiteralOp ? 
HopRewriteUtils.getIntValueSafe((LiteralOp)current.getInput().get(1)) : 2); switch( type ) { @@ -1111,7 +1110,7 @@ public class PlanSelectionFuseCostBasedV2 extends PlanSelection case 5: costs = 17; break; //variance } break; - case COVARIANCE: costs = 23; break; + case COV: costs = 23; break; default: LOG.warn("Cost model not " + "implemented yet for: "+((TernaryOp)current).getOp()); http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java b/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java index c490bbf..c6c42ae 100644 --- a/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java +++ b/src/main/java/org/apache/sysml/hops/rewrite/HopRewriteUtils.java @@ -502,7 +502,7 @@ public class HopRewriteUtils } public static ReorgOp createTranspose(Hop input) { - return createReorg(input, ReOrgOp.TRANSPOSE); + return createReorg(input, ReOrgOp.TRANS); } public static ReorgOp createReorg(Hop input, ReOrgOp rop) { @@ -853,7 +853,7 @@ public class HopRewriteUtils } public static boolean isTransposeOperation(Hop hop) { - return isReorg(hop, ReOrgOp.TRANSPOSE); + return isReorg(hop, ReOrgOp.TRANS); } public static boolean isTransposeOperation(Hop hop, int maxParents) { http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationDynamic.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationDynamic.java b/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationDynamic.java index ae5bda3..545ffd8 100644 --- a/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationDynamic.java +++ b/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationDynamic.java @@ -377,7 +377,7 @@ public class RewriteAlgebraicSimplificationDynamic extends HopRewriteRule apply |= (rop.getOp()==ReOrgOp.RESHAPE && HopRewriteUtils.isEqualSize(hi, input)); //1x1 dimensions of transpose/reshape -> no need for reorg - apply |= ((rop.getOp()==ReOrgOp.TRANSPOSE || rop.getOp()==ReOrgOp.RESHAPE) + apply |= ((rop.getOp()==ReOrgOp.TRANS || rop.getOp()==ReOrgOp.RESHAPE) && rop.getDim1()==1 && rop.getDim2()==1); if( apply ) { @@ -807,7 +807,7 @@ public class RewriteAlgebraicSimplificationDynamic extends HopRewriteRule { //reorg-operation-specific rewrite Hop hnew = null; - if( rhi.getOp() == ReOrgOp.TRANSPOSE ) + if( rhi.getOp() == ReOrgOp.TRANS ) hnew = HopRewriteUtils.createDataGenOp(input, true, input, true, 0); else if( rhi.getOp() == ReOrgOp.REV ) hnew = HopRewriteUtils.createDataGenOp(input, 0); http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationStatic.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationStatic.java b/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationStatic.java index a75167c..d1629ac 100644 --- a/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationStatic.java +++ b/src/main/java/org/apache/sysml/hops/rewrite/RewriteAlgebraicSimplificationStatic.java @@ -906,7 +906,7 @@ public class 
RewriteAlgebraicSimplificationStatic extends HopRewriteRule && hi.getInput().get(0) instanceof ReorgOp ) //reorg operation { ReorgOp rop = (ReorgOp)hi.getInput().get(0); - if( (rop.getOp()==ReOrgOp.TRANSPOSE || rop.getOp()==ReOrgOp.RESHAPE + if( (rop.getOp()==ReOrgOp.TRANS || rop.getOp()==ReOrgOp.RESHAPE || rop.getOp() == ReOrgOp.REV ) //valid reorg && rop.getParent().size()==1 ) //uagg only reorg consumer { @@ -1043,7 +1043,7 @@ public class RewriteAlgebraicSimplificationStatic extends HopRewriteRule BinaryOp binary = (BinaryOp) hi.getInput().get(0); if( HopRewriteUtils.containsTransposeOperation(X.getParent()) - && !HopRewriteUtils.isValidOp(binary.getOp(), new OpOp2[]{OpOp2.CENTRALMOMENT, OpOp2.QUANTILE})) + && !HopRewriteUtils.isValidOp(binary.getOp(), new OpOp2[]{OpOp2.MOMENT, OpOp2.QUANTILE})) { //clear existing wiring HopRewriteUtils.removeChildReferenceByPos(parent, hi, pos); @@ -1591,7 +1591,7 @@ public class RewriteAlgebraicSimplificationStatic extends HopRewriteRule */ private static Hop removeUnnecessaryReorgOperation(Hop parent, Hop hi, int pos) { - ReOrgOp[] lookup = new ReOrgOp[]{ReOrgOp.TRANSPOSE, ReOrgOp.REV}; + ReOrgOp[] lookup = new ReOrgOp[]{ReOrgOp.TRANS, ReOrgOp.REV}; if( hi instanceof ReorgOp && HopRewriteUtils.isValidOp(((ReorgOp)hi).getOp(), lookup) ) //first reorg { http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/lops/ConvolutionTransform.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/lops/ConvolutionTransform.java b/src/main/java/org/apache/sysml/lops/ConvolutionTransform.java index 192adb5..cd232d0 100644 --- a/src/main/java/org/apache/sysml/lops/ConvolutionTransform.java +++ b/src/main/java/org/apache/sysml/lops/ConvolutionTransform.java @@ -30,10 +30,10 @@ public class ConvolutionTransform extends Lop public enum OperationTypes { - MAX_POOLING, MAX_POOLING_BACKWARD, AVG_POOLING, AVG_POOLING_BACKWARD, + MAX_POOL, MAX_POOL_BACKWARD, AVG_POOL, AVG_POOL_BACKWARD, RELU_MAX_POOLING, RELU_MAX_POOLING_BACKWARD, RELU_BACKWARD, - DIRECT_CONV2D, DIRECT_CONV2D_BACKWARD_FILTER, DIRECT_CONV2D_BACKWARD_DATA, - BIAS_ADD, DIRECT_CONV2D_BIAS_ADD, BIAS_MULTIPLY, CHANNEL_SUMS + CONV2D, CONV2D_BACKWARD_FILTER, CONV2D_BACKWARD_DATA, + BIAS_ADD, CONV2D_BIAS_ADD, BIAS_MULTIPLY, CHANNEL_SUMS } private OperationTypes operation = null; @@ -126,7 +126,7 @@ public class ConvolutionTransform extends Lop private String getOpcode() { switch(operation) { - case MAX_POOLING: + case MAX_POOL: return "maxpooling"; case RELU_MAX_POOLING: @@ -138,19 +138,19 @@ public class ConvolutionTransform extends Lop case RELU_BACKWARD: return "relu_backward"; - case MAX_POOLING_BACKWARD: + case MAX_POOL_BACKWARD: return "maxpooling_backward"; - case AVG_POOLING: + case AVG_POOL: return "avgpooling"; - case AVG_POOLING_BACKWARD: + case AVG_POOL_BACKWARD: return "avgpooling_backward"; - case DIRECT_CONV2D: + case CONV2D: return "conv2d"; - case DIRECT_CONV2D_BIAS_ADD: + case CONV2D_BIAS_ADD: return "conv2d_bias_add"; case BIAS_ADD: @@ -159,10 +159,10 @@ public class ConvolutionTransform extends Lop case BIAS_MULTIPLY: return "bias_multiply"; - case DIRECT_CONV2D_BACKWARD_FILTER: + case CONV2D_BACKWARD_FILTER: return "conv2d_backward_filter"; - case DIRECT_CONV2D_BACKWARD_DATA: + case CONV2D_BACKWARD_DATA: return "conv2d_backward_data"; case CHANNEL_SUMS: http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/parser/DMLTranslator.java 
---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/parser/DMLTranslator.java b/src/main/java/org/apache/sysml/parser/DMLTranslator.java index 9b5f02e..66f3c8e 100644 --- a/src/main/java/org/apache/sysml/parser/DMLTranslator.java +++ b/src/main/java/org/apache/sysml/parser/DMLTranslator.java @@ -40,9 +40,11 @@ import org.apache.sysml.hops.FunctionOp; import org.apache.sysml.hops.FunctionOp.FunctionType; import org.apache.sysml.hops.Hop; import org.apache.sysml.hops.Hop.AggOp; +import org.apache.sysml.hops.Hop.ConvOp; import org.apache.sysml.hops.Hop.DataGenMethod; import org.apache.sysml.hops.Hop.DataOpTypes; import org.apache.sysml.hops.Hop.Direction; +import org.apache.sysml.hops.Hop.OpOp1; import org.apache.sysml.hops.Hop.OpOpN; import org.apache.sysml.hops.Hop.OpOp2; import org.apache.sysml.hops.Hop.OpOp3; @@ -2077,32 +2079,21 @@ public class DMLTranslator case PF: case PCHISQ: case PEXP: - currBuiltinOp = constructDfHop(target.getName(), target.getDataType(), target.getValueType(), source.getOpCode(), paramHops); + currBuiltinOp = constructDfHop(target.getName(), target.getDataType(), + target.getValueType(), source.getOpCode(), paramHops); break; case GROUPEDAGG: - currBuiltinOp = new ParameterizedBuiltinOp( - target.getName(), target.getDataType(), target.getValueType(), ParamBuiltinOp.GROUPEDAGG, paramHops); - break; - case RMEMPTY: - currBuiltinOp = new ParameterizedBuiltinOp( - target.getName(), target.getDataType(), target.getValueType(), ParamBuiltinOp.RMEMPTY, paramHops); - break; - case REPLACE: - currBuiltinOp = new ParameterizedBuiltinOp( - target.getName(), target.getDataType(), target.getValueType(), ParamBuiltinOp.REPLACE, paramHops); - break; - case LOWER_TRI: - currBuiltinOp = new ParameterizedBuiltinOp(target.getName(), target.getDataType(), - target.getValueType(), ParamBuiltinOp.LOWER_TRI, paramHops); - break; - case UPPER_TRI: + case TRANSFORMAPPLY: + case TRANSFORMDECODE: + case TRANSFORMCOLMAP: + case TRANSFORMMETA: currBuiltinOp = new ParameterizedBuiltinOp(target.getName(), target.getDataType(), - target.getValueType(), ParamBuiltinOp.UPPER_TRI, paramHops); + target.getValueType(), ParamBuiltinOp.valueOf(source.getOpCode().name()), paramHops); break; case ORDER: @@ -2114,30 +2105,6 @@ public class DMLTranslator currBuiltinOp = new ReorgOp(target.getName(), target.getDataType(), target.getValueType(), ReOrgOp.SORT, inputs); break; - case TRANSFORMAPPLY: - currBuiltinOp = new ParameterizedBuiltinOp( - target.getName(), target.getDataType(), target.getValueType(), - ParamBuiltinOp.TRANSFORMAPPLY, paramHops); - break; - - case TRANSFORMDECODE: - currBuiltinOp = new ParameterizedBuiltinOp( - target.getName(), target.getDataType(), target.getValueType(), - ParamBuiltinOp.TRANSFORMDECODE, paramHops); - break; - - case TRANSFORMCOLMAP: - currBuiltinOp = new ParameterizedBuiltinOp( - target.getName(), target.getDataType(), target.getValueType(), - ParamBuiltinOp.TRANSFORMCOLMAP, paramHops); - break; - - case TRANSFORMMETA: - currBuiltinOp = new ParameterizedBuiltinOp( - target.getName(), target.getDataType(), target.getValueType(), - ParamBuiltinOp.TRANSFORMMETA, paramHops); - break; - case TOSTRING: //check for input data type and only compile toString Hop for matrices/frames, //for scalars, we compile (s + "") to ensure consistent string output value types @@ -2329,34 +2296,20 @@ public class DMLTranslator switch (source.getOpCode()) { case EVAL: - currBuiltinOp = new NaryOp(target.getName(), 
target.getDataType(), target.getValueType(), OpOpN.EVAL, processAllExpressions(source.getAllExpr(), hops)); + currBuiltinOp = new NaryOp(target.getName(), target.getDataType(), target.getValueType(), + OpOpN.EVAL, processAllExpressions(source.getAllExpr(), hops)); break; case COLSUM: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.SUM, - Direction.Col, expr); - break; - case COLMAX: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.MAX, - Direction.Col, expr); - break; - case COLMIN: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.MIN, - Direction.Col, expr); - break; - case COLMEAN: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.MEAN, - Direction.Col, expr); - break; - case COLPROD: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.PROD, - Direction.Col, expr); + case COLVAR: + currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), + AggOp.valueOf(source.getOpCode().name().substring(3)), Direction.Col, expr); break; - + case COLSD: // colStdDevs = sqrt(colVariances) currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), @@ -2365,19 +2318,14 @@ public class DMLTranslator target.getValueType(), Hop.OpOp1.SQRT, currBuiltinOp); break; - case COLVAR: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), - target.getValueType(), AggOp.VAR, Direction.Col, expr); - break; - case ROWSUM: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.SUM, - Direction.Row, expr); - break; - + case ROWMIN: case ROWMAX: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.MAX, - Direction.Row, expr); + case ROWMEAN: + case ROWPROD: + case ROWVAR: + currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), + AggOp.valueOf(source.getOpCode().name().substring(3)), Direction.Row, expr); break; case ROWINDEXMAX: @@ -2390,21 +2338,6 @@ public class DMLTranslator Direction.Row, expr); break; - case ROWMIN: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.MIN, - Direction.Row, expr); - break; - - case ROWMEAN: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.MEAN, - Direction.Row, expr); - break; - - case ROWPROD: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.PROD, - Direction.Row, expr); - break; - case ROWSD: // rowStdDevs = sqrt(rowVariances) currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), @@ -2413,50 +2346,24 @@ public class DMLTranslator target.getValueType(), Hop.OpOp1.SQRT, currBuiltinOp); break; - case ROWVAR: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), - target.getValueType(), AggOp.VAR, Direction.Row, expr); - break; - case NROW: // If the dimensions are available at compile time, then create a LiteralOp (constant propagation) // Else create a UnaryOp so that a control program instruction is generated - - long nRows = expr.getDim1(); - if (nRows == -1) { - currBuiltinOp = new UnaryOp(target.getName(), target.getDataType(), target.getValueType(), Hop.OpOp1.NROW, expr); - } - else { - currBuiltinOp = new LiteralOp(nRows); - } + 
currBuiltinOp = (expr.getDim1()==-1) ? new UnaryOp(target.getName(), target.getDataType(), + target.getValueType(), Hop.OpOp1.NROW, expr) : new LiteralOp(expr.getDim1()); break; case NCOL: // If the dimensions are available at compile time, then create a LiteralOp (constant propagation) // Else create a UnaryOp so that a control program instruction is generated - - long nCols = expr.getDim2(); - if (nCols == -1) { - currBuiltinOp = new UnaryOp(target.getName(), target.getDataType(), target.getValueType(), Hop.OpOp1.NCOL, expr); - } - else { - currBuiltinOp = new LiteralOp(nCols); - } + currBuiltinOp = (expr.getDim2()==-1) ? new UnaryOp(target.getName(), target.getDataType(), + target.getValueType(), Hop.OpOp1.NCOL, expr) : new LiteralOp(expr.getDim2()); break; case LENGTH: - long nRows2 = expr.getDim1(); - long nCols2 = expr.getDim2(); - /* - * If the dimensions are available at compile time, then create a LiteralOp (constant propagation) - * Else create a UnaryOp so that a control program instruction is generated - */ - if ((nCols2 == -1) || (nRows2 == -1)) { - currBuiltinOp = new UnaryOp(target.getName(), target.getDataType(), target.getValueType(), Hop.OpOp1.LENGTH, expr); - } - else { - long lval = (nCols2 * nRows2); - currBuiltinOp = new LiteralOp(lval); - } + // If the dimensions are available at compile time, then create a LiteralOp (constant propagation) + // Else create a UnaryOp so that a control program instruction is generated + currBuiltinOp = (expr.getDim1()==-1 || expr.getDim2()==-1) ? new UnaryOp(target.getName(), target.getDataType(), + target.getValueType(), Hop.OpOp1.LENGTH, expr) : new LiteralOp(expr.getDim1()*expr.getDim2()); break; case EXISTS: @@ -2465,10 +2372,12 @@ public class DMLTranslator break; case SUM: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.SUM, - Direction.RowCol, expr); + case PROD: + case VAR: + currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), + AggOp.valueOf(source.getOpCode().name()), Direction.RowCol, expr); break; - + case MEAN: if ( expr2 == null ) { // example: x = mean(Y); @@ -2480,7 +2389,7 @@ public class DMLTranslator // stable weighted mean is implemented by using centralMoment with order = 0 Hop orderHop = new LiteralOp(0); currBuiltinOp=new TernaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp3.CENTRALMOMENT, expr, expr2, orderHop); + Hop.OpOp3.MOMENT, expr, expr2, orderHop); } break; @@ -2493,34 +2402,14 @@ public class DMLTranslator target.getValueType(), Hop.OpOp1.SQRT, currBuiltinOp); break; - case VAR: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), - target.getValueType(), AggOp.VAR, Direction.RowCol, expr); - break; - case MIN: - //construct AggUnary for min(X) but BinaryOp for min(X,Y) - if( expr2 == null ) { - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), - AggOp.MIN, Direction.RowCol, expr); - } - else { - currBuiltinOp = new BinaryOp(target.getName(), target.getDataType(), target.getValueType(), OpOp2.MIN, - expr, expr2); - } - break; - case MAX: - //construct AggUnary for max(X) but BinaryOp for max(X,Y) - if( expr2 == null ) { - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), - AggOp.MAX, Direction.RowCol, expr); - } else { - currBuiltinOp = new BinaryOp(target.getName(), target.getDataType(), target.getValueType(), OpOp2.MAX, - expr, expr2); - } + //construct AggUnary for 
min(X) but BinaryOp for min(X,Y) + currBuiltinOp = (expr2 == null) ? new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), + AggOp.valueOf(source.getOpCode().name()), Direction.RowCol, expr) : new BinaryOp(target.getName(), + target.getDataType(), target.getValueType(), OpOp2.valueOf(source.getOpCode().name()), expr, expr2); break; - + case PPRED: String sop = ((StringIdentifier)source.getThirdExpr()).getValue(); sop = sop.replace("\"", ""); @@ -2544,23 +2433,16 @@ public class DMLTranslator currBuiltinOp = new BinaryOp(target.getName(), target.getDataType(), target.getValueType(), operation, expr, expr2); break; - case PROD: - currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.PROD, - Direction.RowCol, expr); - break; case TRACE: currBuiltinOp = new AggUnaryOp(target.getName(), target.getDataType(), target.getValueType(), AggOp.TRACE, Direction.RowCol, expr); break; case TRANS: - currBuiltinOp = new ReorgOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.ReOrgOp.TRANSPOSE, expr); - break; - + case DIAG: case REV: - currBuiltinOp = new ReorgOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.ReOrgOp.REV, expr); + currBuiltinOp = new ReorgOp(target.getName(), target.getDataType(), + target.getValueType(), ReOrgOp.valueOf(source.getOpCode().name()), expr); break; case CBIND: @@ -2572,11 +2454,6 @@ public class DMLTranslator new NaryOp(target.getName(), target.getDataType(), target.getValueType(), appendOp2, processAllExpressions(source.getAllExpr(), hops)); break; - - case DIAG: - currBuiltinOp = new ReorgOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.ReOrgOp.DIAG, expr); - break; case TABLE: @@ -2647,28 +2524,13 @@ public class DMLTranslator // Boolean binary case XOR: - currBuiltinOp = new BinaryOp(target.getName(), target.getDataType(), - target.getValueType(), Hop.OpOp2.XOR, expr, expr2); - break; case BITWAND: - currBuiltinOp = new BinaryOp(target.getName(), target.getDataType(), - target.getValueType(), OpOp2.BITWAND, expr, expr2); - break; case BITWOR: - currBuiltinOp = new BinaryOp(target.getName(), target.getDataType(), - target.getValueType(), OpOp2.BITWOR, expr, expr2); - break; case BITWXOR: - currBuiltinOp = new BinaryOp(target.getName(), target.getDataType(), - target.getValueType(), OpOp2.BITWXOR, expr, expr2); - break; case BITWSHIFTL: - currBuiltinOp = new BinaryOp(target.getName(), target.getDataType(), - target.getValueType(), OpOp2.BITWSHIFTL, expr, expr2); - break; case BITWSHIFTR: currBuiltinOp = new BinaryOp(target.getName(), target.getDataType(), - target.getValueType(), OpOp2.BITWSHIFTR, expr, expr2); + target.getValueType(), OpOp2.valueOf(source.getOpCode().name()), expr, expr2); break; case ABS: @@ -2691,80 +2553,10 @@ public class DMLTranslator case CUMPROD: case CUMMIN: case CUMMAX: - Hop.OpOp1 mathOp1; - switch (source.getOpCode()) { - case ABS: - mathOp1 = Hop.OpOp1.ABS; - break; - case SIN: - mathOp1 = Hop.OpOp1.SIN; - break; - case COS: - mathOp1 = Hop.OpOp1.COS; - break; - case TAN: - mathOp1 = Hop.OpOp1.TAN; - break; - case ASIN: - mathOp1 = Hop.OpOp1.ASIN; - break; - case ACOS: - mathOp1 = Hop.OpOp1.ACOS; - break; - case ATAN: - mathOp1 = Hop.OpOp1.ATAN; - break; - case SINH: - mathOp1 = Hop.OpOp1.SINH; - break; - case COSH: - mathOp1 = Hop.OpOp1.COSH; - break; - case TANH: - mathOp1 = Hop.OpOp1.TANH; - break; - case SIGN: - mathOp1 = Hop.OpOp1.SIGN; - break; - case SQRT: - mathOp1 = Hop.OpOp1.SQRT; - break; - case EXP: 
- mathOp1 = Hop.OpOp1.EXP; - break; - case ROUND: - mathOp1 = Hop.OpOp1.ROUND; - break; - case CEIL: - mathOp1 = Hop.OpOp1.CEIL; - break; - case FLOOR: - mathOp1 = Hop.OpOp1.FLOOR; - break; - case CUMSUM: - mathOp1 = Hop.OpOp1.CUMSUM; - break; - case CUMPROD: - mathOp1 = Hop.OpOp1.CUMPROD; - break; - case CUMMIN: - mathOp1 = Hop.OpOp1.CUMMIN; - break; - case CUMMAX: - mathOp1 = Hop.OpOp1.CUMMAX; - break; - default: - - LOG.error(source.printErrorLocation() + - "processBuiltinFunctionExpression():: Could not find Operation type for builtin function: " - + source.getOpCode()); - - throw new ParseException(source.printErrorLocation() + - "processBuiltinFunctionExpression():: Could not find Operation type for builtin function: " - + source.getOpCode()); - } - currBuiltinOp = new UnaryOp(target.getName(), target.getDataType(), target.getValueType(), mathOp1, expr); + currBuiltinOp = new UnaryOp(target.getName(), target.getDataType(), target.getValueType(), + OpOp1.valueOf(source.getOpCode().name()), expr); break; + case LOG: if (expr2 == null) { Hop.OpOp1 mathOp2; @@ -2804,71 +2596,21 @@ public class DMLTranslator expr, expr2); } break; + case MOMENT: - if (expr3 == null){ - currBuiltinOp=new BinaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp2.CENTRALMOMENT, expr, expr2); - } - else { - currBuiltinOp=new TernaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp3.CENTRALMOMENT, expr, expr2,expr3); - } - break; - case COV: - if (expr3 == null){ - currBuiltinOp=new BinaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp2.COVARIANCE, expr, expr2); - } - else { - currBuiltinOp=new TernaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp3.COVARIANCE, expr, expr2,expr3); - } - break; - case QUANTILE: - if (expr3 == null){ - currBuiltinOp=new BinaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp2.QUANTILE, expr, expr2); - } - else { - currBuiltinOp=new TernaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp3.QUANTILE, expr, expr2,expr3); - } - break; - case INTERQUANTILE: - if ( expr3 == null ) { - currBuiltinOp=new BinaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp2.INTERQUANTILE, expr, expr2); - } - else { - currBuiltinOp=new TernaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp3.INTERQUANTILE, expr, expr2,expr3); - } - break; - - case IQM: - if ( expr2 == null ) { - currBuiltinOp=new UnaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp1.IQM, expr); - } - else { - currBuiltinOp=new BinaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp2.IQM, expr, expr2); - } - break; + currBuiltinOp = (expr3 == null) ? new BinaryOp(target.getName(), target.getDataType(), target.getValueType(), + OpOp2.valueOf(source.getOpCode().name()), expr, expr2) : new TernaryOp(target.getName(), target.getDataType(), + target.getValueType(), OpOp3.valueOf(source.getOpCode().name()), expr, expr2,expr3); + break; + case IQM: case MEDIAN: - if ( expr2 == null ) { - currBuiltinOp=new UnaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp1.MEDIAN, expr); - } - else { - currBuiltinOp=new BinaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp2.MEDIAN, expr, expr2); - } - break; + currBuiltinOp = (expr2 == null) ? 
new UnaryOp(target.getName(), target.getDataType(), target.getValueType(), + OpOp1.valueOf(source.getOpCode().name()), expr) : new BinaryOp(target.getName(), target.getDataType(), + target.getValueType(), OpOp2.valueOf(source.getOpCode().name()), expr, expr2); case IFELSE: currBuiltinOp=new TernaryOp(target.getName(), target.getDataType(), target.getValueType(), @@ -2933,14 +2675,10 @@ public class DMLTranslator break; case INVERSE: - currBuiltinOp = new UnaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp1.INVERSE, expr); - break; - case CHOLESKY: currBuiltinOp = new UnaryOp(target.getName(), target.getDataType(), target.getValueType(), - Hop.OpOp1.CHOLESKY, expr); - break; + OpOp1.valueOf(source.getOpCode().name()), expr); + break; case OUTER: if( !(expr3 instanceof LiteralOp) ) @@ -2953,74 +2691,40 @@ public class DMLTranslator ((BinaryOp)currBuiltinOp).setOuterVectorOperation(true); //flag op as specific outer vector operation currBuiltinOp.refreshSizeInformation(); //force size reevaluation according to 'outer' flag otherwise danger of incorrect dims break; - - case CONV2D: - { - Hop image = expr; - ArrayList<Hop> inHops1 = getALHopsForConvOp(image, source, 1, hops); - currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), Hop.ConvOp.DIRECT_CONV2D, inHops1); - setBlockSizeAndRefreshSizeInfo(image, currBuiltinOp); - break; - } + case BIAS_ADD: - { - ArrayList<Hop> inHops1 = new ArrayList<>(); - inHops1.add(expr); - inHops1.add(expr2); - currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), Hop.ConvOp.BIAS_ADD, inHops1); - setBlockSizeAndRefreshSizeInfo(expr, currBuiltinOp); - break; - } - case BIAS_MULTIPLY: - { + case BIAS_MULTIPLY: { ArrayList<Hop> inHops1 = new ArrayList<>(); inHops1.add(expr); inHops1.add(expr2); - currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), Hop.ConvOp.BIAS_MULTIPLY, inHops1); + currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), + ConvOp.valueOf(source.getOpCode().name()), inHops1); setBlockSizeAndRefreshSizeInfo(expr, currBuiltinOp); break; } case AVG_POOL: - case MAX_POOL: - { - Hop image = expr; - ArrayList<Hop> inHops1 = getALHopsForPoolingForwardIM2COL(image, source, 1, hops); - if(source.getOpCode() == BuiltinFunctionOp.MAX_POOL) - currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), Hop.ConvOp.MAX_POOLING, inHops1); - else - currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), Hop.ConvOp.AVG_POOLING, inHops1); - setBlockSizeAndRefreshSizeInfo(image, currBuiltinOp); + case MAX_POOL: { + currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), + ConvOp.valueOf(source.getOpCode().name()), getALHopsForPoolingForwardIM2COL(expr, source, 1, hops)); + setBlockSizeAndRefreshSizeInfo(expr, currBuiltinOp); break; } case AVG_POOL_BACKWARD: - case MAX_POOL_BACKWARD: - { - Hop image = expr; - ArrayList<Hop> inHops1 = getALHopsForConvOpPoolingCOL2IM(image, source, 1, hops); // process dout as well - if(source.getOpCode() == BuiltinFunctionOp.MAX_POOL_BACKWARD) - currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), Hop.ConvOp.MAX_POOLING_BACKWARD, inHops1); - else - currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), Hop.ConvOp.AVG_POOLING_BACKWARD, 
inHops1); - setBlockSizeAndRefreshSizeInfo(image, currBuiltinOp); + case MAX_POOL_BACKWARD: { + currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), + ConvOp.valueOf(source.getOpCode().name()), getALHopsForConvOpPoolingCOL2IM(expr, source, 1, hops)); + setBlockSizeAndRefreshSizeInfo(expr, currBuiltinOp); break; } + case CONV2D: case CONV2D_BACKWARD_FILTER: - { - Hop image = expr; - ArrayList<Hop> inHops1 = getALHopsForConvOp(image, source, 1, hops); - currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), Hop.ConvOp.DIRECT_CONV2D_BACKWARD_FILTER, inHops1); - setBlockSizeAndRefreshSizeInfo(image, currBuiltinOp); - break; - } - case CONV2D_BACKWARD_DATA: - { - Hop image = expr; - ArrayList<Hop> inHops1 = getALHopsForConvOp(image, source, 1, hops); - currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), Hop.ConvOp.DIRECT_CONV2D_BACKWARD_DATA, inHops1); - setBlockSizeAndRefreshSizeInfo(image, currBuiltinOp); + case CONV2D_BACKWARD_DATA: { + currBuiltinOp = new ConvolutionOp(target.getName(), target.getDataType(), target.getValueType(), + ConvOp.valueOf(source.getOpCode().name()), getALHopsForConvOp(expr, source, 1, hops)); + setBlockSizeAndRefreshSizeInfo(expr, currBuiltinOp); break; } - + default: throw new ParseException("Unsupported builtin function type: "+source.getOpCode()); } http://git-wip-us.apache.org/repos/asf/systemml/blob/eb852482/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java index 9059e0f..536acb3 100644 --- a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java +++ b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptimizerRuleBased.java @@ -1305,7 +1305,7 @@ public class OptimizerRuleBased extends Optimizer && !( h instanceof UnaryOp //only unaryop-cumulativeagg && !((UnaryOp)h).isCumulativeUnaryOperation() ) && !( h instanceof ReorgOp //only reorgop-transpose - && ((ReorgOp)h).getOp() != ReOrgOp.TRANSPOSE )) + && ((ReorgOp)h).getOp() != ReOrgOp.TRANS )) { MultiThreadedHop mhop = (MultiThreadedHop) h; mhop.setMaxNumThreads(opsK); //set max constraint in hop
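The bulk of the deletions sits in DMLTranslator.processBuiltinFunctionExpression, where groups of
near-identical cases (COLSUM/COLMAX/COLMIN/..., ABS/SIN/COS/.../CUMMAX, XOR/BITWAND/...,
CONV2D/MAX_POOL/...) collapse into shared cases that derive the hop operator from the parser
opcode name via Enum.valueOf. This only works because the hop-side enums were renamed to mirror
the parser names (MOMENT, COV, TRANS, MAX_POOL, CONV2D, ...). Below is a self-contained sketch of
that mapping; the enums are hypothetical stand-ins for BuiltinFunctionOp, Hop.OpOp2 and Hop.AggOp,
not the actual SystemML declarations:

    import java.util.Arrays;
    import java.util.List;

    public class OpcodeMappingSketch {
        // stand-in for the parser-side BuiltinFunctionOp opcodes (hypothetical subset)
        enum BuiltinOp { MOMENT, COV, QUANTILE, INTERQUANTILE }
        // stand-in for the hop-side Hop.OpOp2 constants after the rename
        enum OpOp2 { MOMENT, COV, QUANTILE, INTERQUANTILE }
        // stand-in for Hop.AggOp, used by the prefix-stripping variant below
        enum AggOp { SUM, MIN, MAX, MEAN, PROD, VAR }

        // one valueOf call replaces one switch case per builtin function;
        // it relies on the constant names being identical on both sides
        static OpOp2 toHopOp(BuiltinOp op) {
            return OpOp2.valueOf(op.name());
        }

        // the column/row aggregates additionally strip the direction prefix,
        // e.g. "COLSUM" -> AggOp.SUM (paired with Direction.Col in the translator)
        static AggOp toColAggOp(String opcodeName) {
            return AggOp.valueOf(opcodeName.substring(3));
        }

        public static void main(String[] args) {
            for (BuiltinOp op : BuiltinOp.values())
                System.out.println(op + " -> " + toHopOp(op));
            List<String> colOps = Arrays.asList("COLSUM", "COLMEAN", "COLVAR");
            for (String s : colOps)
                System.out.println(s + " -> " + toColAggOp(s));
        }
    }

The trade-off this style accepts is that the two enums are now coupled only by name: a parser
opcode without an identically named hop constant is not caught at compile time but fails at
runtime with an IllegalArgumentException from valueOf.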
