[BUGFIX] Revert all the files of commit 95cbbd6

- This also contains an instruction parsing-related bugfix introduced by 
commit 25a10f4.
- However, there is also an NPE error from recent commits related to the 
Spark bcumoffk instruction. This error will be fixed in later commits, as it 
is independent of commit 95cbbd6.

Closes #851.


Project: http://git-wip-us.apache.org/repos/asf/systemml/repo
Commit: http://git-wip-us.apache.org/repos/asf/systemml/commit/c3fdbb4d
Tree: http://git-wip-us.apache.org/repos/asf/systemml/tree/c3fdbb4d
Diff: http://git-wip-us.apache.org/repos/asf/systemml/diff/c3fdbb4d

Branch: refs/heads/master
Commit: c3fdbb4da7c6cac4d363b31366ae21ef976cde92
Parents: 25a10f4
Author: Niketan Pansare <npan...@us.ibm.com>
Authored: Fri Dec 7 10:51:36 2018 -0800
Committer: Niketan Pansare <npan...@us.ibm.com>
Committed: Fri Dec 7 10:51:36 2018 -0800

----------------------------------------------------------------------
 .../api/mlcontext/MLContextConversionUtil.java  |  5 +-
 src/main/java/org/apache/sysml/hops/DnnOp.java  | 17 ++--
 .../java/org/apache/sysml/hops/IndexingOp.java  |  5 +-
 .../org/apache/sysml/hops/OptimizerUtils.java   | 13 ++-
 .../sysml/hops/codegen/cplan/CNodeNary.java     |  5 +-
 .../hops/recompile/LiteralReplacement.java      |  9 +-
 .../java/org/apache/sysml/lops/compile/Dag.java | 15 ++--
 .../apache/sysml/lops/runtime/RunMRJobs.java    |  3 +-
 .../controlprogram/ParForProgramBlock.java      | 19 ++--
 .../controlprogram/caching/ByteBuffer.java      |  5 +-
 .../controlprogram/caching/CacheStatistics.java | 14 +--
 .../controlprogram/caching/FrameObject.java     | 15 ++--
 .../controlprogram/caching/MatrixObject.java    | 19 ++--
 .../context/ExecutionContext.java               |  3 +-
 .../context/SparkExecutionContext.java          | 31 ++++---
 .../controlprogram/paramserv/LocalPSWorker.java |  3 +-
 .../paramserv/ParamservUtils.java               |  5 +-
 .../paramserv/dp/DRLocalScheme.java             |  3 +-
 .../paramserv/dp/DRSparkScheme.java             |  5 +-
 .../paramserv/dp/DataPartitionSparkScheme.java  |  3 +-
 .../dp/DataPartitionerSparkAggregator.java      |  9 +-
 .../paramserv/dp/SparkDataPartitioner.java      |  7 +-
 .../paramserv/rpc/PSRpcObject.java              |  3 +-
 .../controlprogram/parfor/DataPartitioner.java  |  7 +-
 .../parfor/DataPartitionerLocal.java            | 25 +++---
 .../parfor/DataPartitionerRemoteMR.java         |  3 +-
 .../parfor/DataPartitionerRemoteMapper.java     | 13 ++-
 .../parfor/DataPartitionerRemoteSpark.java      |  3 +-
 .../DataPartitionerRemoteSparkMapper.java       |  5 +-
 .../controlprogram/parfor/RemoteDPParForMR.java | 23 +++--
 .../parfor/RemoteDPParForSpark.java             |  5 +-
 .../parfor/RemoteDPParForSparkWorker.java       | 19 ++--
 .../parfor/RemoteDPParWorkerReducer.java        | 21 +++--
 .../RemoteParForColocatedNLineInputFormat.java  |  3 +-
 .../controlprogram/parfor/RemoteParForMR.java   | 19 ++--
 .../parfor/RemoteParForSparkWorker.java         |  5 +-
 .../parfor/RemoteParWorkerMapper.java           |  3 +-
 .../parfor/ResultMergeLocalFile.java            |  5 +-
 .../parfor/ResultMergeLocalMemory.java          |  5 +-
 .../parfor/ResultMergeRemoteMR.java             |  3 +-
 .../parfor/ResultMergeRemotePartitioning.java   |  5 +-
 .../parfor/ResultMergeRemoteSpark.java          |  5 +-
 .../parfor/TaskPartitionerStatic.java           |  3 +-
 .../instructions/CPInstructionParser.java       |  4 +-
 .../instructions/SPInstructionParser.java       |  6 +-
 .../cp/CentralMomentCPInstruction.java          |  3 +-
 .../instructions/cp/CtableCPInstruction.java    |  5 +-
 .../instructions/cp/DataGenCPInstruction.java   |  5 +-
 .../cp/DataPartitionCPInstruction.java          |  5 +-
 .../instructions/cp/DnnCPInstruction.java       | 11 ++-
 .../cp/FrameIndexingCPInstruction.java          |  3 +-
 .../instructions/cp/IndexingCPInstruction.java  |  9 +-
 .../cp/ListIndexingCPInstruction.java           |  9 +-
 .../runtime/instructions/cp/ListObject.java     |  5 +-
 .../cp/MatrixIndexingCPInstruction.java         |  7 +-
 .../cp/MatrixReshapeCPInstruction.java          |  5 +-
 .../instructions/cp/PMMJCPInstruction.java      |  3 +-
 .../cp/ParameterizedBuiltinCPInstruction.java   |  3 +-
 .../cp/ParamservBuiltinCPInstruction.java       |  3 +-
 .../instructions/cp/ReorgCPInstruction.java     |  3 +-
 .../cp/StringInitCPInstruction.java             |  7 +-
 .../gpu/AggregateBinaryGPUInstruction.java      |  5 +-
 .../gpu/AggregateUnaryGPUInstruction.java       |  5 +-
 .../instructions/gpu/DnnGPUInstruction.java     | 58 ++++++------
 .../instructions/gpu/MMTSJGPUInstruction.java   |  3 +-
 .../gpu/MatrixIndexingGPUInstruction.java       |  9 +-
 .../MatrixMatrixArithmeticGPUInstruction.java   |  3 +-
 .../gpu/MatrixMatrixAxpyGPUInstruction.java     |  3 +-
 ...rixMatrixRelationalBinaryGPUInstruction.java |  3 +-
 .../gpu/MatrixReshapeGPUInstruction.java        |  5 +-
 .../instructions/gpu/ReorgGPUInstruction.java   |  5 +-
 .../ScalarMatrixArithmeticGPUInstruction.java   |  5 +-
 ...larMatrixRelationalBinaryGPUInstruction.java |  5 +-
 .../mr/AggregateBinaryInstruction.java          |  5 +-
 .../spark/AppendGSPInstruction.java             |  5 +-
 .../instructions/spark/CastSPInstruction.java   |  3 +-
 .../spark/CentralMomentSPInstruction.java       |  3 +-
 .../instructions/spark/CpmmSPInstruction.java   |  7 +-
 .../spark/CumulativeAggregateSPInstruction.java |  7 +-
 .../spark/CumulativeOffsetSPInstruction.java    |  3 +-
 .../instructions/spark/DnnSPInstruction.java    | 13 ++-
 .../spark/FrameIndexingSPInstruction.java       |  7 +-
 .../spark/MapmmChainSPInstruction.java          |  3 +-
 .../instructions/spark/MapmmSPInstruction.java  | 19 ++--
 .../spark/MatrixAppendMSPInstruction.java       | 13 ++-
 .../spark/MatrixIndexingSPInstruction.java      | 19 ++--
 ...ReturnParameterizedBuiltinSPInstruction.java |  5 +-
 .../instructions/spark/PMapmmSPInstruction.java |  7 +-
 .../ParameterizedBuiltinSPInstruction.java      | 31 ++++---
 .../instructions/spark/PmmSPInstruction.java    |  3 +-
 .../spark/QuantilePickSPInstruction.java        |  5 +-
 .../spark/QuaternarySPInstruction.java          | 13 ++-
 .../instructions/spark/RandSPInstruction.java   | 15 ++--
 .../instructions/spark/ReorgSPInstruction.java  |  5 +-
 .../instructions/spark/RmmSPInstruction.java    |  3 +-
 .../instructions/spark/SpoofSPInstruction.java  | 17 ++--
 .../instructions/spark/Tsmm2SPInstruction.java  | 27 +++---
 .../spark/data/BlockPartitioner.java            |  7 +-
 .../spark/data/PartitionedBlock.java            |  5 +-
 .../spark/data/PartitionedBroadcast.java        |  5 +-
 .../functions/ExtractBlockForBinaryReblock.java |  5 +-
 .../spark/functions/ExtractGroup.java           |  5 +-
 .../MatrixVectorBinaryOpPartitionFunction.java  |  5 +-
 .../spark/utils/FrameRDDConverterUtils.java     | 45 +++++-----
 .../spark/utils/RDDConverterUtils.java          | 51 ++++++-----
 .../spark/utils/RDDConverterUtilsExt.java       | 15 ++--
 .../instructions/spark/utils/RDDSortUtils.java  | 27 +++---
 .../instructions/spark/utils/SparkUtils.java    |  7 +-
 .../runtime/matrix/data/LibMatrixCUDA.java      |  6 +-
 .../sysml/runtime/matrix/data/MatrixBlock.java  | 93 ++++++++++----------
 .../java/org/apache/sysml/utils/IntUtils.java   | 34 -------
 111 files changed, 512 insertions(+), 637 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/api/mlcontext/MLContextConversionUtil.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/api/mlcontext/MLContextConversionUtil.java 
b/src/main/java/org/apache/sysml/api/mlcontext/MLContextConversionUtil.java
index 1a06fa0..5d2478f 100644
--- a/src/main/java/org/apache/sysml/api/mlcontext/MLContextConversionUtil.java
+++ b/src/main/java/org/apache/sysml/api/mlcontext/MLContextConversionUtil.java
@@ -65,7 +65,6 @@ import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.matrix.data.Pair;
 import org.apache.sysml.runtime.util.DataConverter;
 import org.apache.sysml.runtime.util.UtilFunctions;
-import org.apache.sysml.utils.IntUtils;
 
 import scala.collection.JavaConversions;
 import scala.reflect.ClassTag;
@@ -300,7 +299,7 @@ public class MLContextConversionUtil {
                        frameMetadata.asMatrixCharacteristics() : new 
MatrixCharacteristics();
                ValueType[] schema = (frameMetadata != null) ?
                        frameMetadata.getFrameSchema().getSchema().toArray(new 
ValueType[0]) : 
-                       UtilFunctions.nCopies(IntUtils.toInt(mc.getCols()), 
ValueType.STRING);
+                       UtilFunctions.nCopies((int)mc.getCols(), 
ValueType.STRING);
                
                FrameObject frameObject = new 
FrameObject(OptimizerUtils.getUniqueTempFileName(),
                        new MetaDataFormat(mc, 
OutputInfo.BinaryBlockOutputInfo, InputInfo.BinaryBlockInputInfo), schema);
@@ -690,7 +689,7 @@ public class MLContextConversionUtil {
                try {
                        ValueType[] lschema = null;
                        if (lschema == null)
-                               lschema = 
UtilFunctions.nCopies(IntUtils.toInt(mc.getCols()), ValueType.STRING);
+                               lschema = UtilFunctions.nCopies((int) 
mc.getCols(), ValueType.STRING);
                        rdd = 
FrameRDDConverterUtils.textCellToBinaryBlock(jsc(), javaPairRDDText, mc, 
lschema);
                } catch (DMLRuntimeException e) {
                        e.printStackTrace();

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/hops/DnnOp.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/DnnOp.java 
b/src/main/java/org/apache/sysml/hops/DnnOp.java
index 275d0f0..a948eed 100644
--- a/src/main/java/org/apache/sysml/hops/DnnOp.java
+++ b/src/main/java/org/apache/sysml/hops/DnnOp.java
@@ -32,7 +32,6 @@ import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.instructions.gpu.context.GPUContextPool;
 import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
 import org.apache.sysml.runtime.matrix.data.DnnParameters;
-import org.apache.sysml.utils.IntUtils;
 
 import java.util.ArrayList;
 
@@ -380,18 +379,18 @@ public class DnnOp extends MultiThreadedHop
        private static class IntermediateDimensions {
                int dim1; int dim2; double sp;
                public IntermediateDimensions(DnnOp h, String dim1Str, String 
dim2Str, double sp) {
-                       dim1 = IntUtils.toInt(h.getDim(dim1Str));
-                       dim2 = IntUtils.toInt(h.getDim(dim2Str));
+                       dim1 = (int) h.getDim(dim1Str);
+                       dim2 = (int) h.getDim(dim2Str);
                        this.sp = sp;
                }
                public IntermediateDimensions(DnnOp h, String dim1Str, String 
dim2Str) {
-                       dim1 = IntUtils.toInt(h.getDim(dim1Str));
-                       dim2 = IntUtils.toInt(h.getDim(dim2Str));
+                       dim1 = (int) h.getDim(dim1Str);
+                       dim2 = (int) h.getDim(dim2Str);
                        sp = 1;
                }
                public IntermediateDimensions(DnnOp h, int dim1, String 
dim2Str) {
                        this.dim1 = dim1;
-                       dim2 = IntUtils.toInt(h.getDim(dim2Str));
+                       dim2 = (int) h.getDim(dim2Str);
                        sp = 1;
                }
                
@@ -450,7 +449,7 @@ public class DnnOp extends MultiThreadedHop
                        ArrayList<IntermediateDimensions> gpuIntermediates,
                        ArrayList<IntermediateDimensions> cpIntermediates) {
                // Since CP operators use row-level parallelism by default
-               int numWorkers = 
IntUtils.toInt(Math.min(OptimizerUtils.getConstrainedNumThreads(_maxNumThreads),
 Math.max(getDim("N"), 1)));
+               int numWorkers = (int) 
Math.min(OptimizerUtils.getConstrainedNumThreads(_maxNumThreads), 
Math.max(getDim("N"), 1));
                if(ConfigurationManager.isGPU()) {
                        // Account for potential sparse-to-dense conversion
                        double gpuMemBudget = 
IntermediateDimensions.addEstimateSizes(gpuIntermediates, 1);
@@ -678,10 +677,10 @@ public class DnnOp extends MultiThreadedHop
                // P = as.integer(floor((H + 2*pad_h - R)/stride_h + 1))
                // Q = as.integer(floor((W + 2*pad_w - S)/stride_w + 1))
                if(_cachedParams.P < 0 && _cachedParams.H >= 0 && 
_cachedParams.R >= 0 && _cachedParams.stride_h >= 0 && _cachedParams.pad_h >= 
0) {
-                       _cachedParams.P = 
IntUtils.toInt(org.apache.sysml.runtime.util.DnnUtils.getP(_cachedParams.H, 
_cachedParams.R, _cachedParams.stride_h, _cachedParams.pad_h));
+                       _cachedParams.P = (int) 
org.apache.sysml.runtime.util.DnnUtils.getP(_cachedParams.H, _cachedParams.R, 
_cachedParams.stride_h, _cachedParams.pad_h);
                }
                if(_cachedParams.Q < 0 && _cachedParams.W >= 0 && 
_cachedParams.S >= 0 && _cachedParams.stride_w >= 0 && _cachedParams.pad_w >= 
0) {
-                       _cachedParams.Q = 
IntUtils.toInt(org.apache.sysml.runtime.util.DnnUtils.getQ(_cachedParams.W, 
_cachedParams.S, _cachedParams.stride_w, _cachedParams.pad_w));
+                       _cachedParams.Q = (int) 
org.apache.sysml.runtime.util.DnnUtils.getQ(_cachedParams.W, _cachedParams.S, 
_cachedParams.stride_w, _cachedParams.pad_w);
                }
                
                return _cachedParams;

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/hops/IndexingOp.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/IndexingOp.java 
b/src/main/java/org/apache/sysml/hops/IndexingOp.java
index 8d12da7..1091027 100644
--- a/src/main/java/org/apache/sysml/hops/IndexingOp.java
+++ b/src/main/java/org/apache/sysml/hops/IndexingOp.java
@@ -31,7 +31,6 @@ import org.apache.sysml.lops.LopProperties.ExecType;
 import org.apache.sysml.parser.Expression.DataType;
 import org.apache.sysml.parser.Expression.ValueType;
 import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
-import org.apache.sysml.utils.IntUtils;
 
 //for now only works for range based indexing op
 public class IndexingOp extends Hop 
@@ -348,8 +347,8 @@ public class IndexingOp extends Hop
                long ru = (input3 instanceof LiteralOp) ? 
(HopRewriteUtils.getIntValueSafe((LiteralOp)input3)) : -1;
                long cl = (input4 instanceof LiteralOp) ? 
(HopRewriteUtils.getIntValueSafe((LiteralOp)input4)) : -1;
                long cu = (input5 instanceof LiteralOp) ? 
(HopRewriteUtils.getIntValueSafe((LiteralOp)input5)) : -1;
-               int brlen = input1.getRowsInBlock();
-               int bclen = input1.getColsInBlock();
+               int brlen = (int)input1.getRowsInBlock();
+               int bclen = (int)input1.getColsInBlock();
                
                return OptimizerUtils.isIndexingRangeBlockAligned(rl, ru, cl, 
cu, brlen, bclen);
        }

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/hops/OptimizerUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/OptimizerUtils.java 
b/src/main/java/org/apache/sysml/hops/OptimizerUtils.java
index 432319f..a43abb3 100644
--- a/src/main/java/org/apache/sysml/hops/OptimizerUtils.java
+++ b/src/main/java/org/apache/sysml/hops/OptimizerUtils.java
@@ -57,7 +57,6 @@ import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.matrix.data.SparseBlock;
 import org.apache.sysml.runtime.util.IndexRange;
 import org.apache.sysml.runtime.util.UtilFunctions;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.yarn.ropt.YarnClusterAnalyzer;
 
 public class OptimizerUtils 
@@ -520,7 +519,7 @@ public class OptimizerUtils
                        
                        //correction max number of reducers on yarn clusters
                        if( InfrastructureAnalyzer.isYarnEnabled() )
-                               ret = IntUtils.toInt(Math.max( ret, 
YarnClusterAnalyzer.getNumCores()/2 ));
+                               ret = (int)Math.max( ret, 
YarnClusterAnalyzer.getNumCores()/2 );
                }
                
                return ret;
@@ -535,7 +534,7 @@ public class OptimizerUtils
                        
                //correction max number of reducers on yarn clusters
                if( InfrastructureAnalyzer.isYarnEnabled() )
-                       ret = IntUtils.toInt(Math.max( ret, 
YarnClusterAnalyzer.getNumCores() ));
+                       ret = (int)Math.max( ret, 
YarnClusterAnalyzer.getNumCores() );
                
                return ret;
        }
@@ -583,7 +582,7 @@ public class OptimizerUtils
                //compute degree of parallelism for parallel text read
                double dop = InfrastructureAnalyzer.getLocalParallelism()
                                     * PARALLEL_CP_READ_PARALLELISM_MULTIPLIER;
-               return IntUtils.toInt( Math.round(dop) );
+               return (int) Math.round(dop);
        }
 
        public static int getParallelBinaryReadParallelism()
@@ -594,7 +593,7 @@ public class OptimizerUtils
                //compute degree of parallelism for parallel text read
                double dop = InfrastructureAnalyzer.getLocalParallelism()
                                     * PARALLEL_CP_READ_PARALLELISM_MULTIPLIER;
-               return IntUtils.toInt( Math.round(dop) );
+               return (int) Math.round(dop);
        }
        
        /**
@@ -613,7 +612,7 @@ public class OptimizerUtils
                //compute degree of parallelism for parallel text read
                double dop = InfrastructureAnalyzer.getLocalParallelism()
                                     * PARALLEL_CP_WRITE_PARALLELISM_MULTIPLIER;
-               return IntUtils.toInt( Math.round(dop) );
+               return (int) Math.round(dop);
        }
 
        public static int getParallelBinaryWriteParallelism()
@@ -624,7 +623,7 @@ public class OptimizerUtils
                //compute degree of parallelism for parallel text read
                double dop = InfrastructureAnalyzer.getLocalParallelism()
                                     * PARALLEL_CP_WRITE_PARALLELISM_MULTIPLIER;
-               return IntUtils.toInt( Math.round(dop) );
+               return (int) Math.round(dop);
        }
        
        ////////////////////////

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/hops/codegen/cplan/CNodeNary.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/hops/codegen/cplan/CNodeNary.java 
b/src/main/java/org/apache/sysml/hops/codegen/cplan/CNodeNary.java
index b7d0d26..1a717d3 100644
--- a/src/main/java/org/apache/sysml/hops/codegen/cplan/CNodeNary.java
+++ b/src/main/java/org/apache/sysml/hops/codegen/cplan/CNodeNary.java
@@ -27,7 +27,6 @@ import org.apache.sysml.hops.codegen.template.TemplateUtils;
 import org.apache.sysml.parser.Expression.DataType;
 import org.apache.sysml.runtime.util.DnnUtils;
 import org.apache.sysml.runtime.util.UtilFunctions;
-import org.apache.sysml.utils.IntUtils;
 
 public class CNodeNary extends CNode
 {
@@ -231,8 +230,8 @@ public class CNodeNary extends CNode
                int K = Integer.parseInt(inputs.get(off+9).getVarname());
                int R = Integer.parseInt(inputs.get(off+11).getVarname());
                int S = Integer.parseInt(inputs.get(off+12).getVarname());
-               int P = IntUtils.toInt(DnnUtils.getP(H, R, 1, 0));
-               int Q = IntUtils.toInt(DnnUtils.getQ(W, S, 1, 0));
+               int P = (int) DnnUtils.getP(H, R, 1, 0);
+               int Q = (int) DnnUtils.getQ(W, S, 1, 0);
                
                //construct parameter string
                return "rix, " + StringUtils.join(

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/hops/recompile/LiteralReplacement.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/hops/recompile/LiteralReplacement.java 
b/src/main/java/org/apache/sysml/hops/recompile/LiteralReplacement.java
index c0c8acd..c81848a 100644
--- a/src/main/java/org/apache/sysml/hops/recompile/LiteralReplacement.java
+++ b/src/main/java/org/apache/sysml/hops/recompile/LiteralReplacement.java
@@ -44,7 +44,6 @@ import org.apache.sysml.runtime.instructions.cp.ListObject;
 import org.apache.sysml.runtime.instructions.cp.ScalarObject;
 import org.apache.sysml.runtime.instructions.cp.ScalarObjectFactory;
 import org.apache.sysml.runtime.matrix.data.MatrixBlock;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.utils.Statistics;
 
 public class LiteralReplacement 
@@ -248,7 +247,7 @@ public class LiteralReplacement
                                if( mo.getNumRows()*mo.getNumColumns() < 
REPLACE_LITERALS_MAX_MATRIX_SIZE )
                                {
                                        MatrixBlock mBlock = mo.acquireRead();
-                                       double value = 
mBlock.getValue(IntUtils.toInt(rlval-1),IntUtils.toInt(clval-1));
+                                       double value = 
mBlock.getValue((int)rlval-1,(int)clval-1);
                                        mo.release();
                                        
                                        //literal substitution (always double)
@@ -322,7 +321,7 @@ public class LiteralReplacement
                                if( mo.getNumRows()*mo.getNumColumns() < 
REPLACE_LITERALS_MAX_MATRIX_SIZE )
                                {
                                        MatrixBlock mBlock = mo.acquireRead();
-                                       MatrixBlock mBlock2 = 
mBlock.slice(IntUtils.toInt(rlval-1), IntUtils.toInt(ruval-1), 
IntUtils.toInt(clval-1), IntUtils.toInt(cuval-1), new MatrixBlock());
+                                       MatrixBlock mBlock2 = 
mBlock.slice((int)(rlval-1), (int)(ruval-1), (int)(clval-1), (int)(cuval-1), 
new MatrixBlock());
                                        double value = 
replaceUnaryAggregate((AggUnaryOp)c, mBlock2);
                                        mo.release();
                                                
@@ -370,7 +369,7 @@ public class LiteralReplacement
                                String varname = 
Dag.getNextUniqueVarname(DataType.MATRIX);
                                LiteralOp lit = (LiteralOp) 
ix.getInput().get(1);
                                MatrixObject mo = (MatrixObject) 
(!lit.getValueType().isNumeric() ?
-                                       list.slice(lit.getName()) : 
list.slice(IntUtils.toInt(lit.getLongValue()-1)));
+                                       list.slice(lit.getName()) : 
list.slice((int)lit.getLongValue()-1));
                                vars.put(varname, mo);
                                ret = 
HopRewriteUtils.createTransientRead(varname, c);
                        }
@@ -392,7 +391,7 @@ public class LiteralReplacement
                                ListObject list = 
(ListObject)vars.get(ixIn.getName());
                                LiteralOp lit = (LiteralOp) 
ix.getInput().get(1);
                                ScalarObject so = (ScalarObject) 
(!lit.getValueType().isNumeric() ?
-                                       list.slice(lit.getName()) : 
list.slice(IntUtils.toInt(lit.getLongValue()-1)));
+                                       list.slice(lit.getName()) : 
list.slice((int)lit.getLongValue()-1));
                                return ScalarObjectFactory.createLiteralOp(so);
                        }
                }

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/lops/compile/Dag.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/lops/compile/Dag.java 
b/src/main/java/org/apache/sysml/lops/compile/Dag.java
index ce7288e..47b497c 100644
--- a/src/main/java/org/apache/sysml/lops/compile/Dag.java
+++ b/src/main/java/org/apache/sysml/lops/compile/Dag.java
@@ -83,7 +83,6 @@ import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
 import org.apache.sysml.runtime.matrix.data.InputInfo;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.matrix.sort.PickFromCompactInputFormat;
-import org.apache.sysml.utils.IntUtils;
 
 
 /**
@@ -2339,8 +2338,8 @@ public class Dag<N extends Lop>
                                // generate an instruction that creates a 
symbol table entry for the new variable
                                //String createInst = 
prepareVariableInstruction("createvar", node);
                                
//out.addPreInstruction(CPInstructionParser.parseSingleInstruction(createInst));
-                               int rpb = 
IntUtils.toInt(oparams.getRowsInBlock());
-                               int cpb = 
IntUtils.toInt(oparams.getColsInBlock());
+                               int rpb = (int) oparams.getRowsInBlock();
+                               int cpb = (int) oparams.getColsInBlock();
                                Instruction createvarInst = 
VariableCPInstruction.prepareCreateVariableInstruction(
                                                                                
oparams.getLabel(),
                                                                                
        oparams.getFile_name(), 
@@ -2378,7 +2377,7 @@ public class Dag<N extends Lop>
                                                                getFilePath() + 
fnOutParams.getLabel(), 
                                                                true, 
fnOut.getDataType(),
                                                                
OutputInfo.outputInfoToString(getOutputInfo(fnOut, false)),
-                                                               new 
MatrixCharacteristics(fnOutParams.getNumRows(), fnOutParams.getNumCols(), 
IntUtils.toInt(fnOutParams.getRowsInBlock()), 
IntUtils.toInt(fnOutParams.getColsInBlock()), fnOutParams.getNnz()),
+                                                               new 
MatrixCharacteristics(fnOutParams.getNumRows(), fnOutParams.getNumCols(), 
(int)fnOutParams.getRowsInBlock(), (int)fnOutParams.getColsInBlock(), 
fnOutParams.getNnz()),
                                                                
oparams.getUpdateType()
                                                        );
                                                
@@ -2484,8 +2483,8 @@ public class Dag<N extends Lop>
                                                String tempVarName = 
oparams.getLabel() + "temp";
                                                String tempFileName = 
getNextUniqueFilename();
                                                
-                                               int rpb = IntUtils.toInt( 
oparams.getRowsInBlock() );
-                                               int cpb = IntUtils.toInt( 
oparams.getColsInBlock() );
+                                               int rpb = (int) 
oparams.getRowsInBlock();
+                                               int cpb = (int) 
oparams.getColsInBlock();
                                                Instruction createvarInst = 
VariableCPInstruction.prepareCreateVariableInstruction(
                                                                                
                        tempVarName, 
                                                                                
                        tempFileName, 
@@ -2540,8 +2539,8 @@ public class Dag<N extends Lop>
                                                // create a variable to hold 
the result produced by this "rootNode"
                                                oparams.setLabel("pVar" + 
var_index.getNextID() );
                                                
-                                               int rpb = IntUtils.toInt( 
oparams.getRowsInBlock() );
-                                               int cpb = IntUtils.toInt( 
oparams.getColsInBlock() );
+                                               int rpb = (int) 
oparams.getRowsInBlock();
+                                               int cpb = (int) 
oparams.getColsInBlock();
                                                Lop fnameLop = 
((Data)node).getNamedInputLop(DataExpression.IO_FILENAME);
                                                String fnameStr = (fnameLop 
instanceof Data && ((Data)fnameLop).isLiteral()) ? 
                                                                           
fnameLop.getOutputParameters().getLabel() 

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/lops/runtime/RunMRJobs.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/lops/runtime/RunMRJobs.java 
b/src/main/java/org/apache/sysml/lops/runtime/RunMRJobs.java
index ea0cb24..e8145ac 100644
--- a/src/main/java/org/apache/sysml/lops/runtime/RunMRJobs.java
+++ b/src/main/java/org/apache/sysml/lops/runtime/RunMRJobs.java
@@ -67,7 +67,6 @@ import org.apache.sysml.runtime.matrix.data.MatrixBlock;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.matrix.data.RandomMatrixGenerator;
 import org.apache.sysml.runtime.util.MapReduceTool;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.utils.Statistics;
 
 
@@ -493,7 +492,7 @@ public class RunMRJobs
                                RandInstruction lrand = 
(RandInstruction)ldgInst; 
                                RandomMatrixGenerator rgen = 
LibMatrixDatagen.createRandomMatrixGenerator(
                                                                                
                                                        
lrand.getProbabilityDensityFunction(), 
-                                                                               
                                                        
IntUtils.toInt(lrand.getRows()), IntUtils.toInt(lrand.getCols()), 
+                                                                               
                                                        (int)lrand.getRows(), 
(int)lrand.getCols(), 
                                                                                
                                                        lrand.getRowsInBlock(), 
lrand.getColsInBlock(), 
                                                                                
                                                        lrand.getSparsity(), 
lrand.getMinValue(), lrand.getMaxValue(), 
                                                                                
                                                        lrand.getPdfParams());

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java 
b/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
index c131215..a851a4d 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/ParForProgramBlock.java
@@ -111,7 +111,6 @@ import org.apache.sysml.runtime.io.IOUtilFunctions;
 import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.util.UtilFunctions;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.utils.Statistics;
 import org.apache.sysml.yarn.ropt.YarnClusterAnalyzer;
 
@@ -889,7 +888,7 @@ public class ParForProgramBlock extends ForProgramBlock
                String resultFile = constructResultFileName();
                
                long numIterations = partitioner.getNumIterations();
-               int maxDigits = IntUtils.toInt(Math.log10(to.getLongValue()) + 
1);
+               int maxDigits = (int)Math.log10(to.getLongValue()) + 1;
                long numCreatedTasks = -1;
                if( USE_STREAMING_TASK_CREATION ) {
                        LocalTaskQueue<Task> queue = new LocalTaskQueue<>();
@@ -1454,7 +1453,7 @@ public class ParForProgramBlock extends ForProgramBlock
                int maxNumRed = 
InfrastructureAnalyzer.getRemoteParallelReduceTasks();
                //correction max number of reducers on yarn clusters
                if( InfrastructureAnalyzer.isYarnEnabled() )
-                       maxNumRed = IntUtils.toInt(Math.max( maxNumRed, 
YarnClusterAnalyzer.getNumCores()/2 ));
+                       maxNumRed = (int)Math.max( maxNumRed, 
YarnClusterAnalyzer.getNumCores()/2 );
                int numRed = Math.min(numReducers,maxNumRed);
                
                //create data partitioner
@@ -1485,7 +1484,7 @@ public class ParForProgramBlock extends ForProgramBlock
                //determine degree of parallelism
                int maxMap = -1, maxRed = -1;
                if( OptimizerUtils.isSparkExecutionMode() ) {
-                       maxMap = IntUtils.toInt( 
SparkExecutionContext.getDefaultParallelism(true));
+                       maxMap = (int) 
SparkExecutionContext.getDefaultParallelism(true);
                        maxRed = maxMap; //equal map/reduce
                }
                else {
@@ -1495,8 +1494,8 @@ public class ParForProgramBlock extends ForProgramBlock
                                
InfrastructureAnalyzer.getRemoteParallelReduceTasks());
                        //correction max number of reducers on yarn clusters
                        if( InfrastructureAnalyzer.isYarnEnabled() ) {
-                               maxMap = IntUtils.toInt(Math.max( maxMap, 
YarnClusterAnalyzer.getNumCores() ));
-                               maxRed = IntUtils.toInt(Math.max( maxRed, 
YarnClusterAnalyzer.getNumCores()/2 ));
+                               maxMap = (int)Math.max( maxMap, 
YarnClusterAnalyzer.getNumCores() );
+                               maxRed = (int)Math.max( maxRed, 
YarnClusterAnalyzer.getNumCores()/2 );
                        }
                }
                int numMap = Math.max(_numThreads, maxMap);
@@ -1633,8 +1632,8 @@ public class ParForProgramBlock extends ForProgramBlock
                        int par = Math.min( _resultVars.size(), 
                                                    
InfrastructureAnalyzer.getLocalParallelism() );
                        if( InfrastructureAnalyzer.isLocalMode() ) {
-                               int parmem = 
IntUtils.toInt(Math.floor(OptimizerUtils.getLocalMemBudget() / 
-                                               
InfrastructureAnalyzer.getRemoteMaxMemorySortBuffer()));
+                               int parmem = 
(int)Math.floor(OptimizerUtils.getLocalMemBudget() / 
+                                               
InfrastructureAnalyzer.getRemoteMaxMemorySortBuffer());
                                par = Math.min(par, Math.max(parmem, 1)); 
//reduce k if necessary
                        }
                        
@@ -1750,7 +1749,7 @@ public class ParForProgramBlock extends ForProgramBlock
                if( _IDPrefix == -1 ) //not specified
                        _ID = _pfIDSeq.getNextID(); //generated new ID
                else //remote case (further nested parfors are all in one JVM)
-                       _ID = IDHandler.concatIntIDsToLong(_IDPrefix, 
IntUtils.toInt(_pfIDSeq.getNextID()));    
+                       _ID = IDHandler.concatIntIDsToLong(_IDPrefix, 
(int)_pfIDSeq.getNextID());       
        }
        
        /**
@@ -1770,7 +1769,7 @@ public class ParForProgramBlock extends ForProgramBlock
                        if(_IDPrefix == -1)
                                _pwIDs[i] = _pwIDSeq.getNextID();
                        else
-                               _pwIDs[i] = 
IDHandler.concatIntIDsToLong(_IDPrefix,IntUtils.toInt(_pwIDSeq.getNextID()));
+                               _pwIDs[i] = 
IDHandler.concatIntIDsToLong(_IDPrefix,(int)_pwIDSeq.getNextID());
                        
                        if( _monitor ) 
                                StatisticMonitor.putPfPwMapping(_ID, _pwIDs[i]);

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/caching/ByteBuffer.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/ByteBuffer.java 
b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/ByteBuffer.java
index db3b073..a87e4b4 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/ByteBuffer.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/ByteBuffer.java
@@ -28,7 +28,6 @@ import java.io.IOException;
 import org.apache.sysml.runtime.matrix.data.FrameBlock;
 import org.apache.sysml.runtime.matrix.data.MatrixBlock;
 import org.apache.sysml.runtime.util.LocalFileUtils;
-import org.apache.sysml.utils.IntUtils;
 
 /**
  * Wrapper for WriteBuffer byte array per matrix/frame in order to
@@ -62,9 +61,9 @@ public class ByteBuffer
                        {
                                //deep serialize (for compression)
                                if( CacheableData.CACHING_BUFFER_PAGECACHE )
-                                       _bdata = 
PageCache.getPage(IntUtils.toInt(_size));
+                                       _bdata = PageCache.getPage((int)_size);
                                if( _bdata==null )
-                                       _bdata = new 
byte[IntUtils.toInt(_size)];
+                                       _bdata = new byte[(int)_size];
                                DataOutput dout = new CacheDataOutput(_bdata);
                                cb.write(dout);
                        }

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheStatistics.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheStatistics.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheStatistics.java
index b8fde2f..c569787 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheStatistics.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheStatistics.java
@@ -86,7 +86,7 @@ public class CacheStatistics
                _numHitsMem.increment();
        }
        
-       public static void incrementMemHits(long delta) {
+       public static void incrementMemHits(int delta) {
                _numHitsMem.add(delta);
        }
        
@@ -98,7 +98,7 @@ public class CacheStatistics
                _numHitsFSBuff.increment();
        }
        
-       public static void incrementFSBuffHits( long delta ) {
+       public static void incrementFSBuffHits( int delta ) {
                _numHitsFSBuff.add(delta);
        }
        
@@ -110,7 +110,7 @@ public class CacheStatistics
                _numHitsFS.increment();
        }
        
-       public static void incrementFSHits(long delta) {
+       public static void incrementFSHits(int delta) {
                _numHitsFS.add(delta);
        }
        
@@ -122,7 +122,7 @@ public class CacheStatistics
                _numHitsHDFS.increment();
        }
        
-       public static void incrementHDFSHits(long delta) {
+       public static void incrementHDFSHits(int delta) {
                _numHitsHDFS.add(delta);
        }
        
@@ -134,7 +134,7 @@ public class CacheStatistics
                _numWritesFSBuff.increment();
        }
        
-       public static void incrementFSBuffWrites(long delta) {
+       public static void incrementFSBuffWrites(int delta) {
                _numWritesFSBuff.add(delta);
        }
        
@@ -146,7 +146,7 @@ public class CacheStatistics
                _numWritesFS.increment();
        }
        
-       public static void incrementFSWrites(long delta) {
+       public static void incrementFSWrites(int delta) {
                _numWritesFS.add(delta);
        }
        
@@ -158,7 +158,7 @@ public class CacheStatistics
                _numWritesHDFS.increment();
        }
        
-       public static void incrementHDFSWrites(long delta) {
+       public static void incrementHDFSWrites(int delta) {
                _numWritesHDFS.add(delta);
        }
        

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/caching/FrameObject.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/FrameObject.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/FrameObject.java
index dca58a6..b953906 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/FrameObject.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/FrameObject.java
@@ -43,7 +43,6 @@ import org.apache.sysml.runtime.matrix.data.FrameBlock;
 import org.apache.sysml.runtime.matrix.data.InputInfo;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.util.UtilFunctions;
-import org.apache.sysml.utils.IntUtils;
 
 public class FrameObject extends CacheableData<FrameBlock>
 {
@@ -108,14 +107,14 @@ public class FrameObject extends CacheableData<FrameBlock>
         */
        public ValueType[] mergeSchemas(FrameObject fo) {
                return (ValueType[]) ArrayUtils.addAll(
-                       (_schema!=null) ? _schema : 
UtilFunctions.nCopies(IntUtils.toInt(getNumColumns()), ValueType.STRING), 
-                       (fo._schema!=null) ? fo._schema : 
UtilFunctions.nCopies(IntUtils.toInt(fo.getNumColumns()), ValueType.STRING));
+                       (_schema!=null) ? _schema : 
UtilFunctions.nCopies((int)getNumColumns(), ValueType.STRING), 
+                       (fo._schema!=null) ? fo._schema : 
UtilFunctions.nCopies((int)fo.getNumColumns(), ValueType.STRING));
        } 
        
        public void setSchema(String schema) {
                if( schema.equals("*") ) {
                        //populate default schema
-                       int clen = IntUtils.toInt( getNumColumns() );
+                       int clen = (int) getNumColumns();
                        if( clen >= 0 ) //known number of cols
                                _schema = UtilFunctions.nCopies(clen, 
ValueType.STRING);
                }
@@ -170,7 +169,7 @@ public class FrameObject extends CacheableData<FrameBlock>
                
                //handle missing schema if necessary
                ValueType[] lschema = (_schema!=null) ? _schema : 
-                       UtilFunctions.nCopies(clen>=1 ? IntUtils.toInt(clen) : 
1, ValueType.STRING);
+                       UtilFunctions.nCopies(clen>=1 ? (int)clen : 1, 
ValueType.STRING);
                
                //read the frame block
                FrameBlock data = null;
@@ -202,12 +201,12 @@ public class FrameObject extends CacheableData<FrameBlock>
                
                MetaDataFormat iimd = (MetaDataFormat) _metaData;
                MatrixCharacteristics mc = iimd.getMatrixCharacteristics();
-               int rlen = IntUtils.toInt(mc.getRows());
-               int clen = IntUtils.toInt(mc.getCols());
+               int rlen = (int)mc.getRows();
+               int clen = (int)mc.getCols();
                
                //handle missing schema if necessary
                ValueType[] lschema = (_schema!=null) ? _schema : 
-                       UtilFunctions.nCopies(clen>=1 ? IntUtils.toInt(clen) : 
1, ValueType.STRING);
+                       UtilFunctions.nCopies(clen>=1 ? (int)clen : 1, 
ValueType.STRING);
                
                FrameBlock fb = null;
                try  {

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
index 66bd0e3..4ab0f34 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/caching/MatrixObject.java
@@ -43,7 +43,6 @@ import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.util.DataConverter;
 import org.apache.sysml.runtime.util.IndexRange;
 import org.apache.sysml.runtime.util.MapReduceTool;
-import org.apache.sysml.utils.IntUtils;
 
 
 /**
@@ -314,7 +313,7 @@ public class MatrixObject extends CacheableData<MatrixBlock>
                                        mb = readBlobFromHDFS( fname, rows, 
cols );
                                else
                                {
-                                       mb = new 
MatrixBlock(IntUtils.toInt(rows), IntUtils.toInt(cols), true);
+                                       mb = new MatrixBlock((int)rows, 
(int)cols, true);
                                        LOG.warn("Reading empty matrix 
partition "+fname);
                                }
                        }
@@ -328,13 +327,13 @@ public class MatrixObject extends 
CacheableData<MatrixBlock>
                                
                                if( _partitionFormat == 
PDataPartitionFormat.ROW_BLOCK_WISE )
                                {
-                                       int rix = 
IntUtils.toInt((pred.rowStart-1)%brlen);
-                                       mb = mb.slice(rix, rix, 
IntUtils.toInt(pred.colStart-1), IntUtils.toInt(pred.colEnd-1), new 
MatrixBlock());
+                                       int rix = 
(int)((pred.rowStart-1)%brlen);
+                                       mb = mb.slice(rix, rix, 
(int)(pred.colStart-1), (int)(pred.colEnd-1), new MatrixBlock());
                                }
                                if( _partitionFormat == 
PDataPartitionFormat.COLUMN_BLOCK_WISE )
                                {
-                                       int cix = 
IntUtils.toInt((pred.colStart-1)%bclen);
-                                       mb = 
mb.slice(IntUtils.toInt(pred.rowStart-1), IntUtils.toInt(pred.rowEnd-1), cix, 
cix, new MatrixBlock());
+                                       int cix = 
(int)((pred.colStart-1)%bclen);
+                                       mb = mb.slice((int)(pred.rowStart-1), 
(int)(pred.rowEnd-1), cix, cix, new MatrixBlock());
                                }
                        }
                        
@@ -468,10 +467,10 @@ public class MatrixObject extends 
CacheableData<MatrixBlock>
                        }
                        
                        //obtain matrix block from RDD
-                       int rlen = IntUtils.toInt(mc.getRows());
-                       int clen = IntUtils.toInt(mc.getCols());
-                       int brlen = mc.getRowsPerBlock();
-                       int bclen = mc.getColsPerBlock();
+                       int rlen = (int)mc.getRows();
+                       int clen = (int)mc.getCols();
+                       int brlen = (int)mc.getRowsPerBlock();
+                       int bclen = (int)mc.getColsPerBlock();
                        long nnz = mc.getNonZerosBound();
                        
                        //guarded rdd collect 

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
index c0c949f..3e8636b 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/context/ExecutionContext.java
@@ -60,7 +60,6 @@ import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.matrix.data.Pair;
 import org.apache.sysml.runtime.util.MapReduceTool;
 import org.apache.sysml.utils.GPUStatistics;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.utils.Statistics;
 
 
@@ -289,7 +288,7 @@ public class ExecutionContext {
                if( oldMetaData == null || !(oldMetaData instanceof 
MetaDataFormat) )
                        throw new DMLRuntimeException("Metadata not available");
                MatrixCharacteristics mc = new MatrixCharacteristics(nrows, 
ncols,
-                               IntUtils.toInt(mo.getNumRowsPerBlock()), 
IntUtils.toInt(mo.getNumColumnsPerBlock()));
+                       (int) mo.getNumRowsPerBlock(), 
(int)mo.getNumColumnsPerBlock());
                mo.setMetaData(new MetaDataFormat(mc, 
                        ((MetaDataFormat)oldMetaData).getOutputInfo(),
                        ((MetaDataFormat)oldMetaData).getInputInfo()));

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
index b04aad0..0ed86f1 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/context/SparkExecutionContext.java
@@ -84,7 +84,6 @@ import org.apache.sysml.runtime.matrix.data.SparseBlock;
 import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration;
 import org.apache.sysml.runtime.util.MapReduceTool;
 import org.apache.sysml.runtime.util.UtilFunctions;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.utils.MLContextProxy;
 import org.apache.sysml.utils.Statistics;
 
@@ -377,7 +376,7 @@ public class SparkExecutionContext extends ExecutionContext
                        }
                        else { //default case
                                MatrixBlock mb = mo.acquireRead(); //pin matrix 
in memory
-                               rdd = toMatrixJavaPairRDD(sc, mb, 
IntUtils.toInt(mo.getNumRowsPerBlock()), 
IntUtils.toInt(mo.getNumColumnsPerBlock()), numParts, inclEmpty);
+                               rdd = toMatrixJavaPairRDD(sc, mb, 
(int)mo.getNumRowsPerBlock(), (int)mo.getNumColumnsPerBlock(), numParts, 
inclEmpty);
                                mo.release(); //unpin matrix
                                _parRDDs.registerRDD(rdd.id(), 
OptimizerUtils.estimatePartitionedSizeExactSparsity(mc), true);
                        }
@@ -574,8 +573,8 @@ public class SparkExecutionContext extends ExecutionContext
                                
CacheableData.addBroadcastSize(-mo.getBroadcastHandle().getPartitionedBroadcastSize());
 
                        //obtain meta data for matrix
-                       int brlen = IntUtils.toInt( mo.getNumRowsPerBlock() );
-                       int bclen = IntUtils.toInt( mo.getNumColumnsPerBlock() 
);
+                       int brlen = (int) mo.getNumRowsPerBlock();
+                       int bclen = (int) mo.getNumColumnsPerBlock();
 
                        //create partitioned matrix block and release memory 
consumed by input
                        MatrixBlock mb = mo.acquireRead();
@@ -584,7 +583,7 @@ public class SparkExecutionContext extends ExecutionContext
 
                        //determine coarse-grained partitioning
                        int numPerPart = 
PartitionedBroadcast.computeBlocksPerPartition(mo.getNumRows(), 
mo.getNumColumns(), brlen, bclen);
-                       int numParts = IntUtils.toInt( Math.ceil((double) 
pmb.getNumRowBlocks() * pmb.getNumColumnBlocks() / numPerPart) );
+                       int numParts = (int) Math.ceil((double) 
pmb.getNumRowBlocks() * pmb.getNumColumnBlocks() / numPerPart);
                        Broadcast<PartitionedBlock<MatrixBlock>>[] ret = new 
Broadcast[numParts];
 
                        //create coarse-grained partitioned broadcasts
@@ -639,7 +638,7 @@ public class SparkExecutionContext extends ExecutionContext
                                
CacheableData.addBroadcastSize(-fo.getBroadcastHandle().getPartitionedBroadcastSize());
 
                        //obtain meta data for frame
-                       int bclen = IntUtils.toInt( fo.getNumColumns() );
+                       int bclen = (int) fo.getNumColumns();
                        int brlen = OptimizerUtils.getDefaultFrameSize();
 
                        //create partitioned frame block and release memory 
consumed by input
@@ -649,7 +648,7 @@ public class SparkExecutionContext extends ExecutionContext
 
                        //determine coarse-grained partitioning
                        int numPerPart = 
PartitionedBroadcast.computeBlocksPerPartition(fo.getNumRows(), 
fo.getNumColumns(), brlen, bclen);
-                       int numParts = IntUtils.toInt( Math.ceil((double) 
pmb.getNumRowBlocks() * pmb.getNumColumnBlocks() / numPerPart) );
+                       int numParts = (int) Math.ceil((double) 
pmb.getNumRowBlocks() * pmb.getNumColumnBlocks() / numPerPart);
                        Broadcast<PartitionedBlock<FrameBlock>>[] ret = new 
Broadcast[numParts];
 
                        //create coarse-grained partitioned broadcasts
@@ -745,8 +744,8 @@ public class SparkExecutionContext extends ExecutionContext
                        int maxCol = 
UtilFunctions.computeBlockSize(mc.getCols(), blockCol+1, mc.getColsPerBlock());
                        //copy sub-matrix to block
                        MatrixBlock block = new MatrixBlock(maxRow, maxCol, 
mb.isInSparseFormat());
-                       int row_offset = 
IntUtils.toInt(blockRow*mc.getRowsPerBlock());
-                       int col_offset = 
IntUtils.toInt(blockCol*mc.getColsPerBlock());
+                       int row_offset = (int)blockRow*mc.getRowsPerBlock();
+                       int col_offset = (int)blockCol*mc.getColsPerBlock();
                        block = mb.slice( row_offset, row_offset+maxRow-1,
                                col_offset, col_offset+maxCol-1, block );
                        //create key-value pair
@@ -763,7 +762,7 @@ public class SparkExecutionContext extends ExecutionContext
 
                //create and write subblocks of matrix
                int blksize = ConfigurationManager.getBlocksize();
-               for(int blockRow = 0; blockRow < 
IntUtils.toInt(Math.ceil(src.getNumRows()/(double)blksize)); blockRow++)
+               for(int blockRow = 0; blockRow < 
(int)Math.ceil(src.getNumRows()/(double)blksize); blockRow++)
                {
                        int maxRow = (blockRow*blksize + blksize < 
src.getNumRows()) ? blksize : src.getNumRows() - blockRow*blksize;
                        int roffset = blockRow*blksize;
@@ -864,8 +863,8 @@ public class SparkExecutionContext extends ExecutionContext
                                MatrixBlock block = keyval._2();
                                
                                //compute row/column block offsets
-                               int row_offset = 
IntUtils.toInt(ix.getRowIndex()-1)*brlen;
-                               int col_offset = 
IntUtils.toInt(ix.getColumnIndex()-1)*bclen;
+                               int row_offset = 
(int)(ix.getRowIndex()-1)*brlen;
+                               int col_offset = 
(int)(ix.getColumnIndex()-1)*bclen;
                                int rows = block.getNumRows();
                                int cols = block.getNumColumns();
                                
@@ -945,7 +944,7 @@ public class SparkExecutionContext extends ExecutionContext
 
                        //append cell to dense/sparse target in order to avoid 
shifting for sparse
                        //note: this append requires a final sort of sparse rows
-                       out.appendValue(IntUtils.toInt(ix.getRowIndex()-1), 
IntUtils.toInt(ix.getColumnIndex()-1), cell.getValue());
+                       out.appendValue((int)ix.getRowIndex()-1, 
(int)ix.getColumnIndex()-1, cell.getValue());
                }
 
                //post-processing output matrix
@@ -976,7 +975,7 @@ public class SparkExecutionContext extends ExecutionContext
                        //unpack index-block pair
                        MatrixIndexes ix = keyval._1();
                        MatrixBlock block = keyval._2();
-                       out.setBlock(IntUtils.toInt(ix.getRowIndex()), 
IntUtils.toInt(ix.getColumnIndex()), block);
+                       out.setBlock((int)ix.getRowIndex(), 
(int)ix.getColumnIndex(), block);
                }
 
                if (ConfigurationManager.isStatistics()) {
@@ -1009,7 +1008,7 @@ public class SparkExecutionContext extends 
ExecutionContext
                for( Tuple2<Long,FrameBlock> keyval : list )
                {
                        //unpack index-block pair
-                       int ix = IntUtils.toInt(keyval._1() - 1);
+                       int ix = (int)(keyval._1() - 1);
                        FrameBlock block = keyval._2();
 
                        //copy into output frame
@@ -1315,7 +1314,7 @@ public class SparkExecutionContext extends 
ExecutionContext
                if( pool < 0 ) {
                        pool = _poolBuff.length;
                        _poolBuff = Arrays.copyOf(_poolBuff,
-                                       IntUtils.toInt(Math.min(2L*pool, 
Integer.MAX_VALUE)));
+                                       (int)Math.min(2L*pool, 
Integer.MAX_VALUE));
                }
                //mark pool name for in use
                _poolBuff[pool] = true;

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/LocalPSWorker.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/LocalPSWorker.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/LocalPSWorker.java
index 7659245..df9c925 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/LocalPSWorker.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/LocalPSWorker.java
@@ -30,7 +30,6 @@ import 
org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
 import org.apache.sysml.runtime.controlprogram.parfor.stat.Timing;
 import org.apache.sysml.runtime.instructions.cp.ListObject;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.utils.Statistics;
 
 public class LocalPSWorker extends PSWorker implements Callable<Void> {
@@ -54,7 +53,7 @@ public class LocalPSWorker extends PSWorker implements 
Callable<Void> {
                incWorkerNumber();
                try {
                        long dataSize = _features.getNumRows();
-                       int batchIter = IntUtils.toInt( Math.ceil((double) 
dataSize / _batchSize) );
+                       int batchIter = (int) Math.ceil((double) dataSize / 
_batchSize);
 
                        switch (_freq) {
                                case BATCH:

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamservUtils.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamservUtils.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamservUtils.java
index b93604f..58bf311 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamservUtils.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/ParamservUtils.java
@@ -71,7 +71,6 @@ import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.matrix.operators.BinaryOperator;
 import org.apache.sysml.runtime.util.ProgramConverter;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.utils.Statistics;
 
 import scala.Tuple2;
@@ -195,7 +194,7 @@ public class ParamservUtils {
         * @return new sliced matrix block
         */
        public static MatrixBlock sliceMatrixBlock(MatrixBlock mb, long rl, 
long rh) {
-               return mb.slice(IntUtils.toInt( rl - 1 ), IntUtils.toInt( rh - 
1 ));
+               return mb.slice((int) rl - 1, (int) rh - 1);
        }
 
        /**
@@ -390,7 +389,7 @@ public class ParamservUtils {
                JavaPairRDD<MatrixIndexes, MatrixBlock> labelsRDD = 
(JavaPairRDD<MatrixIndexes, MatrixBlock>)
                        sec.getRDDHandleForMatrixObject(labels, 
InputInfo.BinaryBlockInputInfo);
 
-               DataPartitionerSparkMapper mapper = new 
DataPartitionerSparkMapper(scheme, workerNum, sec, IntUtils.toInt( 
features.getNumRows()));
+               DataPartitionerSparkMapper mapper = new 
DataPartitionerSparkMapper(scheme, workerNum, sec, (int) features.getNumRows());
                JavaPairRDD<Integer, Tuple2<MatrixBlock, MatrixBlock>> result = 
ParamservUtils
                        .assembleTrainingData(featuresRDD, labelsRDD) // 
Combine features and labels into a pair (rowBlockID => (features, labels))
                        .flatMapToPair(mapper) // Do the data partitioning on 
spark (workerID => (rowBlockID, (single row features, single row labels))

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DRLocalScheme.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DRLocalScheme.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DRLocalScheme.java
index b9f37d9..464be99 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DRLocalScheme.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DRLocalScheme.java
@@ -29,7 +29,6 @@ import 
org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
 import org.apache.sysml.runtime.controlprogram.paramserv.ParamservUtils;
 import org.apache.sysml.runtime.instructions.InstructionUtils;
 import org.apache.sysml.runtime.matrix.data.MatrixBlock;
-import org.apache.sysml.utils.IntUtils;
 
 /**
  * Data partitioner Disjoint_Random:
@@ -40,7 +39,7 @@ import org.apache.sysml.utils.IntUtils;
 public class DRLocalScheme extends DataPartitionLocalScheme {
 
        private List<MatrixBlock> partition(int k, MatrixBlock mb, MatrixBlock 
permutation) {
-               int batchSize = IntUtils.toInt(Math.ceil((double) 
mb.getNumRows() / k));
+               int batchSize = (int) Math.ceil((double) mb.getNumRows() / k);
                return IntStream.range(0, k).mapToObj(i -> {
                        int begin = i * batchSize;
                        int end = Math.min((i + 1) * batchSize, 
mb.getNumRows());

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DRSparkScheme.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DRSparkScheme.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DRSparkScheme.java
index 3f6deb3..df61af9 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DRSparkScheme.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DRSparkScheme.java
@@ -26,7 +26,6 @@ import java.util.stream.IntStream;
 import org.apache.sysml.hops.OptimizerUtils;
 import org.apache.sysml.runtime.controlprogram.paramserv.ParamservUtils;
 import org.apache.sysml.runtime.matrix.data.MatrixBlock;
-import org.apache.sysml.utils.IntUtils;
 
 import scala.Tuple2;
 
@@ -59,10 +58,10 @@ public class DRSparkScheme extends DataPartitionSparkScheme 
{
                        long shiftedPosition = (long) partialPerm.getValue(r, 
0);
 
                        // Get the shifted block and position
-                       int shiftedBlkID =  IntUtils.toInt(shiftedPosition / 
OptimizerUtils.DEFAULT_BLOCKSIZE + 1);
+                       int shiftedBlkID = (int) (shiftedPosition / 
OptimizerUtils.DEFAULT_BLOCKSIZE + 1);
 
                        MatrixBlock indicator = 
_workerIndicator.getBlock(shiftedBlkID, 1);
-                       int workerID = IntUtils.toInt( 
indicator.getValue(IntUtils.toInt( shiftedPosition / 
OptimizerUtils.DEFAULT_BLOCKSIZE), 0));
+                       int workerID = (int) indicator.getValue((int) 
shiftedPosition / OptimizerUtils.DEFAULT_BLOCKSIZE, 0);
                        return new Tuple2<>(workerID, new 
Tuple2<>(shiftedPosition, rowMB));
                }).collect(Collectors.toList());
        }

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DataPartitionSparkScheme.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DataPartitionSparkScheme.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DataPartitionSparkScheme.java
index 77d252c..7992ac8 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DataPartitionSparkScheme.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DataPartitionSparkScheme.java
@@ -28,7 +28,6 @@ import org.apache.sysml.hops.OptimizerUtils;
 import org.apache.sysml.runtime.controlprogram.paramserv.ParamservUtils;
 import org.apache.sysml.runtime.instructions.spark.data.PartitionedBroadcast;
 import org.apache.sysml.runtime.matrix.data.MatrixBlock;
-import org.apache.sysml.utils.IntUtils;
 
 import scala.Tuple2;
 
@@ -66,7 +65,7 @@ public abstract class DataPartitionSparkScheme implements 
Serializable {
        protected List<Tuple2<Integer, Tuple2<Long, MatrixBlock>>> 
nonShuffledPartition(int rblkID, MatrixBlock mb) {
                MatrixBlock indicator = _workerIndicator.getBlock(rblkID, 1);
                return LongStream.range(0, mb.getNumRows()).mapToObj(r -> {
-                       int workerID = IntUtils.toInt( 
indicator.getValue(IntUtils.toInt( r ), 0) );
+                       int workerID = (int) indicator.getValue((int) r, 0);
                        MatrixBlock rowMB = ParamservUtils.sliceMatrixBlock(mb, 
r + 1, r + 1);
                        long shiftedPosition = r + (rblkID - 1) * 
OptimizerUtils.DEFAULT_BLOCKSIZE;
                        return new Tuple2<>(workerID, new 
Tuple2<>(shiftedPosition, rowMB));

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DataPartitionerSparkAggregator.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DataPartitionerSparkAggregator.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DataPartitionerSparkAggregator.java
index 6381324..0314ccf 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DataPartitionerSparkAggregator.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/DataPartitionerSparkAggregator.java
@@ -25,7 +25,6 @@ import java.util.LinkedList;
 import org.apache.spark.api.java.function.PairFunction;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
 import org.apache.sysml.runtime.matrix.data.MatrixBlock;
-import org.apache.sysml.utils.IntUtils;
 
 import scala.Tuple2;
 
@@ -52,15 +51,15 @@ public class DataPartitionerSparkAggregator implements 
PairFunction<Tuple2<Integ
         */
        @Override
        public Tuple2<Integer, Tuple2<MatrixBlock, MatrixBlock>> 
call(Tuple2<Integer, LinkedList<Tuple2<Long, Tuple2<MatrixBlock, 
MatrixBlock>>>> input) throws Exception {
-               MatrixBlock fmb = new MatrixBlock(input._2.size(), 
IntUtils.toInt( _fcol ), false);
-               MatrixBlock lmb = new MatrixBlock(input._2.size(), 
IntUtils.toInt( _lcol ), false);
+               MatrixBlock fmb = new MatrixBlock(input._2.size(), (int) _fcol, 
false);
+               MatrixBlock lmb = new MatrixBlock(input._2.size(), (int) _lcol, 
false);
 
                for (int i = 0; i < input._2.size(); i++) {
                        MatrixBlock tmpFMB = input._2.get(i)._2._1;
                        MatrixBlock tmpLMB = input._2.get(i)._2._2;
                        // Row-wise aggregation
-                       fmb = fmb.leftIndexingOperations(tmpFMB, i, i, 0, 
IntUtils.toInt( _fcol - 1 ), fmb, MatrixObject.UpdateType.INPLACE_PINNED);
-                       lmb = lmb.leftIndexingOperations(tmpLMB, i, i, 0, 
IntUtils.toInt( _lcol - 1 ), lmb, MatrixObject.UpdateType.INPLACE_PINNED);
+                       fmb = fmb.leftIndexingOperations(tmpFMB, i, i, 0, (int) 
_fcol - 1, fmb, MatrixObject.UpdateType.INPLACE_PINNED);
+                       lmb = lmb.leftIndexingOperations(tmpLMB, i, i, 0, (int) 
_lcol - 1, lmb, MatrixObject.UpdateType.INPLACE_PINNED);
                }
                return new Tuple2<>(input._1, new Tuple2<>(fmb, lmb));
        }

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/SparkDataPartitioner.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/SparkDataPartitioner.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/SparkDataPartitioner.java
index 5ba1630..031150b 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/SparkDataPartitioner.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/dp/SparkDataPartitioner.java
@@ -33,7 +33,6 @@ import 
org.apache.sysml.runtime.controlprogram.paramserv.ParamservUtils;
 import org.apache.sysml.runtime.instructions.spark.data.PartitionedBroadcast;
 import org.apache.sysml.runtime.matrix.data.MatrixBlock;
 import org.apache.sysml.runtime.util.DataConverter;
-import org.apache.sysml.utils.IntUtils;
 
 public class SparkDataPartitioner implements Serializable {
 
@@ -75,7 +74,7 @@ public class SparkDataPartitioner implements Serializable {
 
        private void createDCIndicator(SparkExecutionContext sec, int 
numWorkers, int numEntries) {
                double[] vector = new double[numEntries];
-               int batchSize = IntUtils.toInt( Math.ceil((double) numEntries / 
numWorkers) );
+               int batchSize = (int) Math.ceil((double) numEntries / 
numWorkers);
                for (int i = 1; i < numWorkers; i++) {
                        int begin = batchSize * i;
                        int end = Math.min(begin + batchSize, numEntries);
@@ -91,7 +90,7 @@ public class SparkDataPartitioner implements Serializable {
                        // Create the source-target id vector from the 
permutation ranging from 1 to number of entries
                        double[] vector = new double[numEntries];
                        for (int j = 0; j < perm.getDenseBlockValues().length; 
j++) {
-                               vector[IntUtils.toInt( 
perm.getDenseBlockValues()[j] - 1)] = j;
+                               vector[(int) perm.getDenseBlockValues()[j] - 1] 
= j;
                        }
                        MatrixBlock vectorMB = 
DataConverter.convertToMatrixBlock(vector, true);
                        return 
sec.getBroadcastForMatrixObject(ParamservUtils.newMatrixObject(vectorMB));
@@ -102,6 +101,6 @@ public class SparkDataPartitioner implements Serializable {
        public DataPartitionSparkScheme.Result doPartitioning(int numWorkers, 
MatrixBlock features, MatrixBlock labels,
                        long rowID) {
                // Set the rowID in order to get the according permutation
-               return _scheme.doPartitioning(numWorkers, IntUtils.toInt( rowID 
), features, labels);
+               return _scheme.doPartitioning(numWorkers, (int) rowID, 
features, labels);
        }
 }

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/rpc/PSRpcObject.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/rpc/PSRpcObject.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/rpc/PSRpcObject.java
index e2dfc92..38d80a2 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/rpc/PSRpcObject.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/paramserv/rpc/PSRpcObject.java
@@ -33,7 +33,6 @@ import org.apache.sysml.runtime.instructions.cp.Data;
 import org.apache.sysml.runtime.instructions.cp.ListObject;
 import org.apache.sysml.runtime.io.IOUtilFunctions;
 import org.apache.sysml.runtime.matrix.data.MatrixBlock;
-import org.apache.sysml.utils.IntUtils;
 
 public abstract class PSRpcObject {
 
@@ -94,7 +93,7 @@ public abstract class PSRpcObject {
                        
((MatrixObject)d).acquireReadAndRelease().getExactSizeOnDisk()).sum();
                if( result > Integer.MAX_VALUE )
                        throw new DMLRuntimeException("Serialized size 
("+result+") larger than Integer.MAX_VALUE.");
-               return IntUtils.toInt( result );
+               return (int) result;
        }
 
        private void validateListObject(ListObject lo) {

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitioner.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitioner.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitioner.java
index 8007851..3d12949 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitioner.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitioner.java
@@ -32,7 +32,6 @@ import org.apache.sysml.runtime.matrix.data.InputInfo;
 import org.apache.sysml.runtime.matrix.data.MatrixBlock;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.util.MapReduceTool;
-import org.apache.sysml.utils.IntUtils;
 
 
 /**
@@ -150,7 +149,7 @@ public abstract class DataPartitioner
                //create output matrix object
                out.setPartitioned( _format, _n ); 
                
-               MatrixCharacteristics mcNew = new MatrixCharacteristics( rows, 
cols, IntUtils.toInt(brlen), IntUtils.toInt(bclen) ); 
+               MatrixCharacteristics mcNew = new MatrixCharacteristics( rows, 
cols, (int)brlen, (int)bclen ); 
                mcNew.setNonZeros( nonZeros );
                if( convertBlock2Cell )
                        ii = InputInfo.BinaryCellInputInfo;
@@ -177,11 +176,11 @@ public abstract class DataPartitioner
                {
                        case ROW_WISE:
                                //default assumption sparse, but reset per 
input block anyway
-                               tmp = new MatrixBlock( 1, IntUtils.toInt(cols), 
true, IntUtils.toInt(cols*0.1) );
+                               tmp = new MatrixBlock( 1, (int)cols, true, 
(int)(cols*0.1) );
                                break;
                        case COLUMN_WISE:
                                //default dense because single column alwyas 
below SKINNY_MATRIX_TURN_POINT
-                               tmp = new MatrixBlock( IntUtils.toInt(rows), 1, 
false );
+                               tmp = new MatrixBlock( (int)rows, 1, false );
                                break;
                        default:
                                //do nothing

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerLocal.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerLocal.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerLocal.java
index d4c78aa..ecb5ea0 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerLocal.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerLocal.java
@@ -56,7 +56,6 @@ import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.util.FastStringTokenizer;
 import org.apache.sysml.runtime.util.LocalFileUtils;
-import org.apache.sysml.utils.IntUtils;
 
 /**
  * Partitions a given matrix into row or column partitions with a two 
pass-approach.
@@ -190,8 +189,8 @@ public class DataPartitionerLocal extends DataPartitioner
                                Thread[] threads = new Thread[len];
                                for( int i=0;i<len;i++ )
                                {
-                                       int start = 
i*IntUtils.toInt(Math.ceil(((double)fnamesPartitions.length)/len));
-                                       int end = 
(i+1)*IntUtils.toInt(Math.ceil(((double)fnamesPartitions.length)/len)-1);
+                                       int start = 
i*(int)Math.ceil(((double)fnamesPartitions.length)/len);
+                                       int end = 
(i+1)*(int)Math.ceil(((double)fnamesPartitions.length)/len)-1;
                                        end = Math.min(end, 
fnamesPartitions.length-1);
                                        threads[i] = new Thread(new 
DataPartitionerWorkerTextCell(job, fnameNew, fnameStaging, fnamesPartitions, 
start, end));
                                        threads[i].start();
@@ -277,8 +276,8 @@ public class DataPartitionerLocal extends DataPartitioner
                                Thread[] threads = new Thread[len];
                                for( int i=0;i<len;i++ )
                                {
-                                       int start = 
i*IntUtils.toInt(Math.ceil(((double)fnamesPartitions.length)/len));
-                                       int end = 
(i+1)*IntUtils.toInt(Math.ceil(((double)fnamesPartitions.length)/len)-1);
+                                       int start = 
i*(int)Math.ceil(((double)fnamesPartitions.length)/len);
+                                       int end = 
(i+1)*(int)Math.ceil(((double)fnamesPartitions.length)/len)-1;
                                        end = Math.min(end, 
fnamesPartitions.length-1);
                                        threads[i] = new Thread(new 
DataPartitionerWorkerBinaryCell(job, fnameNew, fnameStaging, fnamesPartitions, 
start, end));
                                        threads[i].start();
@@ -359,8 +358,8 @@ public class DataPartitionerLocal extends DataPartitioner
                                Thread[] threads = new Thread[len];
                                for( int i=0;i<len;i++ )
                                {
-                                       int start = 
i*IntUtils.toInt(Math.ceil(((double)fnamesPartitions.length)/len));
-                                       int end = 
(i+1)*IntUtils.toInt(Math.ceil(((double)fnamesPartitions.length)/len)-1);
+                                       int start = 
i*(int)Math.ceil(((double)fnamesPartitions.length)/len);
+                                       int end = 
(i+1)*(int)Math.ceil(((double)fnamesPartitions.length)/len)-1;
                                        end = Math.min(end, 
fnamesPartitions.length-1);
                                        threads[i] = new Thread(new 
DataPartitionerWorkerBinaryBlock(job, fnameNew, fnameStaging, fnamesPartitions, 
start, end));
                                        threads[i].start();
@@ -463,8 +462,8 @@ public class DataPartitionerLocal extends DataPartitioner
                                Thread[] threads = new Thread[len];
                                for( int i=0;i<len;i++ )
                                {
-                                       int start = 
i*IntUtils.toInt(Math.ceil(((double)fnamesPartitions.length)/len));
-                                       int end = 
(i+1)*IntUtils.toInt(Math.ceil(((double)fnamesPartitions.length)/len)-1);
+                                       int start = 
i*(int)Math.ceil(((double)fnamesPartitions.length)/len);
+                                       int end = 
(i+1)*(int)Math.ceil(((double)fnamesPartitions.length)/len)-1;
                                        end = Math.min(end, 
fnamesPartitions.length-1);
                                        threads[i] = new Thread(new 
DataPartitionerWorkerBinaryCell(job, fnameNew, fnameStaging, fnamesPartitions, 
start, end));
                                        threads[i].start();
@@ -497,12 +496,12 @@ public class DataPartitionerLocal extends DataPartitioner
 
                if( _format == PDataPartitionFormat.ROW_WISE ) 
                {       
-                       _reuseBlk.reset( 1, IntUtils.toInt(cols), sparse, 
IntUtils.toInt(cols*sparsity) );
+                       _reuseBlk.reset( 1, (int)cols, sparse, (int) 
(cols*sparsity) );
                        for( int i=0; i<rows; i++ )
                        {
                                String pdir = 
LocalFileUtils.checkAndCreateStagingDir(dir+"/"+(row_offset+1+i));
                                String pfname = 
pdir+"/"+"block_"+(col_offset/bclen+1);
-                               mb.slice(i, i, 0, IntUtils.toInt(cols-1), 
_reuseBlk);
+                               mb.slice(i, i, 0, (int)(cols-1), _reuseBlk);
                                LocalFileUtils.writeMatrixBlockToLocal(pfname, 
_reuseBlk);
                                _reuseBlk.reset();
                        }
@@ -516,13 +515,13 @@ public class DataPartitionerLocal extends DataPartitioner
                else if( _format == PDataPartitionFormat.COLUMN_WISE )
                {
                        //create object for reuse
-                       _reuseBlk.reset( IntUtils.toInt(rows), 1, false );
+                       _reuseBlk.reset( (int)rows, 1, false );
                        
                        for( int i=0; i<cols; i++ )
                        {
                                String pdir = 
LocalFileUtils.checkAndCreateStagingDir(dir+"/"+(col_offset+1+i));
                                String pfname = 
pdir+"/"+"block_"+(row_offset/brlen+1);                         
-                               mb.slice(0, IntUtils.toInt(rows-1), i, i, 
_reuseBlk);
+                               mb.slice(0, (int)(rows-1), i, i, _reuseBlk);
                                LocalFileUtils.writeMatrixBlockToLocal(pfname, 
_reuseBlk);
                                _reuseBlk.reset();
                        }                               

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMR.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMR.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMR.java
index fff563f..8195d91 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMR.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMR.java
@@ -38,7 +38,6 @@ import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.matrix.mapred.MRConfigurationNames;
 import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration;
 import org.apache.sysml.runtime.util.MapReduceTool;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.utils.Statistics;
 import org.apache.sysml.yarn.DMLAppMasterUtils;
 
@@ -150,7 +149,7 @@ public class DataPartitionerRemoteMR extends DataPartitioner
                            default:
                                        //do nothing
                    }
-                   job.setNumReduceTasks( IntUtils.toInt(Math.min( 
_numReducers, reducerGroups)) );    
+                   job.setNumReduceTasks( (int)Math.min( _numReducers, 
reducerGroups) );       
 
                        
                        //disable automatic tasks timeouts and speculative task 
exec

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMapper.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMapper.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMapper.java
index 84ea0f8..f535b32 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMapper.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteMapper.java
@@ -41,7 +41,6 @@ import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration;
 import org.apache.sysml.runtime.util.FastStringTokenizer;
 import org.apache.sysml.runtime.util.MapReduceTool;
-import org.apache.sysml.utils.IntUtils;
 
 /**
  * Remote data partitioner mapper implementation that does the actual
@@ -336,12 +335,12 @@ public class DataPartitionerRemoteMapper
                                switch( _pdf )
                                {
                                        case ROW_WISE:
-                                               _reuseBlk.reset(1, 
IntUtils.toInt(cols), sparse, IntUtils.toInt(cols*sparsity));                   
                                             
+                                               _reuseBlk.reset(1, (int)cols, 
sparse, (int)(cols*sparsity));                                                  
          
                                                for( int i=0; i<rows; i++ )
                                                {
                                                        
_longKey.set(row_offset+1+i);
                                                        _pairKey.setIndexes(1, 
(col_offset/_bclen+1) ); 
-                                                       value2.slice(i, i, 0, 
IntUtils.toInt(cols-1), _reuseBlk);
+                                                       value2.slice(i, i, 0, 
(int)(cols-1), _reuseBlk);
                                                        out.collect(_longKey, 
_pair);
                                                        _reuseBlk.reset();
                                                }
@@ -362,12 +361,12 @@ public class DataPartitionerRemoteMapper
                                                out.collect(_longKey, _pair);
                                                break;
                                        case COLUMN_WISE:
-                                               
_reuseBlk.reset(IntUtils.toInt(rows), 1, false);
+                                               _reuseBlk.reset((int)rows, 1, 
false);
                                                for( int i=0; i<cols; i++ )
                                                {
                                                        
_longKey.set(col_offset+1+i);
                                                        
_pairKey.setIndexes(row_offset/_brlen+1, 1);                                    
                
-                                                       value2.slice(0, 
IntUtils.toInt(rows-1), i, i, _reuseBlk);
+                                                       value2.slice(0, 
(int)(rows-1), i, i, _reuseBlk);
                                                        out.collect(_longKey, 
_pair );
                                                        _reuseBlk.reset();
                                                }       
@@ -672,9 +671,9 @@ public class DataPartitionerRemoteMapper
                                long numPartitions = -1;
                                switch( _pdf ){
                                        case ROW_WISE:                   
numPartitions = _rlen; break;
-                                       case ROW_BLOCK_WISE:     numPartitions 
= IntUtils.toInt(Math.ceil(_rlen/(double)_brlen)); break;
+                                       case ROW_BLOCK_WISE:     numPartitions 
= (int)Math.ceil(_rlen/(double)_brlen); break;
                                        case COLUMN_WISE:        numPartitions 
= _clen; break;
-                                       case COLUMN_BLOCK_WISE:  numPartitions 
= IntUtils.toInt(Math.ceil(_clen/(double)_bclen)); break;
+                                       case COLUMN_BLOCK_WISE:  numPartitions 
= (int)Math.ceil(_clen/(double)_bclen); break;
                                        default:
                                                //do nothing
                                }

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSpark.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSpark.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSpark.java
index 165bdd7..daab642 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSpark.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSpark.java
@@ -34,7 +34,6 @@ import org.apache.sysml.runtime.matrix.data.MatrixBlock;
 import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.util.MapReduceTool;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.utils.Statistics;
 
 /**
@@ -75,7 +74,7 @@ public class DataPartitionerRemoteSpark extends 
DataPartitioner
                        
                        //determine degree of parallelism
                        MatrixCharacteristics mc = 
in.getMatrixCharacteristics();
-                       int numRed = IntUtils.toInt(determineNumReducers(inRdd, 
mc, _numRed));
+                       int numRed = (int)determineNumReducers(inRdd, mc, 
_numRed);
        
                        //run spark remote data partition job 
                        DataPartitionerRemoteSparkMapper dpfun = new 
DataPartitionerRemoteSparkMapper(mc, ii, oi, _format, _n);

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSparkMapper.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSparkMapper.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSparkMapper.java
index 0baf24b..b5f0956 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSparkMapper.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/DataPartitionerRemoteSparkMapper.java
@@ -35,7 +35,6 @@ import org.apache.sysml.runtime.matrix.data.MatrixBlock;
 import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.util.DataConverter;
-import org.apache.sysml.utils.IntUtils;
 
 import scala.Tuple2;
 
@@ -118,7 +117,7 @@ public class DataPartitionerRemoteSparkMapper extends 
ParWorker implements PairF
                                        for( int i=0; i<rows; i+=_n ) {
                                                PairWritableBlock tmp = new 
PairWritableBlock();
                                                tmp.indexes = new 
MatrixIndexes(1, col_offset/_bclen+1);
-                                               tmp.block = value2.slice(i, 
Math.min(i+IntUtils.toInt(_n)-1, value2.getNumRows()-1));
+                                               tmp.block = value2.slice(i, 
Math.min(i+(int)_n-1, value2.getNumRows()-1));
                                                ret.add(new 
Tuple2<Long,Writable>(new Long((row_offset+i)/_n+1),tmp));
                                        }
                                }
@@ -153,7 +152,7 @@ public class DataPartitionerRemoteSparkMapper extends 
ParWorker implements PairF
                                                PairWritableBlock tmp = new 
PairWritableBlock();
                                                tmp.indexes = new 
MatrixIndexes(row_offset/_brlen+1, 1);
                                                tmp.block = value2.slice(0, 
value2.getNumRows()-1, 
-                                                               i, 
Math.min(i+IntUtils.toInt(_n)-1, value2.getNumColumns()-1), new MatrixBlock());
+                                                               i, 
Math.min(i+(int)_n-1, value2.getNumColumns()-1), new MatrixBlock());
                                                ret.add(new 
Tuple2<Long,Writable>(new Long((col_offset+i)/_n+1),tmp));
                                        }
                                }

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForMR.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForMR.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForMR.java
index 607291e..5623827 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForMR.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForMR.java
@@ -57,7 +57,6 @@ import 
org.apache.sysml.runtime.matrix.mapred.MRConfigurationNames;
 import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration;
 import org.apache.sysml.runtime.util.MapReduceTool;
 import org.apache.sysml.runtime.util.ProgramConverter;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.utils.Statistics;
 import org.apache.sysml.yarn.DMLAppMasterUtils;
 
@@ -100,8 +99,8 @@ public class RemoteDPParForMR
                        Path path = new Path( input.getFileName() );
                        long rlen = input.getNumRows();
                        long clen = input.getNumColumns();
-                       int brlen = IntUtils.toInt( input.getNumRowsPerBlock());
-                       int bclen = IntUtils.toInt( 
input.getNumColumnsPerBlock());
+                       int brlen = (int) input.getNumRowsPerBlock();
+                       int bclen = (int) input.getNumColumnsPerBlock();
                        MRJobConfiguration.setPartitioningInfo(job, rlen, clen, 
brlen, bclen, InputInfo.BinaryBlockInputInfo, 
                                        oi, dpf._dpf, dpf._N, 
input.getFileName(), itervar, matrixvar, tSparseCol);
                        
job.setInputFormat(InputInfo.BinaryBlockInputInfo.inputFormatClass);
@@ -173,20 +172,20 @@ public class RemoteDPParForMR
                        // Process different counters 
                        Statistics.incrementNoOfExecutedMRJobs();
                        Group pgroup = 
runjob.getCounters().getGroup(ParForProgramBlock.PARFOR_COUNTER_GROUP_NAME);
-                       int numTasks = IntUtils.toInt(pgroup.getCounter( 
Stat.PARFOR_NUMTASKS.toString() ));
-                       int numIters = IntUtils.toInt(pgroup.getCounter( 
Stat.PARFOR_NUMITERS.toString() ));
+                       int numTasks = (int)pgroup.getCounter( 
Stat.PARFOR_NUMTASKS.toString() );
+                       int numIters = (int)pgroup.getCounter( 
Stat.PARFOR_NUMITERS.toString() );
                        if( ConfigurationManager.isStatistics() && 
!InfrastructureAnalyzer.isLocalMode() ) {
                                Statistics.incrementJITCompileTime( 
pgroup.getCounter( Stat.PARFOR_JITCOMPILE.toString() ) );
                                Statistics.incrementJVMgcCount( 
pgroup.getCounter( Stat.PARFOR_JVMGC_COUNT.toString() ) );
                                Statistics.incrementJVMgcTime( 
pgroup.getCounter( Stat.PARFOR_JVMGC_TIME.toString() ) );
                                Group cgroup = 
runjob.getCounters().getGroup(CacheableData.CACHING_COUNTER_GROUP_NAME.toString());
-                               
CacheStatistics.incrementMemHits(cgroup.getCounter( 
CacheStatistics.Stat.CACHE_HITS_MEM.toString() ));
-                               
CacheStatistics.incrementFSBuffHits(cgroup.getCounter( 
CacheStatistics.Stat.CACHE_HITS_FSBUFF.toString() ));
-                               
CacheStatistics.incrementFSHits(cgroup.getCounter( 
CacheStatistics.Stat.CACHE_HITS_FS.toString() ));
-                               
CacheStatistics.incrementHDFSHits(cgroup.getCounter( 
CacheStatistics.Stat.CACHE_HITS_HDFS.toString() ));
-                               
CacheStatistics.incrementFSBuffWrites(cgroup.getCounter( 
CacheStatistics.Stat.CACHE_WRITES_FSBUFF.toString() ));
-                               
CacheStatistics.incrementFSWrites(cgroup.getCounter( 
CacheStatistics.Stat.CACHE_WRITES_FS.toString() ));
-                               
CacheStatistics.incrementHDFSWrites(cgroup.getCounter( 
CacheStatistics.Stat.CACHE_WRITES_HDFS.toString() ));
+                               
CacheStatistics.incrementMemHits((int)cgroup.getCounter( 
CacheStatistics.Stat.CACHE_HITS_MEM.toString() ));
+                               
CacheStatistics.incrementFSBuffHits((int)cgroup.getCounter( 
CacheStatistics.Stat.CACHE_HITS_FSBUFF.toString() ));
+                               
CacheStatistics.incrementFSHits((int)cgroup.getCounter( 
CacheStatistics.Stat.CACHE_HITS_FS.toString() ));
+                               
CacheStatistics.incrementHDFSHits((int)cgroup.getCounter( 
CacheStatistics.Stat.CACHE_HITS_HDFS.toString() ));
+                               
CacheStatistics.incrementFSBuffWrites((int)cgroup.getCounter( 
CacheStatistics.Stat.CACHE_WRITES_FSBUFF.toString() ));
+                               
CacheStatistics.incrementFSWrites((int)cgroup.getCounter( 
CacheStatistics.Stat.CACHE_WRITES_FS.toString() ));
+                               
CacheStatistics.incrementHDFSWrites((int)cgroup.getCounter( 
CacheStatistics.Stat.CACHE_WRITES_HDFS.toString() ));
                                
CacheStatistics.incrementAcquireRTime(cgroup.getCounter( 
CacheStatistics.Stat.CACHE_TIME_ACQR.toString() ));
                                
CacheStatistics.incrementAcquireMTime(cgroup.getCounter( 
CacheStatistics.Stat.CACHE_TIME_ACQM.toString() ));
                                
CacheStatistics.incrementReleaseTime(cgroup.getCounter( 
CacheStatistics.Stat.CACHE_TIME_RLS.toString() ));

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSpark.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSpark.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSpark.java
index 68eb19a..866e456 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSpark.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSpark.java
@@ -57,7 +57,6 @@ import org.apache.sysml.runtime.matrix.data.MatrixBlock;
 import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.util.UtilFunctions;
-import org.apache.sysml.utils.IntUtils;
 import org.apache.sysml.utils.Statistics;
 
 /**
@@ -91,7 +90,7 @@ public class RemoteDPParForSpark
 
                //compute number of reducers (to avoid OOMs and reduce memory 
pressure)
                int numParts = SparkUtils.getNumPreferredPartitions(mc, in);
-               int numReducers2 = Math.max(numReducers, Math.min(numParts, 
IntUtils.toInt(dpf.getNumParts(mc))));
+               int numReducers2 = Math.max(numReducers, Math.min(numParts, 
(int)dpf.getNumParts(mc)));
                
                //core parfor datapartition-execute (w/ or w/o shuffle, 
depending on data characteristics)
                RemoteDPParForSparkWorker efun = new 
RemoteDPParForSparkWorker(program, clsMap, 
@@ -224,7 +223,7 @@ public class RemoteDPParForSpark
                        int off = _containsID ? 1: 0;
                        Object obj = _isVector ? arg0._1().get(off) : arg0._1();
                        boolean sparse = (obj instanceof SparseVector);
-                       MatrixBlock mb = new MatrixBlock(1, 
IntUtils.toInt(_clen), sparse);
+                       MatrixBlock mb = new MatrixBlock(1, (int)_clen, sparse);
                        
                        if( _isVector ) {
                                Vector vect = (Vector) obj;

http://git-wip-us.apache.org/repos/asf/systemml/blob/c3fdbb4d/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSparkWorker.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSparkWorker.java
 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSparkWorker.java
index aa3b2a0..66b4283 100644
--- 
a/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSparkWorker.java
+++ 
b/src/main/java/org/apache/sysml/runtime/controlprogram/parfor/RemoteDPParForSparkWorker.java
@@ -44,7 +44,6 @@ import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
 import org.apache.sysml.runtime.matrix.data.MatrixBlock;
 import org.apache.sysml.runtime.matrix.data.OutputInfo;
 import org.apache.sysml.runtime.util.ProgramConverter;
-import org.apache.sysml.utils.IntUtils;
 
 import scala.Tuple2;
 
@@ -85,8 +84,8 @@ public class RemoteDPParForSparkWorker extends ParWorker 
implements PairFlatMapF
                _aIters = aiters;
                
                //setup matrix block partition meta data
-               _rlen = IntUtils.toInt(dpf.getNumRows(mc));
-               _clen = IntUtils.toInt(dpf.getNumColumns(mc));
+               _rlen = (int)dpf.getNumRows(mc);
+               _clen = (int)dpf.getNumColumns(mc);
                _brlen = mc.getRowsPerBlock();
                _bclen = mc.getColsPerBlock();
                _tSparseCol = tSparseCol;
@@ -126,7 +125,7 @@ public class RemoteDPParForSparkWorker extends ParWorker 
implements PairFlatMapF
                        
                        //maintain accumulators
                        _aTasks.add( 1 );
-                       _aIters.add( 
IntUtils.toInt(getExecutedIterations()-numIter) );
+                       _aIters.add( (int)(getExecutedIterations()-numIter) );
                }
                
                //write output if required (matrix indexed write)
@@ -144,7 +143,7 @@ public class RemoteDPParForSparkWorker extends ParWorker 
implements PairFlatMapF
                        CodegenUtils.getClassSync(e.getKey(), e.getValue());
        
                //parse and setup parfor body program
-               ParForBody body = ProgramConverter.parseParForBody(_prog, 
IntUtils.toInt(_workerID), true);
+               ParForBody body = ProgramConverter.parseParForBody(_prog, 
(int)_workerID, true);
                _childBlocks = body.getChildBlocks();
                _ec          = body.getEc();
                _resultVars  = body.getResultVariables();
@@ -200,8 +199,8 @@ public class RemoteDPParForSparkWorker extends ParWorker 
implements PairFlatMapF
                        long lnnz = 0;
                        for( Writable val : valueList ) {
                                PairWritableBlock pval = (PairWritableBlock) 
val;
-                               int row_offset = 
IntUtils.toInt(pval.indexes.getRowIndex()-1)*_brlen;
-                               int col_offset = 
IntUtils.toInt(pval.indexes.getColumnIndex()-1)*_bclen;
+                               int row_offset = 
(int)(pval.indexes.getRowIndex()-1)*_brlen;
+                               int col_offset = 
(int)(pval.indexes.getColumnIndex()-1)*_bclen;
                                if( !partition.isInSparseFormat() ) //DENSE
                                        partition.copy( row_offset, 
row_offset+pval.block.getNumRows()-1, 
                                                col_offset, 
col_offset+pval.block.getNumColumns()-1,
@@ -255,7 +254,7 @@ public class RemoteDPParForSparkWorker extends ParWorker 
implements PairFlatMapF
                                        PairWritableCell pairValue = 
(PairWritableCell)valueList.iterator().next();
                                        if( 
pairValue.indexes.getColumnIndex()<0 )
                                                continue; //cells used to 
ensure empty partitions
-                                       partition.quickSetValue(0, 
IntUtils.toInt(pairValue.indexes.getColumnIndex()-1), 
pairValue.cell.getValue());
+                                       partition.quickSetValue(0, 
(int)pairValue.indexes.getColumnIndex()-1, pairValue.cell.getValue());
                                }
                                break;
                        case COLUMN_WISE:
@@ -265,9 +264,9 @@ public class RemoteDPParForSparkWorker extends ParWorker 
implements PairFlatMapF
                                        if( pairValue.indexes.getRowIndex()<0 )
                                                continue; //cells used to 
ensure empty partitions
                                        if( _tSparseCol )
-                                               
partition.appendValue(0,IntUtils.toInt(pairValue.indexes.getRowIndex()-1), 
pairValue.cell.getValue());
+                                               
partition.appendValue(0,(int)pairValue.indexes.getRowIndex()-1, 
pairValue.cell.getValue());
                                        else
-                                               
partition.quickSetValue(IntUtils.toInt(pairValue.indexes.getRowIndex()-1), 0, 
pairValue.cell.getValue());
+                                               
partition.quickSetValue((int)pairValue.indexes.getRowIndex()-1, 0, 
pairValue.cell.getValue());
                                }
                                break;
                        default: 

Reply via email to