http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/ReorgSPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/ReorgSPInstruction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/ReorgSPInstruction.java
index 79d791d..3e09d17 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/ReorgSPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/ReorgSPInstruction.java
@@ -79,9 +79,7 @@ public class ReorgSPInstruction extends UnarySPInstruction {
                _bSortIndInMem = bSortIndInMem;
        }
 
-       public static ReorgSPInstruction parseInstruction ( String str ) 
-               throws DMLRuntimeException 
-       {
+       public static ReorgSPInstruction parseInstruction ( String str ) {
                CPOperand in = new CPOperand("", ValueType.UNKNOWN, 
DataType.UNKNOWN);
                CPOperand out = new CPOperand("", ValueType.UNKNOWN, 
DataType.UNKNOWN);
                String opcode = InstructionUtils.getOpCode(str);
@@ -120,9 +118,7 @@ public class ReorgSPInstruction extends UnarySPInstruction {
        }
        
        @Override
-       public void processInstruction(ExecutionContext ec)
-                       throws DMLRuntimeException 
-       {
+       public void processInstruction(ExecutionContext ec) {
                SparkExecutionContext sec = (SparkExecutionContext)ec;
                String opcode = getOpcode();
 
@@ -217,9 +213,7 @@ public class ReorgSPInstruction extends UnarySPInstruction {
                sec.addLineageRDD(output.getName(), input1.getName());
        }
 
-       private void updateReorgMatrixCharacteristics(SparkExecutionContext 
sec) 
-               throws DMLRuntimeException
-       {
+       private void updateReorgMatrixCharacteristics(SparkExecutionContext 
sec) {
                MatrixCharacteristics mc1 = 
sec.getMatrixCharacteristics(input1.getName());
                MatrixCharacteristics mcOut = 
sec.getMatrixCharacteristics(output.getName());
                
@@ -256,19 +250,14 @@ public class ReorgSPInstruction extends 
UnarySPInstruction {
                private ReorgOperator _reorgOp = null;
                private MatrixCharacteristics _mcIn = null;
                
-               public RDDDiagV2MFunction(MatrixCharacteristics mcIn) 
-                       throws DMLRuntimeException 
-               {
+               public RDDDiagV2MFunction(MatrixCharacteristics mcIn) {
                        _reorgOp = new 
ReorgOperator(DiagIndex.getDiagIndexFnObject());
                        _mcIn = mcIn;
                }
                
                @Override
-               public Iterator<Tuple2<MatrixIndexes, MatrixBlock>> call( 
Tuple2<MatrixIndexes, MatrixBlock> arg0 ) 
-                       throws Exception 
-               {
+               public Iterator<Tuple2<MatrixIndexes, MatrixBlock>> call( 
Tuple2<MatrixIndexes, MatrixBlock> arg0 ) {
                        ArrayList<Tuple2<MatrixIndexes, MatrixBlock>> ret = new 
ArrayList<>();
-                       
                        MatrixIndexes ixIn = arg0._1();
                        MatrixBlock blkIn = arg0._2();
                        
@@ -299,16 +288,12 @@ public class ReorgSPInstruction extends 
UnarySPInstruction {
                
                private MatrixCharacteristics _mcIn = null;
                
-               public RDDRevFunction(MatrixCharacteristics mcIn) 
-                       throws DMLRuntimeException 
-               {
+               public RDDRevFunction(MatrixCharacteristics mcIn) {
                        _mcIn = mcIn;
                }
                
                @Override
-               public Iterator<Tuple2<MatrixIndexes, MatrixBlock>> call( 
Tuple2<MatrixIndexes, MatrixBlock> arg0 ) 
-                       throws Exception 
-               {
+               public Iterator<Tuple2<MatrixIndexes, MatrixBlock>> call( 
Tuple2<MatrixIndexes, MatrixBlock> arg0 ) {
                        //construct input
                        IndexedMatrixValue in = 
SparkUtils.toIndexedMatrixBlock(arg0);
                        
@@ -352,9 +337,7 @@ public class ReorgSPInstruction extends UnarySPInstruction {
                        _bclen = mc.getColsPerBlock();
                }
                
-               public Tuple2<MatrixIndexes, MatrixBlock> 
call(Tuple2<MatrixIndexes, MatrixBlock> arg0)
-                       throws Exception 
-               {
+               public Tuple2<MatrixIndexes, MatrixBlock> 
call(Tuple2<MatrixIndexes, MatrixBlock> arg0) {
                        MatrixIndexes ix = arg0._1();
                        MatrixBlock in = arg0._2();
                        MatrixBlock out = new MatrixBlock(in.getNumRows(), 
_cols.length, true);

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/RmmSPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/RmmSPInstruction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/RmmSPInstruction.java
index 1aceee8..05f3870 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/RmmSPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/RmmSPInstruction.java
@@ -54,9 +54,7 @@ public class RmmSPInstruction extends BinarySPInstruction {
                super(SPType.RMM, op, in1, in2, out, opcode, istr);
        }
 
-       public static RmmSPInstruction parseInstruction( String str ) 
-               throws DMLRuntimeException 
-       {
+       public static RmmSPInstruction parseInstruction( String str ) {
                String parts[] = 
InstructionUtils.getInstructionPartsWithValueType(str);
                String opcode = parts[0];
 
@@ -69,13 +67,11 @@ public class RmmSPInstruction extends BinarySPInstruction {
                } 
                else {
                        throw new 
DMLRuntimeException("RmmSPInstruction.parseInstruction():: Unknown opcode " + 
opcode);
-               }               
+               }
        }
        
        @Override
-       public void processInstruction(ExecutionContext ec) 
-               throws DMLRuntimeException
-       {       
+       public void processInstruction(ExecutionContext ec) {
                SparkExecutionContext sec = (SparkExecutionContext)ec;
                
                //get input rdds

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/SPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/SPInstruction.java 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/SPInstruction.java
index e3d8dfd..cae232e 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/SPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/SPInstruction.java
@@ -20,7 +20,6 @@
 package org.apache.sysml.runtime.instructions.spark;
 
 import org.apache.sysml.lops.runtime.RunMRJobs;
-import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
 import org.apache.sysml.runtime.instructions.Instruction;
 import org.apache.sysml.runtime.instructions.SPInstructionParser;
@@ -74,9 +73,7 @@ public abstract class SPInstruction extends Instruction {
        }
        
        @Override
-       public Instruction preprocessInstruction(ExecutionContext ec)
-               throws DMLRuntimeException 
-       {
+       public Instruction preprocessInstruction(ExecutionContext ec) {
                //default pre-process behavior (e.g., debug state)
                Instruction tmp = super.preprocessInstruction(ec);
                
@@ -92,13 +89,10 @@ public abstract class SPInstruction extends Instruction {
        }
 
        @Override 
-       public abstract void processInstruction(ExecutionContext ec)
-                       throws DMLRuntimeException;
+       public abstract void processInstruction(ExecutionContext ec);
 
        @Override
-       public void postprocessInstruction(ExecutionContext ec)
-                       throws DMLRuntimeException 
-       {
+       public void postprocessInstruction(ExecutionContext ec) {
                //maintain statistics
                Statistics.incrementNoOfExecutedSPInst();
                

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/SpoofSPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/SpoofSPInstruction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/SpoofSPInstruction.java
index 2f41fa7..314e118 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/SpoofSPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/SpoofSPInstruction.java
@@ -83,9 +83,7 @@ public class SpoofSPInstruction extends SPInstruction {
                _out = out;
        }
 
-       public static SpoofSPInstruction parseInstruction(String str) 
-               throws DMLRuntimeException
-       {
+       public static SpoofSPInstruction parseInstruction(String str) {
                String[] parts = 
InstructionUtils.getInstructionPartsWithValueType(str);
                
                //String opcode = parts[0];
@@ -103,9 +101,7 @@ public class SpoofSPInstruction extends SPInstruction {
        }
 
        @Override
-       public void processInstruction(ExecutionContext ec)
-               throws DMLRuntimeException 
-       {
+       public void processInstruction(ExecutionContext ec) {
                SparkExecutionContext sec = (SparkExecutionContext)ec;
                
                //decide upon broadcast side inputs
@@ -242,9 +238,7 @@ public class SpoofSPInstruction extends SPInstruction {
                }
        }
        
-       private static boolean[] determineBroadcastInputs(SparkExecutionContext 
sec, CPOperand[] inputs) 
-               throws DMLRuntimeException 
-       {
+       private static boolean[] determineBroadcastInputs(SparkExecutionContext 
sec, CPOperand[] inputs) {
                boolean[] ret = new boolean[inputs.length];
                double localBudget = OptimizerUtils.getLocalMemBudget()
                        - CacheableData.getBroadcastSize(); //account for other 
broadcasts
@@ -270,9 +264,7 @@ public class SpoofSPInstruction extends SPInstruction {
                return ret;
        }
        
-       private static boolean[] getMatrixBroadcastVector(SparkExecutionContext 
sec, CPOperand[] inputs, boolean[] bcVect) 
-                       throws DMLRuntimeException 
-       {
+       private static boolean[] getMatrixBroadcastVector(SparkExecutionContext 
sec, CPOperand[] inputs, boolean[] bcVect) {
                int numMtx = (int) Arrays.stream(inputs)
                        .filter(in -> in.getDataType().isMatrix()).count();
                boolean[] ret = new boolean[numMtx];
@@ -282,9 +274,7 @@ public class SpoofSPInstruction extends SPInstruction {
                return ret;
        }
        
-       private static JavaPairRDD<MatrixIndexes, MatrixBlock[]> 
createJoinedInputRDD(SparkExecutionContext sec, CPOperand[] inputs, boolean[] 
bcVect, boolean outer) 
-               throws DMLRuntimeException
-       {
+       private static JavaPairRDD<MatrixIndexes, MatrixBlock[]> 
createJoinedInputRDD(SparkExecutionContext sec, CPOperand[] inputs, boolean[] 
bcVect, boolean outer) {
                //get input rdd for main input
                int main = getMainInputIndex(inputs, bcVect);
                MatrixCharacteristics mcIn = 
sec.getMatrixCharacteristics(inputs[main].getName());
@@ -313,9 +303,7 @@ public class SpoofSPInstruction extends SPInstruction {
                return ret;
        }
        
-       private static void maintainLineageInfo(SparkExecutionContext sec, 
CPOperand[] inputs, boolean[] bcVect, CPOperand output) 
-               throws DMLRuntimeException 
-       {
+       private static void maintainLineageInfo(SparkExecutionContext sec, 
CPOperand[] inputs, boolean[] bcVect, CPOperand output) {
                //add lineage info for all rdd/broadcast inputs 
                for( int i=0; i<inputs.length; i++ )
                        if( inputs[i].getDataType().isMatrix() )
@@ -327,9 +315,7 @@ public class SpoofSPInstruction extends SPInstruction {
                        .filter(i -> inputs[i].isMatrix() && 
!bcVect[i]).min().orElse(0);
        }
        
-       private void updateOutputMatrixCharacteristics(SparkExecutionContext 
sec, SpoofOperator op) 
-               throws DMLRuntimeException 
-       {
+       private void updateOutputMatrixCharacteristics(SparkExecutionContext 
sec, SpoofOperator op) {
                if(op instanceof SpoofCellwise)
                {
                        MatrixCharacteristics mcIn = 
sec.getMatrixCharacteristics(_in[0].getName());
@@ -408,15 +394,11 @@ public class SpoofSPInstruction extends SPInstruction {
                        _className = className;
                }
                
-               protected ArrayList<MatrixBlock> 
getAllMatrixInputs(MatrixIndexes ixIn, MatrixBlock[] blkIn) 
-                       throws DMLRuntimeException 
-               {
+               protected ArrayList<MatrixBlock> 
getAllMatrixInputs(MatrixIndexes ixIn, MatrixBlock[] blkIn) {
                        return getAllMatrixInputs(ixIn, blkIn, false);
                }
                
-               protected ArrayList<MatrixBlock> 
getAllMatrixInputs(MatrixIndexes ixIn, MatrixBlock[] blkIn, boolean outer) 
-                       throws DMLRuntimeException 
-               {
+               protected ArrayList<MatrixBlock> 
getAllMatrixInputs(MatrixIndexes ixIn, MatrixBlock[] blkIn, boolean outer) {
                        ArrayList<MatrixBlock> ret = new ArrayList<>();
                        //add all rdd/broadcast inputs (main and side inputs)
                        for( int i=0, posRdd=0, posBc=0; i<_bcInd.length; i++ ) 
{
@@ -444,18 +426,14 @@ public class SpoofSPInstruction extends SPInstruction {
                private final int _clen2;
                private SpoofRowwise _op = null;
                
-               public RowwiseFunction(String className, byte[] classBytes, 
boolean[] bcInd, ArrayList<PartitionedBroadcast<MatrixBlock>> bcMatrices, 
ArrayList<ScalarObject> scalars, int clen, int clen2) 
-                       throws DMLRuntimeException
-               {                       
+               public RowwiseFunction(String className, byte[] classBytes, 
boolean[] bcInd, ArrayList<PartitionedBroadcast<MatrixBlock>> bcMatrices, 
ArrayList<ScalarObject> scalars, int clen, int clen2) {
                        super(className, classBytes, bcInd, bcMatrices, 
scalars);
                        _clen = clen;
                        _clen2 = clen2;
                }
                
                @Override
-               public Iterator<Tuple2<MatrixIndexes, MatrixBlock>> call( 
Iterator<Tuple2<MatrixIndexes, MatrixBlock[]>> arg ) 
-                       throws Exception 
-               {
+               public Iterator<Tuple2<MatrixIndexes, MatrixBlock>> call( 
Iterator<Tuple2<MatrixIndexes, MatrixBlock[]>> arg ) {
                        //lazy load of shipped class
                        if( _op == null ) {
                                Class<?> loadedClass = 
CodegenUtils.getClassSync(_className, _classBytes);
@@ -506,9 +484,7 @@ public class SpoofSPInstruction extends SPInstruction {
                
                private SpoofOperator _op = null;
                
-               public CellwiseFunction(String className, byte[] classBytes, 
boolean[] bcInd, ArrayList<PartitionedBroadcast<MatrixBlock>> bcMatrices, 
ArrayList<ScalarObject> scalars) 
-                       throws DMLRuntimeException
-               {
+               public CellwiseFunction(String className, byte[] classBytes, 
boolean[] bcInd, ArrayList<PartitionedBroadcast<MatrixBlock>> bcMatrices, 
ArrayList<ScalarObject> scalars) {
                        super(className, classBytes, bcInd, bcMatrices, 
scalars);
                }
                
@@ -558,9 +534,7 @@ public class SpoofSPInstruction extends SPInstruction {
                
                private SpoofOperator _op = null;
                
-               public MultiAggregateFunction(String className, byte[] 
classBytes, boolean[] bcInd, ArrayList<PartitionedBroadcast<MatrixBlock>> 
bcMatrices, ArrayList<ScalarObject> scalars) 
-                       throws DMLRuntimeException
-               {
+               public MultiAggregateFunction(String className, byte[] 
classBytes, boolean[] bcInd, ArrayList<PartitionedBroadcast<MatrixBlock>> 
bcMatrices, ArrayList<ScalarObject> scalars) {
                        super(className, classBytes, bcInd, bcMatrices, 
scalars);
                }
                
@@ -620,9 +594,7 @@ public class SpoofSPInstruction extends SPInstruction {
                
                private SpoofOperator _op = null;
                
-               public OuterProductFunction(String className, byte[] 
classBytes, boolean[] bcInd, ArrayList<PartitionedBroadcast<MatrixBlock>> 
bcMatrices, ArrayList<ScalarObject> scalars) 
-                               throws DMLRuntimeException
-               {
+               public OuterProductFunction(String className, byte[] 
classBytes, boolean[] bcInd, ArrayList<PartitionedBroadcast<MatrixBlock>> 
bcMatrices, ArrayList<ScalarObject> scalars) {
                        super(className, classBytes, bcInd, bcMatrices, 
scalars);
                }
                

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/TernarySPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/TernarySPInstruction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/TernarySPInstruction.java
index 96fd310..07265ff 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/TernarySPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/TernarySPInstruction.java
@@ -23,7 +23,6 @@ import java.io.Serializable;
 
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.function.Function;
-import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
 import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext;
 import org.apache.sysml.runtime.instructions.InstructionUtils;
@@ -36,13 +35,11 @@ import 
org.apache.sysml.runtime.matrix.operators.TernaryOperator;
 import scala.Tuple2;
 
 public class TernarySPInstruction extends ComputationSPInstruction {
-       private TernarySPInstruction(TernaryOperator op, CPOperand in1, 
CPOperand in2, CPOperand in3, CPOperand out,
-                       String opcode, String str) throws DMLRuntimeException {
+       private TernarySPInstruction(TernaryOperator op, CPOperand in1, 
CPOperand in2, CPOperand in3, CPOperand out, String opcode, String str) {
                super(SPType.Ternary, op, in1, in2, in3, out, opcode, str);
        }
 
-       public static TernarySPInstruction parseInstruction(String str) throws 
DMLRuntimeException
-       {
+       public static TernarySPInstruction parseInstruction(String str) {
                String[] parts = 
InstructionUtils.getInstructionPartsWithValueType(str);
                String opcode=parts[0];
                CPOperand operand1 = new CPOperand(parts[1]);
@@ -54,9 +51,7 @@ public class TernarySPInstruction extends 
ComputationSPInstruction {
        }
        
        @Override
-       public void processInstruction(ExecutionContext ec) 
-               throws DMLRuntimeException
-       {
+       public void processInstruction(ExecutionContext ec) {
                SparkExecutionContext sec = (SparkExecutionContext)ec;
                JavaPairRDD<MatrixIndexes,MatrixBlock> in1 = !input1.isMatrix() 
? null :
                        
sec.getBinaryBlockRDDHandleForVariable(input1.getName());
@@ -100,9 +95,7 @@ public class TernarySPInstruction extends 
ComputationSPInstruction {
                        sec.addLineageRDD(output.getName(), input3.getName());
        }
        
-       protected void 
updateTernaryOutputMatrixCharacteristics(SparkExecutionContext sec) 
-               throws DMLRuntimeException
-       {
+       protected void 
updateTernaryOutputMatrixCharacteristics(SparkExecutionContext sec) {
                MatrixCharacteristics mcOut = 
sec.getMatrixCharacteristics(output.getName());
                for(CPOperand input : new CPOperand[]{input1, input2, input3})
                        if( input.isMatrix() ) {

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/Tsmm2SPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/Tsmm2SPInstruction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/Tsmm2SPInstruction.java
index 344e002..b5e8d87 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/Tsmm2SPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/Tsmm2SPInstruction.java
@@ -61,28 +61,20 @@ public class Tsmm2SPInstruction extends UnarySPInstruction {
                _type = type;
        }
 
-       public static Tsmm2SPInstruction parseInstruction( String str ) 
-               throws DMLRuntimeException 
-       {
+       public static Tsmm2SPInstruction parseInstruction( String str ) {
                String parts[] = 
InstructionUtils.getInstructionPartsWithValueType(str);
                String opcode = parts[0];
-               
                //check supported opcode 
-               if ( !opcode.equalsIgnoreCase("tsmm2") ) {
-                       throw new 
DMLRuntimeException("Tsmm2SPInstruction.parseInstruction():: Unknown opcode " + 
opcode);                      
-               }
-                       
+               if ( !opcode.equalsIgnoreCase("tsmm2") )
+                       throw new 
DMLRuntimeException("Tsmm2SPInstruction.parseInstruction():: Unknown opcode " + 
opcode);
                CPOperand in1 = new CPOperand(parts[1]);
                CPOperand out = new CPOperand(parts[2]);
                MMTSJType type = MMTSJType.valueOf(parts[3]);
-               
                return new Tsmm2SPInstruction(null, in1, out, type, opcode, 
str);
        }
        
        @Override
-       public void processInstruction(ExecutionContext ec) 
-               throws DMLRuntimeException
-       {       
+       public void processInstruction(ExecutionContext ec) {
                SparkExecutionContext sec = (SparkExecutionContext)ec;
                
                //get input
@@ -265,9 +257,8 @@ public class Tsmm2SPInstruction extends UnarySPInstruction {
         * @param in input matrix block
         * @param out output matrix block
         * @return matrix block
-        * @throws DMLRuntimeException if DMLRuntimeException occurs
         */
-       private static MatrixBlock transpose(MatrixBlock in, MatrixBlock out) 
throws DMLRuntimeException {
+       private static MatrixBlock transpose(MatrixBlock in, MatrixBlock out) {
                if( out == null )
                        out = new MatrixBlock(in.getNumColumns(), 
in.getNumRows(), in.getNonZeros());
                else

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/TsmmSPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/TsmmSPInstruction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/TsmmSPInstruction.java
index 990ed3f..2897778 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/TsmmSPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/TsmmSPInstruction.java
@@ -44,28 +44,20 @@ public class TsmmSPInstruction extends UnarySPInstruction {
                _type = type;
        }
 
-       public static TsmmSPInstruction parseInstruction( String str ) 
-               throws DMLRuntimeException 
-       {
+       public static TsmmSPInstruction parseInstruction( String str ) {
                String parts[] = 
InstructionUtils.getInstructionPartsWithValueType(str);
                String opcode = parts[0];
-               
                //check supported opcode 
-               if ( !opcode.equalsIgnoreCase("tsmm") ) {
-                       throw new 
DMLRuntimeException("TsmmSPInstruction.parseInstruction():: Unknown opcode " + 
opcode);                       
-               }
-                       
+               if ( !opcode.equalsIgnoreCase("tsmm") )
+                       throw new 
DMLRuntimeException("TsmmSPInstruction.parseInstruction():: Unknown opcode " + 
opcode);
                CPOperand in1 = new CPOperand(parts[1]);
                CPOperand out = new CPOperand(parts[2]);
                MMTSJType type = MMTSJType.valueOf(parts[3]);
-               
                return new TsmmSPInstruction(null, in1, out, type, opcode, str);
        }
        
        @Override
-       public void processInstruction(ExecutionContext ec) 
-               throws DMLRuntimeException
-       {       
+       public void processInstruction(ExecutionContext ec) {
                SparkExecutionContext sec = (SparkExecutionContext)ec;
                
                //get input

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/UaggOuterChainSPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/UaggOuterChainSPInstruction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/UaggOuterChainSPInstruction.java
index a711582..118c1b6 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/UaggOuterChainSPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/UaggOuterChainSPInstruction.java
@@ -76,9 +76,7 @@ public class UaggOuterChainSPInstruction extends 
BinarySPInstruction {
                instString = istr;
        }
 
-       public static UaggOuterChainSPInstruction parseInstruction( String str 
) 
-               throws DMLRuntimeException 
-       {
+       public static UaggOuterChainSPInstruction parseInstruction( String str 
) {
                String parts[] = 
InstructionUtils.getInstructionPartsWithValueType(str);
                String opcode = parts[0];
 
@@ -106,9 +104,7 @@ public class UaggOuterChainSPInstruction extends 
BinarySPInstruction {
        }
        
        @Override
-       public void processInstruction(ExecutionContext ec) 
-               throws DMLRuntimeException
-       {       
+       public void processInstruction(ExecutionContext ec) {
                SparkExecutionContext sec = (SparkExecutionContext)ec;
                
                boolean rightCached = (_uaggOp.indexFn instanceof ReduceCol || 
_uaggOp.indexFn instanceof ReduceAll
@@ -186,11 +182,8 @@ public class UaggOuterChainSPInstruction extends 
BinarySPInstruction {
                        return mcIn.dimsKnown() && mcIn.getRows() <= 
mcIn.getRowsPerBlock();
        }
 
-       protected void 
updateUnaryAggOutputMatrixCharacteristics(SparkExecutionContext sec) 
-               throws DMLRuntimeException
-       {       
+       protected void 
updateUnaryAggOutputMatrixCharacteristics(SparkExecutionContext sec) {
                String strInput1Name, strInput2Name;
-               
                if(_uaggOp.indexFn instanceof ReduceCol) {
                        strInput1Name = input1.getName();
                        strInput2Name = input2.getName();
@@ -198,7 +191,6 @@ public class UaggOuterChainSPInstruction extends 
BinarySPInstruction {
                        strInput1Name = input2.getName();
                        strInput2Name = input1.getName();
                }
-                               
                MatrixCharacteristics mc1 = 
sec.getMatrixCharacteristics(strInput1Name);
                MatrixCharacteristics mc2 = 
sec.getMatrixCharacteristics(strInput2Name);
                MatrixCharacteristics mcOut = 
sec.getMatrixCharacteristics(output.getName());

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/UnaryMatrixSPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/UnaryMatrixSPInstruction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/UnaryMatrixSPInstruction.java
index fb1e626..0ce49bb 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/UnaryMatrixSPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/UnaryMatrixSPInstruction.java
@@ -23,7 +23,6 @@ import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.function.Function;
 import org.apache.sysml.parser.Expression.DataType;
 import org.apache.sysml.parser.Expression.ValueType;
-import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
 import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext;
 import org.apache.sysml.runtime.instructions.InstructionUtils;
@@ -39,7 +38,7 @@ public class UnaryMatrixSPInstruction extends 
UnarySPInstruction {
                super(SPType.Unary, op, in, out, opcode, instr);
        }
        
-       public static UnarySPInstruction parseInstruction ( String str ) throws 
DMLRuntimeException {
+       public static UnarySPInstruction parseInstruction ( String str ) {
                CPOperand in = new CPOperand("", ValueType.UNKNOWN, 
DataType.UNKNOWN);
                CPOperand out = new CPOperand("", ValueType.UNKNOWN, 
DataType.UNKNOWN);
                String opcode = parseUnaryInstruction(str, in, out);
@@ -48,9 +47,7 @@ public class UnaryMatrixSPInstruction extends 
UnarySPInstruction {
        }
 
        @Override 
-       public void processInstruction(ExecutionContext ec) 
-               throws DMLRuntimeException 
-       {       
+       public void processInstruction(ExecutionContext ec) {
                SparkExecutionContext sec = (SparkExecutionContext)ec;
                
                //get input

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/UnarySPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/UnarySPInstruction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/UnarySPInstruction.java
index 0f58789..86a4bd4 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/UnarySPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/UnarySPInstruction.java
@@ -38,25 +38,22 @@ public abstract class UnarySPInstruction extends 
ComputationSPInstruction {
                super(type, op, in1, in2, in3, out, opcode, instr);
        }
 
-       static String parseUnaryInstruction(String instr, CPOperand in,
-                       CPOperand out) throws DMLRuntimeException {
+       static String parseUnaryInstruction(String instr, CPOperand in, 
CPOperand out) {
                InstructionUtils.checkNumFields(instr, 2);
                return parse(instr, in, null, null, out);
        }
 
-       static String parseUnaryInstruction(String instr, CPOperand in1,
-                       CPOperand in2, CPOperand out) throws 
DMLRuntimeException {
+       static String parseUnaryInstruction(String instr, CPOperand in1, 
CPOperand in2, CPOperand out) {
                InstructionUtils.checkNumFields(instr, 3);
                return parse(instr, in1, in2, null, out);
        }
 
-       static String parseUnaryInstruction(String instr, CPOperand in1,
-                       CPOperand in2, CPOperand in3, CPOperand out) throws 
DMLRuntimeException {
+       static String parseUnaryInstruction(String instr, CPOperand in1, 
CPOperand in2, CPOperand in3, CPOperand out) {
                InstructionUtils.checkNumFields(instr, 4);
                return parse(instr, in1, in2, in3, out);
        }
 
-       private static String parse(String instr, CPOperand in1, CPOperand in2, 
CPOperand in3, CPOperand out) throws DMLRuntimeException {
+       private static String parse(String instr, CPOperand in1, CPOperand in2, 
CPOperand in3, CPOperand out) {
                String[] parts = 
InstructionUtils.getInstructionPartsWithValueType(instr);
                
                // first part is the opcode, last part is the output, middle 
parts are input operands

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/WriteSPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/WriteSPInstruction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/WriteSPInstruction.java
index 76f8c8f..76fd851 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/WriteSPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/WriteSPInstruction.java
@@ -64,9 +64,7 @@ public class WriteSPInstruction extends SPInstruction {
                formatProperties = null; // set in case of csv
        }
 
-       public static WriteSPInstruction parseInstruction ( String str ) 
-               throws DMLRuntimeException 
-       {
+       public static WriteSPInstruction parseInstruction ( String str ) {
                String[] parts = 
InstructionUtils.getInstructionPartsWithValueType ( str );
                String opcode = parts[0];
                
@@ -116,9 +114,7 @@ public class WriteSPInstruction extends SPInstruction {
        }
        
        @Override
-       public void processInstruction(ExecutionContext ec)
-               throws DMLRuntimeException 
-       {                       
+       public void processInstruction(ExecutionContext ec) {
                SparkExecutionContext sec = (SparkExecutionContext) ec;
 
                //get filename (literal or variable expression)
@@ -151,7 +147,7 @@ public class WriteSPInstruction extends SPInstruction {
        }
 
        protected void processMatrixWriteInstruction(SparkExecutionContext sec, 
String fname, OutputInfo oi) 
-               throws DMLRuntimeException, IOException
+               throws IOException
        {
                //get input rdd
                JavaPairRDD<MatrixIndexes,MatrixBlock> in1 = 
sec.getBinaryBlockRDDHandleForVariable( input1.getName() );
@@ -235,7 +231,7 @@ public class WriteSPInstruction extends SPInstruction {
 
        @SuppressWarnings("unchecked")
        protected void processFrameWriteInstruction(SparkExecutionContext sec, 
String fname, OutputInfo oi, ValueType[] schema) 
-               throws DMLRuntimeException, IOException
+               throws IOException
        {
                //get input rdd
                JavaPairRDD<Long,FrameBlock> in1 = 
(JavaPairRDD<Long,FrameBlock>)sec
@@ -268,9 +264,7 @@ public class WriteSPInstruction extends SPInstruction {
                MapReduceTool.writeMetaDataFile(fname + ".mtd", 
input1.getValueType(), schema, DataType.FRAME, mc, oi, formatProperties);       
        }
 
-       private static void customSaveTextFile(JavaRDD<String> rdd, String 
fname, boolean inSingleFile) 
-               throws DMLRuntimeException 
-       {
+       private static void customSaveTextFile(JavaRDD<String> rdd, String 
fname, boolean inSingleFile) {
                if(inSingleFile) {
                        Random rand = new Random();
                        String randFName = fname + "_" + rand.nextLong() + "_" 
+ rand.nextLong();

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/ZipmmSPInstruction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/ZipmmSPInstruction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/ZipmmSPInstruction.java
index d31dce5..ec0b300 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/ZipmmSPInstruction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/ZipmmSPInstruction.java
@@ -51,9 +51,7 @@ public class ZipmmSPInstruction extends BinarySPInstruction {
                _tRewrite = tRewrite;
        }
 
-       public static ZipmmSPInstruction parseInstruction( String str ) 
-               throws DMLRuntimeException 
-       {
+       public static ZipmmSPInstruction parseInstruction( String str ) {
                String[] parts = 
InstructionUtils.getInstructionPartsWithValueType(str);
                String opcode = parts[0];
 
@@ -70,13 +68,10 @@ public class ZipmmSPInstruction extends BinarySPInstruction 
{
                else {
                        throw new 
DMLRuntimeException("ZipmmSPInstruction.parseInstruction():: Unknown opcode " + 
opcode);
                }
-               
        }
        
        @Override
-       public void processInstruction(ExecutionContext ec) 
-               throws DMLRuntimeException
-       {       
+       public void processInstruction(ExecutionContext ec) {
                SparkExecutionContext sec = (SparkExecutionContext)ec;
                
                //get rdd inputs (for computing r = t(X)%*%y via r = 
t(t(y)%*%X))

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/data/PartitionedBlock.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/PartitionedBlock.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/PartitionedBlock.java
index c4febbf..ba4660a 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/PartitionedBlock.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/PartitionedBlock.java
@@ -146,9 +146,7 @@ public class PartitionedBlock<T extends CacheBlock> 
implements Externalizable
        }
 
        @SuppressWarnings("unchecked")
-       public T getBlock(int rowIndex, int colIndex) 
-               throws DMLRuntimeException 
-       {
+       public T getBlock(int rowIndex, int colIndex) {
                //check for valid block index
                int nrblks = getNumRowBlocks();
                int ncblks = getNumColumnBlocks();
@@ -163,9 +161,7 @@ public class PartitionedBlock<T extends CacheBlock> 
implements Externalizable
                return (T)_partBlocks[ix];
        }
 
-       public void setBlock(int rowIndex, int colIndex, T block) 
-               throws DMLRuntimeException
-       {
+       public void setBlock(int rowIndex, int colIndex, T block) {
                //check for valid block index
                int nrblks = getNumRowBlocks();
                int ncblks = getNumColumnBlocks();
@@ -212,12 +208,9 @@ public class PartitionedBlock<T extends CacheBlock> 
implements Externalizable
         * @param cu column upper bound
         * @param block block object
         * @return block object
-        * @throws DMLRuntimeException if DMLRuntimeException occurs
         */
        @SuppressWarnings("unchecked")
-       public T slice(long rl, long ru, long cl, long cu, T block) 
-               throws DMLRuntimeException 
-       {
+       public T slice(long rl, long ru, long cl, long cu, T block) {
                int lrl = (int) rl;
                int lru = (int) ru;
                int lcl = (int) cl;

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/data/PartitionedBroadcast.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/PartitionedBroadcast.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/PartitionedBroadcast.java
index ef5395b..94fcee4 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/data/PartitionedBroadcast.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/data/PartitionedBroadcast.java
@@ -22,7 +22,6 @@ package org.apache.sysml.runtime.instructions.spark.data;
 import java.io.Serializable;
 
 import org.apache.spark.broadcast.Broadcast;
-import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.caching.CacheBlock;
 import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext;
 import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
@@ -79,9 +78,7 @@ public class PartitionedBroadcast<T extends CacheBlock> 
implements Serializable
                        Math.min(rlen, brlen) / Math.min(clen, bclen));
        }
 
-       public T getBlock(int rowIndex, int colIndex) 
-               throws DMLRuntimeException 
-       {
+       public T getBlock(int rowIndex, int colIndex) {
                int pix = 0;
                if( _pbc.length > 1 ) { //compute partition index
                        int numPerPart = 
computeBlocksPerPartition(_mc.getRows(),
@@ -93,9 +90,7 @@ public class PartitionedBroadcast<T extends CacheBlock> 
implements Serializable
                return _pbc[pix].value().getBlock(rowIndex, colIndex);
        }
        
-       public T slice(long rl, long ru, long cl, long cu, T block) 
-               throws DMLRuntimeException 
-       {
+       public T slice(long rl, long ru, long cl, long cu, T block) {
                T ret = null;
                for( Broadcast<PartitionedBlock<T>> bc : _pbc ) {
                        PartitionedBlock<T> pm = bc.value();

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ComputeBinaryBlockNnzFunction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ComputeBinaryBlockNnzFunction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ComputeBinaryBlockNnzFunction.java
index 7b6daad..293068d 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ComputeBinaryBlockNnzFunction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ComputeBinaryBlockNnzFunction.java
@@ -29,14 +29,12 @@ public class ComputeBinaryBlockNnzFunction implements 
Function<MatrixBlock,Matri
        
        private LongAccumulator _aNnz = null;
        
-       public ComputeBinaryBlockNnzFunction( LongAccumulator aNnz )
-       {
+       public ComputeBinaryBlockNnzFunction( LongAccumulator aNnz ) {
                _aNnz = aNnz;
        }
 
        @Override
-       public MatrixBlock call(MatrixBlock arg0) throws Exception 
-       {
+       public MatrixBlock call(MatrixBlock arg0) {
                _aNnz.add( arg0.getNonZeros() );
                return arg0;
        }

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ConvertMatrixBlockToIJVLines.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ConvertMatrixBlockToIJVLines.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ConvertMatrixBlockToIJVLines.java
index 621fb06..daaf43c 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ConvertMatrixBlockToIJVLines.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ConvertMatrixBlockToIJVLines.java
@@ -39,7 +39,7 @@ public class ConvertMatrixBlockToIJVLines implements 
FlatMapFunction<Tuple2<Matr
        }
        
        @Override
-       public Iterator<String> call(Tuple2<MatrixIndexes, MatrixBlock> kv) 
throws Exception {
+       public Iterator<String> call(Tuple2<MatrixIndexes, MatrixBlock> kv) {
                final BinaryBlockToTextCellConverter converter = new 
BinaryBlockToTextCellConverter();
                converter.setBlockSize(brlen, bclen);
                converter.convert(kv._1, kv._2);

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ConvertStringToLongTextPair.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ConvertStringToLongTextPair.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ConvertStringToLongTextPair.java
index 26dbf5e..8ed25d1 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ConvertStringToLongTextPair.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ConvertStringToLongTextPair.java
@@ -29,8 +29,7 @@ public class ConvertStringToLongTextPair implements 
PairFunction<String, LongWri
        private static final long serialVersionUID = 6443041051069809479L;
 
        @Override
-       public Tuple2<LongWritable, Text> call(String arg0) throws Exception {
+       public Tuple2<LongWritable, Text> call(String arg0) {
                return new Tuple2<>(new LongWritable(1), new Text(arg0));
        }
-
 }

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ExtractBlockForBinaryReblock.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ExtractBlockForBinaryReblock.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ExtractBlockForBinaryReblock.java
index e6042ed..a2a1ce0 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ExtractBlockForBinaryReblock.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ExtractBlockForBinaryReblock.java
@@ -43,9 +43,7 @@ public class ExtractBlockForBinaryReblock implements 
PairFlatMapFunction<Tuple2<
        private int out_brlen; 
        private int out_bclen;
        
-       public ExtractBlockForBinaryReblock(MatrixCharacteristics mcIn, 
MatrixCharacteristics mcOut) 
-               throws DMLRuntimeException 
-       {
+       public ExtractBlockForBinaryReblock(MatrixCharacteristics mcIn, 
MatrixCharacteristics mcOut) {
                rlen = mcIn.getRows(); 
                clen = mcIn.getCols();
                in_brlen = mcIn.getRowsPerBlock(); 

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ReorgMapFunction.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ReorgMapFunction.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ReorgMapFunction.java
index 111e3b9..8fada07 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ReorgMapFunction.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/functions/ReorgMapFunction.java
@@ -39,9 +39,7 @@ public class ReorgMapFunction implements 
PairFunction<Tuple2<MatrixIndexes, Matr
        private ReorgOperator _reorgOp = null;
        private IndexFunction _indexFnObject = null;
        
-       public ReorgMapFunction(String opcode) 
-                       throws DMLRuntimeException 
-       {
+       public ReorgMapFunction(String opcode) {
                if(opcode.equalsIgnoreCase("r'")) {
                        _indexFnObject = SwapIndex.getSwapIndexFnObject();
                }

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/FrameRDDConverterUtils.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/FrameRDDConverterUtils.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/FrameRDDConverterUtils.java
index ad408a1..05d7791 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/FrameRDDConverterUtils.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/FrameRDDConverterUtils.java
@@ -81,9 +81,7 @@ public class FrameRDDConverterUtils
 
        public static JavaPairRDD<Long, FrameBlock> 
csvToBinaryBlock(JavaSparkContext sc,
                        JavaPairRDD<LongWritable, Text> input, 
MatrixCharacteristics mc, ValueType[] schema,
-                       boolean hasHeader, String delim, boolean fill, double 
fillValue) 
-               throws DMLRuntimeException 
-       {
+                       boolean hasHeader, String delim, boolean fill, double 
fillValue) {
                //determine unknown dimensions and sparsity if required
                if( !mc.dimsKnown() ) { //nnz irrelevant here
                        JavaRDD<String> tmp = input.values()
@@ -114,9 +112,7 @@ public class FrameRDDConverterUtils
 
        public static JavaPairRDD<Long, FrameBlock> 
csvToBinaryBlock(JavaSparkContext sc,
                        JavaRDD<String> input, MatrixCharacteristics mcOut, 
ValueType[] schema,
-                       boolean hasHeader, String delim, boolean fill, double 
fillValue) 
-               throws DMLRuntimeException 
-       {
+                       boolean hasHeader, String delim, boolean fill, double 
fillValue) {
                //convert string rdd to serializable longwritable/text
                JavaPairRDD<LongWritable, Text> prepinput =
                                input.mapToPair(new StringToSerTextFunction());
@@ -145,21 +141,16 @@ public class FrameRDDConverterUtils
        // Text cell <--> Binary block
 
        public static JavaPairRDD<Long, FrameBlock> 
textCellToBinaryBlock(JavaSparkContext sc,
-                       JavaPairRDD<LongWritable, Text> in, 
MatrixCharacteristics mcOut, ValueType[] schema ) 
-               throws DMLRuntimeException  
-       {
+                       JavaPairRDD<LongWritable, Text> in, 
MatrixCharacteristics mcOut, ValueType[] schema ) {
                //convert input rdd to serializable long/frame block
                JavaPairRDD<Long,Text> input = 
                                in.mapToPair(new 
LongWritableTextToLongTextFunction());
-               
                //do actual conversion
                return textCellToBinaryBlockLongIndex(sc, input, mcOut, schema);
        }
 
        public static JavaPairRDD<Long, FrameBlock> 
textCellToBinaryBlockLongIndex(JavaSparkContext sc,
-                       JavaPairRDD<Long, Text> input, MatrixCharacteristics 
mc, ValueType[] schema ) 
-               throws DMLRuntimeException  
-       {
+                       JavaPairRDD<Long, Text> input, MatrixCharacteristics 
mc, ValueType[] schema ) {
                //prepare default schema if needed
                if( schema == null || schema.length==1 ) {
                        schema = UtilFunctions.nCopies((int)mc.getCols(), 
@@ -174,9 +165,7 @@ public class FrameRDDConverterUtils
                return FrameRDDAggregateUtils.mergeByKey( output ); 
        }
 
-       public static JavaRDD<String> binaryBlockToTextCell(JavaPairRDD<Long, 
FrameBlock> input, MatrixCharacteristics mcIn) 
-               throws DMLRuntimeException 
-       {
+       public static JavaRDD<String> binaryBlockToTextCell(JavaPairRDD<Long, 
FrameBlock> input, MatrixCharacteristics mcIn) {
                //convert frame blocks to ijv string triples  
                return input.flatMap(new ConvertFrameBlockToIJVLines());
        }
@@ -185,18 +174,14 @@ public class FrameRDDConverterUtils
        // Matrix block <--> Binary block
 
        public static JavaPairRDD<LongWritable, FrameBlock> 
matrixBlockToBinaryBlock(JavaSparkContext sc,
-                       JavaPairRDD<MatrixIndexes, MatrixBlock> input, 
MatrixCharacteristics mcIn)
-               throws DMLRuntimeException 
-       {
+                       JavaPairRDD<MatrixIndexes, MatrixBlock> input, 
MatrixCharacteristics mcIn) {
                //convert and map to serializable LongWritable/frame block
                return matrixBlockToBinaryBlockLongIndex(sc,input, mcIn)
                        .mapToPair(new LongFrameToLongWritableFrameFunction());
        }
 
        public static JavaPairRDD<Long, FrameBlock> 
matrixBlockToBinaryBlockLongIndex(JavaSparkContext sc,
-                       JavaPairRDD<MatrixIndexes, MatrixBlock> input, 
MatrixCharacteristics mcIn)
-               throws DMLRuntimeException 
-       {
+                       JavaPairRDD<MatrixIndexes, MatrixBlock> input, 
MatrixCharacteristics mcIn) {
                JavaPairRDD<MatrixIndexes, MatrixBlock> in = input;
                MatrixCharacteristics mc = new MatrixCharacteristics(mcIn);
                
@@ -230,16 +215,12 @@ public class FrameRDDConverterUtils
        // DataFrame <--> Binary block
 
        public static JavaPairRDD<Long, FrameBlock> 
dataFrameToBinaryBlock(JavaSparkContext sc,
-                       Dataset<Row> df, MatrixCharacteristics mc, boolean 
containsID) 
-               throws DMLRuntimeException 
-       {
+                       Dataset<Row> df, MatrixCharacteristics mc, boolean 
containsID) {
                return dataFrameToBinaryBlock(sc, df, mc, containsID, new 
Pair<String[], ValueType[]>());
        }
 
        public static JavaPairRDD<Long, FrameBlock> 
dataFrameToBinaryBlock(JavaSparkContext sc,
-                       Dataset<Row> df, MatrixCharacteristics mc, boolean 
containsID, Pair<String[],ValueType[]> out) 
-               throws DMLRuntimeException 
-       {
+                       Dataset<Row> df, MatrixCharacteristics mc, boolean 
containsID, Pair<String[],ValueType[]> out) {
                //determine unknown dimensions if required
                if( !mc.dimsKnown() ) { //nnz are irrelevant here
                        int colVect = getColVectFromDFSchema(df.schema(), 
containsID);
@@ -646,9 +627,7 @@ public class FrameRDDConverterUtils
                        return fb;
                }
 
-               private static void flushBlocksToList( Long ix, FrameBlock fb, 
ArrayList<Tuple2<Long,FrameBlock>> ret ) 
-                       throws DMLRuntimeException
-               {
+               private static void flushBlocksToList( Long ix, FrameBlock fb, 
ArrayList<Tuple2<Long,FrameBlock>> ret ) {
                        if( fb != null && fb.getNumRows()>=0 )
                                ret.add(new Tuple2<>(ix, fb));
                }
@@ -786,9 +765,7 @@ public class FrameRDDConverterUtils
                        return ret.iterator();
                }
 
-               private static void flushBlocksToList( Long ix, FrameBlock fb, 
ArrayList<Tuple2<Long,FrameBlock>> ret ) 
-                       throws DMLRuntimeException
-               {
+               private static void flushBlocksToList( Long ix, FrameBlock fb, 
ArrayList<Tuple2<Long,FrameBlock>> ret ) {
                        if( fb != null && fb.getNumRows()>=0 )
                                ret.add(new Tuple2<>(ix, fb));
                }

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDConverterUtils.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDConverterUtils.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDConverterUtils.java
index 9ed2f00..6e647ee 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDConverterUtils.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDConverterUtils.java
@@ -79,9 +79,7 @@ public class RDDConverterUtils
        public static final String DF_ID_COLUMN = "__INDEX";
 
        public static JavaPairRDD<MatrixIndexes, MatrixBlock> 
textCellToBinaryBlock(JavaSparkContext sc,
-                       JavaPairRDD<LongWritable, Text> input, 
MatrixCharacteristics mcOut, boolean outputEmptyBlocks) 
-               throws DMLRuntimeException  
-       {
+                       JavaPairRDD<LongWritable, Text> input, 
MatrixCharacteristics mcOut, boolean outputEmptyBlocks) {
                //convert textcell rdd to binary block rdd (w/ partial blocks)
                JavaPairRDD<MatrixIndexes, MatrixBlock> out = input.values()
                                .mapPartitionsToPair(new 
TextToBinaryBlockFunction(mcOut));
@@ -99,9 +97,7 @@ public class RDDConverterUtils
        }
 
        public static JavaPairRDD<MatrixIndexes, MatrixBlock> 
binaryCellToBinaryBlock(JavaSparkContext sc,
-                       JavaPairRDD<MatrixIndexes, MatrixCell> input, 
MatrixCharacteristics mcOut, boolean outputEmptyBlocks) 
-               throws DMLRuntimeException 
-       {       
+                       JavaPairRDD<MatrixIndexes, MatrixCell> input, 
MatrixCharacteristics mcOut, boolean outputEmptyBlocks) {
                //convert binarycell rdd to binary block rdd (w/ partial blocks)
                JavaPairRDD<MatrixIndexes, MatrixBlock> out = input
                                .mapPartitionsToPair(new 
BinaryCellToBinaryBlockFunction(mcOut));
@@ -167,9 +163,7 @@ public class RDDConverterUtils
 
        public static JavaPairRDD<MatrixIndexes, MatrixBlock> 
csvToBinaryBlock(JavaSparkContext sc,
                        JavaPairRDD<LongWritable, Text> input, 
MatrixCharacteristics mc, 
-                       boolean hasHeader, String delim, boolean fill, double 
fillValue) 
-               throws DMLRuntimeException 
-       {
+                       boolean hasHeader, String delim, boolean fill, double 
fillValue) {
                //determine unknown dimensions and sparsity if required
                if( !mc.dimsKnown(true) ) {
                        LongAccumulator aNnz = sc.sc().longAccumulator("nnz");
@@ -217,12 +211,10 @@ public class RDDConverterUtils
         * @param fill if true, fill in empty values with fillValue
         * @param fillValue fill value used to fill empty values
         * @return matrix as {@code JavaPairRDD<MatrixIndexes, MatrixBlock>}
-        * @throws DMLRuntimeException if DMLRuntimeException occurs
         */
        public static JavaPairRDD<MatrixIndexes, MatrixBlock> 
csvToBinaryBlock(JavaSparkContext sc,
                        JavaRDD<String> input, MatrixCharacteristics mcOut, 
                        boolean hasHeader, String delim, boolean fill, double 
fillValue) 
-               throws DMLRuntimeException 
        {
                //convert string rdd to serializable longwritable/text
                JavaPairRDD<LongWritable, Text> prepinput =
@@ -317,11 +309,9 @@ public class RDDConverterUtils
         * @param pathX path to binary block output file of features
         * @param pathY path to binary block output file of labels
         * @param mcOutX matrix characteristics of output matrix X
-        * @throws DMLRuntimeException if output path not writable or 
conversion failure
         */
        public static void libsvmToBinaryBlock(JavaSparkContext sc, String 
pathIn, 
                        String pathX, String pathY, MatrixCharacteristics 
mcOutX) 
-               throws DMLRuntimeException
        {
                if( !mcOutX.dimsKnown() )
                        throw new DMLRuntimeException("Matrix characteristics "
@@ -774,15 +764,13 @@ public class RDDConverterUtils
                }
                
                // Flushes current state of filled column blocks to output list.
-               private static void flushBlocksToList( MatrixIndexes[] ix, 
MatrixBlock[] mb, ArrayList<Tuple2<MatrixIndexes,MatrixBlock>> ret ) 
-                       throws DMLRuntimeException
-               {
-                       int len = ix.length;                    
+               private static void flushBlocksToList( MatrixIndexes[] ix, 
MatrixBlock[] mb, ArrayList<Tuple2<MatrixIndexes,MatrixBlock>> ret ) {
+                       int len = ix.length;
                        for( int i=0; i<len; i++ )
                                if( mb[i] != null ) {
                                        ret.add(new Tuple2<>(ix[i],mb[i]));
                                        mb[i].examSparsity(); //ensure right 
representation
-                               }       
+                               }
                }
        }
 
@@ -891,15 +879,13 @@ public class RDDConverterUtils
                }
                
                // Flushes current state of filled column blocks to output list.
-               private static void flushBlocksToList( MatrixIndexes[] ix, 
MatrixBlock[] mb, ArrayList<Tuple2<MatrixIndexes,MatrixBlock>> ret ) 
-                       throws DMLRuntimeException
-               {
+               private static void flushBlocksToList( MatrixIndexes[] ix, 
MatrixBlock[] mb, ArrayList<Tuple2<MatrixIndexes,MatrixBlock>> ret ) {
                        int len = ix.length;
                        for( int i=0; i<len; i++ )
                                if( mb[i] != null ) {
                                        ret.add(new Tuple2<>(ix[i],mb[i]));
                                        mb[i].examSparsity(); //ensure right 
representation
-                               }       
+                               }
                }
        }
        
@@ -1125,9 +1111,7 @@ public class RDDConverterUtils
                }
                
                // Flushes current state of filled column blocks to output list.
-               private static void flushBlocksToList( MatrixIndexes[] ix, 
MatrixBlock[] mb, ArrayList<Tuple2<MatrixIndexes,MatrixBlock>> ret ) 
-                       throws DMLRuntimeException
-               {
+               private static void flushBlocksToList( MatrixIndexes[] ix, 
MatrixBlock[] mb, ArrayList<Tuple2<MatrixIndexes,MatrixBlock>> ret ) {
                        int len = ix.length;
                        for( int i=0; i<len; i++ )
                                if( mb[i] != null ) {

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDConverterUtilsExt.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDConverterUtilsExt.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDConverterUtilsExt.java
index eddc971..77800e4 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDConverterUtilsExt.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDConverterUtilsExt.java
@@ -93,10 +93,9 @@ public class RDDConverterUtilsExt
         * @param mcIn matrix characteristics
         * @param outputEmptyBlocks if true, inject empty blocks if necessary
         * @return matrix as {@code JavaPairRDD<MatrixIndexes, MatrixBlock>}
-        * @throws DMLRuntimeException if DMLRuntimeException occurs
         */
        public static JavaPairRDD<MatrixIndexes, MatrixBlock> 
coordinateMatrixToBinaryBlock(JavaSparkContext sc,
-                       CoordinateMatrix input, MatrixCharacteristics mcIn, 
boolean outputEmptyBlocks) throws DMLRuntimeException
+                       CoordinateMatrix input, MatrixCharacteristics mcIn, 
boolean outputEmptyBlocks)
        {
                //convert matrix entry rdd to binary block rdd (w/ partial 
blocks)
                JavaPairRDD<MatrixIndexes, MatrixBlock> out = 
input.entries().toJavaRDD()
@@ -115,12 +114,11 @@ public class RDDConverterUtilsExt
        }
 
        public static JavaPairRDD<MatrixIndexes, MatrixBlock> 
coordinateMatrixToBinaryBlock(SparkContext sc,
-                       CoordinateMatrix input, MatrixCharacteristics mcIn, 
boolean outputEmptyBlocks) throws DMLRuntimeException
-       {
+                       CoordinateMatrix input, MatrixCharacteristics mcIn, 
boolean outputEmptyBlocks) {
                return coordinateMatrixToBinaryBlock(new JavaSparkContext(sc), 
input, mcIn, true);
        }
 
-       public static Dataset<Row> projectColumns(Dataset<Row> df, 
ArrayList<String> columns) throws DMLRuntimeException {
+       public static Dataset<Row> projectColumns(Dataset<Row> df, 
ArrayList<String> columns) {
                ArrayList<String> columnToSelect = new ArrayList<String>();
                for(int i = 1; i < columns.size(); i++) {
                        columnToSelect.add(columns.get(i));
@@ -128,19 +126,19 @@ public class RDDConverterUtilsExt
                return df.select(columns.get(0), 
scala.collection.JavaConversions.asScalaBuffer(columnToSelect).toList());
        }
 
-       public static MatrixBlock convertPy4JArrayToMB(byte [] data, long rlen, 
long clen) throws DMLRuntimeException {
+       public static MatrixBlock convertPy4JArrayToMB(byte [] data, long rlen, 
long clen) {
                return convertPy4JArrayToMB(data, (int)rlen, (int)clen, false);
        }
 
-       public static MatrixBlock convertPy4JArrayToMB(byte [] data, int rlen, 
int clen) throws DMLRuntimeException {
+       public static MatrixBlock convertPy4JArrayToMB(byte [] data, int rlen, 
int clen) {
                return convertPy4JArrayToMB(data, rlen, clen, false);
        }
 
-       public static MatrixBlock convertSciPyCOOToMB(byte [] data, byte [] 
row, byte [] col, long rlen, long clen, long nnz) throws DMLRuntimeException {
+       public static MatrixBlock convertSciPyCOOToMB(byte [] data, byte [] 
row, byte [] col, long rlen, long clen, long nnz) {
                return convertSciPyCOOToMB(data, row, col, (int)rlen, 
(int)clen, (int)nnz);
        }
 
-       public static MatrixBlock convertSciPyCOOToMB(byte [] data, byte [] 
row, byte [] col, int rlen, int clen, int nnz) throws DMLRuntimeException {
+       public static MatrixBlock convertSciPyCOOToMB(byte [] data, byte [] 
row, byte [] col, int rlen, int clen, int nnz) {
                MatrixBlock mb = new MatrixBlock(rlen, clen, true);
                mb.allocateSparseRowsBlock(false);
                ByteBuffer buf1 = ByteBuffer.wrap(data);
@@ -160,7 +158,7 @@ public class RDDConverterUtilsExt
                return mb;
        }
 
-       public static MatrixBlock convertPy4JArrayToMB(byte [] data, long rlen, 
long clen, boolean isSparse) throws DMLRuntimeException {
+       public static MatrixBlock convertPy4JArrayToMB(byte [] data, long rlen, 
long clen, boolean isSparse) {
                return convertPy4JArrayToMB(data, (int) rlen, (int) clen, 
isSparse);
        }
 
@@ -169,35 +167,35 @@ public class RDDConverterUtilsExt
                ret.allocateBlock();
                return ret;
        }
-       public static MatrixBlock allocateDenseOrSparse(long rlen, long clen, 
boolean isSparse) throws DMLRuntimeException {
+       public static MatrixBlock allocateDenseOrSparse(long rlen, long clen, 
boolean isSparse) {
                if(rlen > Integer.MAX_VALUE || clen > Integer.MAX_VALUE) {
                        throw new DMLRuntimeException("Dimensions of matrix are 
too large to be passed via NumPy/SciPy:" + rlen + " X " + clen);
                }
                return allocateDenseOrSparse(rlen, clen, isSparse);
        }
 
-       public static void copyRowBlocks(MatrixBlock mb, int rowIndex, 
MatrixBlock ret, int numRowsPerBlock, int rlen, int clen) throws 
DMLRuntimeException {
+       public static void copyRowBlocks(MatrixBlock mb, int rowIndex, 
MatrixBlock ret, int numRowsPerBlock, int rlen, int clen) {
                copyRowBlocks(mb, (long)rowIndex, ret, (long)numRowsPerBlock, 
(long)rlen, (long)clen);
        }
-       public static void copyRowBlocks(MatrixBlock mb, long rowIndex, 
MatrixBlock ret, int numRowsPerBlock, int rlen, int clen) throws 
DMLRuntimeException {
+       public static void copyRowBlocks(MatrixBlock mb, long rowIndex, 
MatrixBlock ret, int numRowsPerBlock, int rlen, int clen) {
                copyRowBlocks(mb, (long)rowIndex, ret, (long)numRowsPerBlock, 
(long)rlen, (long)clen);
        }
-       public static void copyRowBlocks(MatrixBlock mb, int rowIndex, 
MatrixBlock ret, long numRowsPerBlock, long rlen, long clen) throws 
DMLRuntimeException {
+       public static void copyRowBlocks(MatrixBlock mb, int rowIndex, 
MatrixBlock ret, long numRowsPerBlock, long rlen, long clen) {
                copyRowBlocks(mb, (long)rowIndex, ret, (long)numRowsPerBlock, 
(long)rlen, (long)clen);
        }
-       public static void copyRowBlocks(MatrixBlock mb, long rowIndex, 
MatrixBlock ret, long numRowsPerBlock, long rlen, long clen) throws 
DMLRuntimeException {
+       public static void copyRowBlocks(MatrixBlock mb, long rowIndex, 
MatrixBlock ret, long numRowsPerBlock, long rlen, long clen) {
                // TODO: Double-check if synchronization is required here.
                // synchronized (RDDConverterUtilsExt.class) {
                        ret.copy((int)(rowIndex*numRowsPerBlock), 
(int)Math.min((rowIndex+1)*numRowsPerBlock-1, rlen-1), 0, (int)(clen-1), mb, 
false);
                // }
        }
 
-       public static void postProcessAfterCopying(MatrixBlock ret) throws 
DMLRuntimeException {
+       public static void postProcessAfterCopying(MatrixBlock ret) {
                ret.recomputeNonZeros();
                ret.examSparsity();
        }
 
-       public static MatrixBlock convertPy4JArrayToMB(byte [] data, int rlen, 
int clen, boolean isSparse) throws DMLRuntimeException {
+       public static MatrixBlock convertPy4JArrayToMB(byte [] data, int rlen, 
int clen, boolean isSparse) {
                MatrixBlock mb = new MatrixBlock(rlen, clen, isSparse, -1);
                if(isSparse) {
                        throw new DMLRuntimeException("Convertion to sparse 
format not supported");
@@ -219,7 +217,7 @@ public class RDDConverterUtilsExt
                return mb;
        }
 
-       public static byte [] convertMBtoPy4JDenseArr(MatrixBlock mb) throws 
DMLRuntimeException {
+       public static byte [] convertMBtoPy4JDenseArr(MatrixBlock mb) {
                byte [] ret = null;
                if(mb.isInSparseFormat()) {
                        mb.sparseToDense();
@@ -290,7 +288,7 @@ public class RDDConverterUtilsExt
                private static final long serialVersionUID = 
4907483236186747224L;
 
                private IJVToBinaryBlockFunctionHelper helper = null;
-               public MatrixEntryToBinaryBlockFunction(MatrixCharacteristics 
mc) throws DMLRuntimeException {
+               public MatrixEntryToBinaryBlockFunction(MatrixCharacteristics 
mc) {
                        helper = new IJVToBinaryBlockFunctionHelper(mc);
                }
 
@@ -312,19 +310,15 @@ public class RDDConverterUtilsExt
                private int _brlen = -1;
                private int _bclen = -1;
 
-               public IJVToBinaryBlockFunctionHelper(MatrixCharacteristics mc) 
throws DMLRuntimeException
-               {
-                       if(!mc.dimsKnown()) {
+               public IJVToBinaryBlockFunctionHelper(MatrixCharacteristics mc) 
{
+                       if(!mc.dimsKnown())
                                throw new DMLRuntimeException("The dimensions 
need to be known in given MatrixCharacteristics for given input RDD");
-                       }
                        _rlen = mc.getRows();
                        _clen = mc.getCols();
                        _brlen = mc.getRowsPerBlock();
                        _bclen = mc.getColsPerBlock();
-
                        //determine upper bounded buffer len
                        _bufflen = (int) Math.min(_rlen*_clen, BUFFER_SIZE);
-
                }
 
                // ----------------------------------------------------
@@ -422,12 +416,9 @@ public class RDDConverterUtilsExt
         *            dataframe of comma-separated row strings to convert to
         *            dataframe of ml.linalg.Vector rows
         * @return dataframe of ml.linalg.Vector rows
-        * @throws DMLRuntimeException
-        *             if DMLRuntimeException occurs
         */
        public static Dataset<Row> 
stringDataFrameToVectorDataFrame(SparkSession sparkSession, Dataset<Row> 
inputDF)
-                       throws DMLRuntimeException {
-
+       {
                StructField[] oldSchema = inputDF.schema().fields();
                StructField[] newSchema = new StructField[oldSchema.length];
                for (int i = 0; i < oldSchema.length; i++) {

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDSortUtils.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDSortUtils.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDSortUtils.java
index 8571963..ef2bca3 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDSortUtils.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/RDDSortUtils.java
@@ -34,7 +34,6 @@ import org.apache.spark.broadcast.Broadcast;
 
 import scala.Tuple2;
 
-import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.caching.MatrixObject.UpdateType;
 import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext;
 import 
org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer;
@@ -243,12 +242,10 @@ public class RDDSortUtils
         * @param sec spark execution context
         * @param r_op reorg operator
         * @return data as {@code JavaPairRDD<MatrixIndexes, MatrixBlock>}
-        * @throws DMLRuntimeException if DMLRuntimeException occurs
         */
        public static JavaPairRDD<MatrixIndexes, MatrixBlock> 
sortDataByValMemSort( JavaPairRDD<MatrixIndexes, MatrixBlock> val, 
                        JavaPairRDD<MatrixIndexes, MatrixBlock> data, boolean 
asc, long rlen, long clen, int brlen, int bclen, 
                        SparkExecutionContext sec, ReorgOperator r_op) 
-                                       throws DMLRuntimeException
        {
                //collect orderby column for in-memory sorting
                MatrixBlock inMatBlock = SparkExecutionContext

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/SparkUtils.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/SparkUtils.java
 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/SparkUtils.java
index b24d56f..546266b 100644
--- 
a/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/SparkUtils.java
+++ 
b/src/main/java/org/apache/sysml/runtime/instructions/spark/utils/SparkUtils.java
@@ -35,7 +35,6 @@ import org.apache.spark.api.java.function.PairFlatMapFunction;
 import org.apache.spark.storage.StorageLevel;
 import org.apache.sysml.hops.OptimizerUtils;
 import org.apache.sysml.lops.Checkpoint;
-import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext;
 import 
org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer;
 import 
org.apache.sysml.runtime.instructions.spark.functions.CopyBinaryCellFunction;
@@ -165,7 +164,7 @@ public class SparkUtils
        }
 
        // This returns RDD with identifier as well as location
-       public static String getStartLineFromSparkDebugInfo(String line) throws 
DMLRuntimeException {
+       public static String getStartLineFromSparkDebugInfo(String line) {
                // To remove: (2)  -- Assumption: At max, 9 RDDs as input to 
transformation/action
                String withoutPrefix = line.substring(4, line.length());
                // To remove: [Disk Memory Deserialized 1x Replicated]

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/matrix/CleanupMR.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/matrix/CleanupMR.java 
b/src/main/java/org/apache/sysml/runtime/matrix/CleanupMR.java
index 118016a..1b5607e 100644
--- a/src/main/java/org/apache/sysml/runtime/matrix/CleanupMR.java
+++ b/src/main/java/org/apache/sysml/runtime/matrix/CleanupMR.java
@@ -107,7 +107,7 @@ public class CleanupMR
        }
 
        private static void writeCleanupTasksToFile(Path path, int numTasks)
-               throws DMLRuntimeException, IOException
+               throws IOException
        {
                FileSystem fs = IOUtilFunctions.getFileSystem(path);
                try( BufferedWriter br = new BufferedWriter(new 
OutputStreamWriter(fs.create(path,true))) ) {

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/matrix/DataPartitionMR.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/matrix/DataPartitionMR.java 
b/src/main/java/org/apache/sysml/runtime/matrix/DataPartitionMR.java
index 762ecde..98b9529 100644
--- a/src/main/java/org/apache/sysml/runtime/matrix/DataPartitionMR.java
+++ b/src/main/java/org/apache/sysml/runtime/matrix/DataPartitionMR.java
@@ -37,7 +37,7 @@ public class DataPartitionMR
                //prevent instantiation via private constructor
        }
        
-       public static JobReturn runJob(MRJobInstruction jobinst, MatrixObject[] 
inputMatrices, String shuffleInst, byte[] resultIndices, MatrixObject[] 
outputMatrices, int numReducers, int replication) throws DMLRuntimeException {
+       public static JobReturn runJob(MRJobInstruction jobinst, MatrixObject[] 
inputMatrices, String shuffleInst, byte[] resultIndices, MatrixObject[] 
outputMatrices, int numReducers, int replication) {
                MatrixCharacteristics[] sts = new 
MatrixCharacteristics[outputMatrices.length];
                
                processPartitionInstructions(shuffleInst, inputMatrices, 
resultIndices, outputMatrices, numReducers, replication, sts);
@@ -46,7 +46,7 @@ public class DataPartitionMR
                return ret;
        }
        
-       private static void processPartitionInstructions(String shuffleInst, 
MatrixObject[] inputMatrices, byte[] resultIndices, MatrixObject[] 
outputMatrices, int numReducers, int replication, MatrixCharacteristics[] sts) 
throws DMLRuntimeException {
+       private static void processPartitionInstructions(String shuffleInst, 
MatrixObject[] inputMatrices, byte[] resultIndices, MatrixObject[] 
outputMatrices, int numReducers, int replication, MatrixCharacteristics[] sts) {
                int i=0;
                for(String inst : 
shuffleInst.split(Instruction.INSTRUCTION_DELIM)) {
                        if( 
InstructionUtils.getOpCode(inst).equalsIgnoreCase("partition") ) {

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/matrix/GMR.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/matrix/GMR.java 
b/src/main/java/org/apache/sysml/runtime/matrix/GMR.java
index ae9cfce..d7c7b60 100644
--- a/src/main/java/org/apache/sysml/runtime/matrix/GMR.java
+++ b/src/main/java/org/apache/sysml/runtime/matrix/GMR.java
@@ -35,7 +35,6 @@ import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.conf.DMLConfig;
 import org.apache.sysml.lops.Lop;
 import org.apache.sysml.parser.Expression.DataType;
-import org.apache.sysml.runtime.DMLRuntimeException;
 import 
org.apache.sysml.runtime.controlprogram.ParForProgramBlock.PDataPartitionFormat;
 import 
org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer;
 import org.apache.sysml.runtime.instructions.Instruction;
@@ -314,9 +313,7 @@ public class GMR
                return new JobReturn(stats, outputInfos, runjob.isSuccessful());
        }
 
-       private static boolean setupDistributedCache(JobConf job, String 
instMap, String instRed, String[] inputs, long[] rlens, long[] clens) 
-               throws DMLRuntimeException 
-       {
+       private static boolean setupDistributedCache(JobConf job, String 
instMap, String instRed, String[] inputs, long[] rlens, long[] clens) {
                //concatenate mapper and reducer instructions
                String allInsts = (instMap!=null && !instMap.trim().isEmpty() ) 
? instMap : null;
                if( allInsts==null )
@@ -386,11 +383,8 @@ public class GMR
         * @param inst3 instruction 3
         * @param inst4 instruction 4
         * @return array of byte indexes
-        * @throws DMLRuntimeException if DMLRuntimeException occurs
         */
-       private static boolean[] getDistCacheOnlyInputs(byte[] realIndexes, 
String inst1, String inst2, String inst3, String inst4) 
-               throws DMLRuntimeException
-       {
+       private static boolean[] getDistCacheOnlyInputs(byte[] realIndexes, 
String inst1, String inst2, String inst3, String inst4) {
                boolean[] ret = new boolean[realIndexes.length];
                String[] inst = new String[]{inst1, inst2, inst3, inst4};
                

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/matrix/JobReturn.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/matrix/JobReturn.java 
b/src/main/java/org/apache/sysml/runtime/matrix/JobReturn.java
index 3d846a7..bce2e03 100644
--- a/src/main/java/org/apache/sysml/runtime/matrix/JobReturn.java
+++ b/src/main/java/org/apache/sysml/runtime/matrix/JobReturn.java
@@ -44,8 +44,7 @@ public class JobReturn
                }
        }
 
-       public JobReturn(MatrixCharacteristics[] sts, OutputInfo[] infos,
-                       boolean success) throws DMLRuntimeException {
+       public JobReturn(MatrixCharacteristics[] sts, OutputInfo[] infos, 
boolean success) {
                successful = success;
                metadata = new MetaDataFormat[sts.length];
                for (int i = 0; i < sts.length; i++) {
@@ -53,7 +52,7 @@ public class JobReturn
                }
        }
 
-       public JobReturn(MatrixCharacteristics sts, OutputInfo info, boolean 
success) throws DMLRuntimeException {
+       public JobReturn(MatrixCharacteristics sts, OutputInfo info, boolean 
success) {
                successful = success;
                metadata = new MetaDataFormat[1];
                metadata[0] = new MetaDataFormat(sts, info, 
OutputInfo.getMatchingInputInfo(info));
@@ -65,7 +64,7 @@ public class JobReturn
                metadata[0] = new MetaDataNumItemsByEachReducer(mc, items, 
partition0, number0s);
        }
 
-       public boolean checkReturnStatus() throws DMLRuntimeException {
+       public boolean checkReturnStatus() {
                if( !successful )
                        throw new DMLRuntimeException("Error in executing the 
DML program.");
                return successful;

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/matrix/MatrixCharacteristics.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/matrix/MatrixCharacteristics.java 
b/src/main/java/org/apache/sysml/runtime/matrix/MatrixCharacteristics.java
index 0fd9f22..1443a8c 100644
--- a/src/main/java/org/apache/sysml/runtime/matrix/MatrixCharacteristics.java
+++ b/src/main/java/org/apache/sysml/runtime/matrix/MatrixCharacteristics.java
@@ -25,7 +25,6 @@ import java.util.Arrays;
 import java.util.HashMap;
 
 import org.apache.sysml.lops.MMTSJ.MMTSJType;
-import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.instructions.mr.AggregateBinaryInstruction;
 import org.apache.sysml.runtime.instructions.mr.AggregateInstruction;
 import org.apache.sysml.runtime.instructions.mr.AggregateUnaryInstruction;
@@ -233,15 +232,11 @@ public class MatrixCharacteristics implements Serializable
                        || (nonZero < numRows*numColumns - singleBlk);
        }
        
-       public static void reorg(MatrixCharacteristics dim, ReorgOperator op, 
-                       MatrixCharacteristics dimOut) throws DMLRuntimeException
-       {
+       public static void reorg(MatrixCharacteristics dim, ReorgOperator op, 
MatrixCharacteristics dimOut) {
                op.fn.computeDimension(dim, dimOut);
        }
        
-       public static void aggregateUnary(MatrixCharacteristics dim, 
AggregateUnaryOperator op, 
-                       MatrixCharacteristics dimOut) throws DMLRuntimeException
-       {
+       public static void aggregateUnary(MatrixCharacteristics dim, 
AggregateUnaryOperator op, MatrixCharacteristics dimOut) {
                op.indexFn.computeDimension(dim, dimOut);
        }
        
@@ -252,9 +247,7 @@ public class MatrixCharacteristics implements Serializable
                dimOut.set(dim1.numRows, dim2.numColumns, dim1.numRowsPerBlock, 
dim2.numColumnsPerBlock);
        }
        
-       public static void computeDimension(HashMap<Byte, 
MatrixCharacteristics> dims, MRInstruction ins) 
-               throws DMLRuntimeException
-       {
+       public static void computeDimension(HashMap<Byte, 
MatrixCharacteristics> dims, MRInstruction ins) {
                MatrixCharacteristics dimOut=dims.get(ins.output);
                if(dimOut==null)
                {

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/matrix/SortMR.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/matrix/SortMR.java 
b/src/main/java/org/apache/sysml/runtime/matrix/SortMR.java
index d989721..d8b7aed 100644
--- a/src/main/java/org/apache/sysml/runtime/matrix/SortMR.java
+++ b/src/main/java/org/apache/sysml/runtime/matrix/SortMR.java
@@ -47,7 +47,6 @@ import org.apache.sysml.conf.ConfigurationManager;
 import org.apache.sysml.conf.DMLConfig;
 import org.apache.sysml.lops.Lop;
 import org.apache.sysml.lops.SortKeys;
-import org.apache.sysml.runtime.DMLRuntimeException;
 import 
org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer;
 import org.apache.sysml.runtime.instructions.InstructionUtils;
 import org.apache.sysml.runtime.instructions.MRJobInstruction;
@@ -308,9 +307,7 @@ public class SortMR
                }
        }
 
-       public static MRInstruction parseSortInstruction(String str) 
-               throws DMLRuntimeException 
-       {
+       public static MRInstruction parseSortInstruction(String str) {
                SortKeys.OperationTypes otype = 
SortMR.getSortInstructionType(str);
                if( otype != SortKeys.OperationTypes.Indexes )
                        return (MRInstruction) 
UnaryInstruction.parseInstruction(str);

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/matrix/data/CM_N_COVCell.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/matrix/data/CM_N_COVCell.java 
b/src/main/java/org/apache/sysml/runtime/matrix/data/CM_N_COVCell.java
index 65ef6ec..a2b17ec 100644
--- a/src/main/java/org/apache/sysml/runtime/matrix/data/CM_N_COVCell.java
+++ b/src/main/java/org/apache/sysml/runtime/matrix/data/CM_N_COVCell.java
@@ -51,21 +51,18 @@ public class CM_N_COVCell extends MatrixValue implements 
WritableComparable
        
        @Override
        public MatrixValue aggregateUnaryOperations(AggregateUnaryOperator op,
-                       MatrixValue result, int brlen, int bclen, MatrixIndexes 
indexesIn)
-                       throws DMLRuntimeException {
+                       MatrixValue result, int brlen, int bclen, MatrixIndexes 
indexesIn) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
        @Override
        public MatrixValue binaryOperations(BinaryOperator op,
-                       MatrixValue thatValue, MatrixValue result)
-                       throws DMLRuntimeException {
+                       MatrixValue thatValue, MatrixValue result) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
        @Override
-       public void binaryOperationsInPlace(BinaryOperator op, MatrixValue 
thatValue)
-                       throws DMLRuntimeException {
+       public void binaryOperationsInPlace(BinaryOperator op, MatrixValue 
thatValue) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
@@ -101,15 +98,13 @@ public class CM_N_COVCell extends MatrixValue implements 
WritableComparable
 
        @Override
        public void incrementalAggregate(AggregateOperator aggOp,
-                       MatrixValue correction, MatrixValue newWithCorrection)
-                       throws DMLRuntimeException {
+                       MatrixValue correction, MatrixValue newWithCorrection) {
                throw new RuntimeException("operation not supported for 
CM_N_COVCell");
        }
        
        @Override
        public void incrementalAggregate(AggregateOperator aggOp,
-                       MatrixValue newWithCorrection)
-                       throws DMLRuntimeException {
+                       MatrixValue newWithCorrection) {
                throw new RuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
@@ -125,8 +120,7 @@ public class CM_N_COVCell extends MatrixValue implements 
WritableComparable
 
        @Override
        public MatrixValue reorgOperations(ReorgOperator op, MatrixValue result,
-                       int startRow, int startColumn, int length)
-                       throws DMLRuntimeException {
+                       int startRow, int startColumn, int length) {
                throw new RuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
@@ -146,8 +140,7 @@ public class CM_N_COVCell extends MatrixValue implements 
WritableComparable
        public void reset(int rl, int cl, double v) {}
 
        @Override
-       public MatrixValue scalarOperations(ScalarOperator op, MatrixValue 
result)
-                       throws DMLRuntimeException {
+       public MatrixValue scalarOperations(ScalarOperator op, MatrixValue 
result) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
@@ -157,8 +150,7 @@ public class CM_N_COVCell extends MatrixValue implements 
WritableComparable
        }
 
        @Override
-       public MatrixValue unaryOperations(UnaryOperator op, MatrixValue result)
-                       throws DMLRuntimeException {
+       public MatrixValue unaryOperations(UnaryOperator op, MatrixValue 
result) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
@@ -230,72 +222,62 @@ public class CM_N_COVCell extends MatrixValue implements 
WritableComparable
        }
 
        @Override
-       public MatrixValue zeroOutOperations(MatrixValue result, IndexRange 
range, boolean complementary)
-                       throws DMLRuntimeException {
+       public MatrixValue zeroOutOperations(MatrixValue result, IndexRange 
range, boolean complementary) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
        @Override
        public void ctableOperations(Operator op, MatrixValue that,
-                       MatrixValue that2, CTableMap resultMap, MatrixBlock 
resultBlock)
-                       throws DMLRuntimeException {
+                       MatrixValue that2, CTableMap resultMap, MatrixBlock 
resultBlock) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
        @Override
        public void ctableOperations(Operator op, MatrixValue that,
-                       double scalarThat2, boolean ignoreZeros, CTableMap 
resultMap, MatrixBlock resultBlock)
-                       throws DMLRuntimeException {
+                       double scalarThat2, boolean ignoreZeros, CTableMap 
resultMap, MatrixBlock resultBlock) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
        @Override
        public void ctableOperations(Operator op, double scalarThat,
-                       double scalarThat2, CTableMap resultMap, MatrixBlock 
resultBlock)
-                       throws DMLRuntimeException {
+                       double scalarThat2, CTableMap resultMap, MatrixBlock 
resultBlock) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
        
        @Override
        public void ctableOperations(Operator op, MatrixIndexes ix1, double 
scalarThat, boolean left, int brlen,
-                       CTableMap resultMap, MatrixBlock resultBlock)
-                       throws DMLRuntimeException {
+                       CTableMap resultMap, MatrixBlock resultBlock) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
        @Override
        public void ctableOperations(Operator op, double scalarThat,
-                       MatrixValue that2, CTableMap resultMap, MatrixBlock 
resultBlock)
-                       throws DMLRuntimeException {
+                       MatrixValue that2, CTableMap resultMap, MatrixBlock 
resultBlock) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
        @Override
        public void slice(ArrayList<IndexedMatrixValue> outlist,
                        IndexRange range, int rowCut, int colCut, int 
blockRowFactor,
-                       int blockColFactor, int boundaryRlen, int boundaryClen)
-                       throws DMLRuntimeException {
+                       int blockColFactor, int boundaryRlen, int boundaryClen) 
{
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
        
        @Override
-       public MatrixValue replaceOperations(MatrixValue result, double 
pattern, double replacement)
-                       throws DMLRuntimeException {
+       public MatrixValue replaceOperations(MatrixValue result, double 
pattern, double replacement) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
        @Override
        public MatrixValue aggregateUnaryOperations(AggregateUnaryOperator op,
                        MatrixValue result, int blockingFactorRow, int 
blockingFactorCol,
-                       MatrixIndexes indexesIn, boolean inCP)
-                       throws DMLRuntimeException {
+                       MatrixIndexes indexesIn, boolean inCP) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 
        @Override
        public void append(MatrixValue valueIn2, ArrayList<IndexedMatrixValue> 
outlist,
-                       int blockRowFactor, int blockColFactor, boolean cbind, 
boolean m2IsLast, int nextNCol)
-       throws DMLRuntimeException   {
+                       int blockRowFactor, int blockColFactor, boolean cbind, 
boolean m2IsLast, int nextNCol) {
                throw new DMLRuntimeException("operation not supported for 
CM_N_COVCell");
        }
 }

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/matrix/data/ConvolutionParameters.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/matrix/data/ConvolutionParameters.java 
b/src/main/java/org/apache/sysml/runtime/matrix/data/ConvolutionParameters.java
index e32b7bc..99d21a2 100644
--- 
a/src/main/java/org/apache/sysml/runtime/matrix/data/ConvolutionParameters.java
+++ 
b/src/main/java/org/apache/sysml/runtime/matrix/data/ConvolutionParameters.java
@@ -49,7 +49,7 @@ public class ConvolutionParameters implements Serializable
        
        public ConvolutionParameters(long N, long C, long H, long W,
                        long K, long R, long S, long stride_h, long stride_w, 
-                       long pad_h, long pad_w, int numThreads) throws 
DMLRuntimeException {
+                       long pad_h, long pad_w, int numThreads) {
                this.N = convertToInt(N);
                this.C = convertToInt(C);
                this.H = convertToInt(H);
@@ -100,7 +100,7 @@ public class ConvolutionParameters implements Serializable
                this.numThreads = numThreads;
        }
        
-       private static int convertToInt(long val) throws DMLRuntimeException {
+       private static int convertToInt(long val) {
                if( val > Integer.MAX_VALUE )
                        throw new DMLRuntimeException("The value for 
ConvolutionParameters is too large:" + val);
                return (int) val;
@@ -123,7 +123,7 @@ public class ConvolutionParameters implements Serializable
        }
        
        public void setIfUnknown(Hop N, Hop C, Hop H, Hop W,
-                       Hop K, Hop R, Hop S, Hop stride_h, Hop stride_w, Hop 
pad_h, Hop pad_w, int numThreads) throws DMLRuntimeException {
+                       Hop K, Hop R, Hop S, Hop stride_h, Hop stride_w, Hop 
pad_h, Hop pad_w, int numThreads) {
                if(this.N < 0) this.N = 
convertToInt(Hop.computeSizeInformation(N));
                if(this.C < 0) this.C = 
convertToInt(Hop.computeSizeInformation(C));
                if(this.H < 0) this.H = 
convertToInt(Hop.computeSizeInformation(H));

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/matrix/data/CudaSupportFunctions.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/matrix/data/CudaSupportFunctions.java 
b/src/main/java/org/apache/sysml/runtime/matrix/data/CudaSupportFunctions.java
index 52822fe..80e9bec 100644
--- 
a/src/main/java/org/apache/sysml/runtime/matrix/data/CudaSupportFunctions.java
+++ 
b/src/main/java/org/apache/sysml/runtime/matrix/data/CudaSupportFunctions.java
@@ -17,7 +17,6 @@
  */
 package org.apache.sysml.runtime.matrix.data;
 
-import org.apache.sysml.runtime.DMLRuntimeException;
 import org.apache.sysml.runtime.instructions.gpu.context.GPUContext;
 
 import jcuda.jcublas.cublasHandle;
@@ -81,7 +80,7 @@ public interface CudaSupportFunctions {
        public int cusparsecsr2dense(cusparseHandle handle, int m, int n, 
cusparseMatDescr descrA, jcuda.Pointer csrValA, jcuda.Pointer csrRowPtrA, 
jcuda.Pointer csrColIndA, jcuda.Pointer A, int lda) ;
        public int cusparsedense2csr(cusparseHandle handle, int m, int n, 
cusparseMatDescr descrA, jcuda.Pointer A, int lda, jcuda.Pointer nnzPerRow, 
jcuda.Pointer csrValA, jcuda.Pointer csrRowPtrA, jcuda.Pointer csrColIndA);
        public int cusparsennz(cusparseHandle handle, int dirA, int m, int n, 
cusparseMatDescr descrA, jcuda.Pointer A, int lda, jcuda.Pointer nnzPerRowCol, 
jcuda.Pointer nnzTotalDevHostPtr);
-       public void deviceToHost(GPUContext gCtx, Pointer src, double [] dest, 
String instName, boolean isEviction) throws DMLRuntimeException;
-       public void hostToDevice(GPUContext gCtx, double [] src,  Pointer dest, 
String instName) throws DMLRuntimeException;
+       public void deviceToHost(GPUContext gCtx, Pointer src, double [] dest, 
String instName, boolean isEviction);
+       public void hostToDevice(GPUContext gCtx, double [] src,  Pointer dest, 
String instName);
        
 }

http://git-wip-us.apache.org/repos/asf/systemml/blob/1f323976/src/main/java/org/apache/sysml/runtime/matrix/data/DoublePrecisionCudaSupportFunctions.java
----------------------------------------------------------------------
diff --git 
a/src/main/java/org/apache/sysml/runtime/matrix/data/DoublePrecisionCudaSupportFunctions.java
 
b/src/main/java/org/apache/sysml/runtime/matrix/data/DoublePrecisionCudaSupportFunctions.java
index f31806d..8b7a06f 100644
--- 
a/src/main/java/org/apache/sysml/runtime/matrix/data/DoublePrecisionCudaSupportFunctions.java
+++ 
b/src/main/java/org/apache/sysml/runtime/matrix/data/DoublePrecisionCudaSupportFunctions.java
@@ -163,7 +163,7 @@ public class DoublePrecisionCudaSupportFunctions implements 
CudaSupportFunctions
        }
 
        @Override
-       public void deviceToHost(GPUContext gCtx, Pointer src, double[] dest, 
String instName, boolean isEviction) throws DMLRuntimeException {
+       public void deviceToHost(GPUContext gCtx, Pointer src, double[] dest, 
String instName, boolean isEviction) {
                long t1 = DMLScript.FINEGRAINED_STATISTICS  && instName != 
null? System.nanoTime() : 0;
                if(src == null)
                        throw new DMLRuntimeException("The source pointer in 
deviceToHost is null");
@@ -178,7 +178,7 @@ public class DoublePrecisionCudaSupportFunctions implements 
CudaSupportFunctions
        }
 
        @Override
-       public void hostToDevice(GPUContext gCtx, double[] src, Pointer dest, 
String instName) throws DMLRuntimeException {
+       public void hostToDevice(GPUContext gCtx, double[] src, Pointer dest, 
String instName) {
                long t1 = DMLScript.FINEGRAINED_STATISTICS  && instName != 
null? System.nanoTime() : 0;
                cudaMemcpy(dest, Pointer.to(src), 
((long)src.length)*Sizeof.DOUBLE, cudaMemcpyHostToDevice);
                if(DMLScript.FINEGRAINED_STATISTICS && instName != null) 

Reply via email to