Repository: incubator-systemml
Updated Branches:
  refs/heads/master 085009a36 -> f9fbcb76f


[HOTFIX] Fix build of scala examples and function call error handling

Project: http://git-wip-us.apache.org/repos/asf/incubator-systemml/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-systemml/commit/f9fbcb76
Tree: http://git-wip-us.apache.org/repos/asf/incubator-systemml/tree/f9fbcb76
Diff: http://git-wip-us.apache.org/repos/asf/incubator-systemml/diff/f9fbcb76

Branch: refs/heads/master
Commit: f9fbcb76f2ad280e3a8a3f26d6d9221ccbaeaae0
Parents: 085009a
Author: Matthias Boehm <mbo...@us.ibm.com>
Authored: Thu Sep 15 11:12:21 2016 +0200
Committer: Matthias Boehm <mbo...@us.ibm.com>
Committed: Thu Sep 15 11:12:21 2016 +0200

----------------------------------------------------------------------
 .../instructions/cp/FunctionCallCPInstruction.java        |  2 +-
 .../org/apache/sysml/api/ml/BaseSystemMLClassifier.scala  | 10 +++++-----
 .../org/apache/sysml/api/ml/BaseSystemMLRegressor.scala   |  8 ++++----
 3 files changed, 10 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/f9fbcb76/src/main/java/org/apache/sysml/runtime/instructions/cp/FunctionCallCPInstruction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/cp/FunctionCallCPInstruction.java b/src/main/java/org/apache/sysml/runtime/instructions/cp/FunctionCallCPInstruction.java
index 529bd25..917a7f2 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/cp/FunctionCallCPInstruction.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/cp/FunctionCallCPInstruction.java
@@ -151,7 +151,7 @@ public class FunctionCallCPInstruction extends CPInstruction
                                CPOperand operand = _boundInputParamOperands.get(i);
                                String varname = operand.getName();
                                //error handling non-existing variables
-                               if( !operand.isLiteral() && ec.containsVariable(varname) ) {
+                               if( !operand.isLiteral() && !ec.containsVariable(varname) ) {
                                        throw new DMLRuntimeException("Input variable '"+varname+"' not existing on call of " + 
                                                        DMLProgram.constructFunctionKey(_namespace, _functionName) + " (line "+getLineNum()+").");
                                }
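
The one-character change above flips the guard: previously the "not existing" error fired when the input variable was present, and with the added negation it fires only for non-literal inputs that are missing from the execution context. A minimal sketch of the intended check, in Scala with a plain Map standing in for the ExecutionContext (hypothetical helper, not the project's code):

    // Hypothetical stand-in for the guard in FunctionCallCPInstruction:
    // throw only when a non-literal bound input is NOT present in the symbol table.
    def checkBoundInput(varname: String, isLiteral: Boolean, variables: Map[String, Any]): Unit =
      if (!isLiteral && !variables.contains(varname))
        throw new RuntimeException(s"Input variable '$varname' not existing on call of function")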

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/f9fbcb76/src/main/scala/org/apache/sysml/api/ml/BaseSystemMLClassifier.scala
----------------------------------------------------------------------
diff --git a/src/main/scala/org/apache/sysml/api/ml/BaseSystemMLClassifier.scala b/src/main/scala/org/apache/sysml/api/ml/BaseSystemMLClassifier.scala
index c9c05e0..7415109 100644
--- a/src/main/scala/org/apache/sysml/api/ml/BaseSystemMLClassifier.scala
+++ b/src/main/scala/org/apache/sysml/api/ml/BaseSystemMLClassifier.scala
@@ -28,7 +28,7 @@ import org.apache.spark.ml.param.{ Params, Param, ParamMap, DoubleParam }
 import org.apache.sysml.runtime.matrix.MatrixCharacteristics
 import org.apache.sysml.runtime.matrix.data.MatrixBlock
 import org.apache.sysml.runtime.DMLRuntimeException
-import org.apache.sysml.runtime.instructions.spark.utils.{ RDDConverterUtilsExt => RDDConverterUtils }
+import org.apache.sysml.runtime.instructions.spark.utils.{ RDDConverterUtilsExt, RDDConverterUtils }
 import org.apache.sysml.api.mlcontext._
 import org.apache.sysml.api.mlcontext.ScriptFactory._
 import org.apache.spark.sql._
@@ -110,7 +110,7 @@ trait BaseSystemMLClassifier extends BaseSystemMLEstimator {
     val isSingleNode = false
     val ml = new MLContext(df.rdd.sparkContext)
     val mcXin = new MatrixCharacteristics()
-    val Xin = RDDConverterUtils.vectorDataFrameToBinaryBlock(sc, df.asInstanceOf[DataFrame], mcXin, false, "features")
+    val Xin = RDDConverterUtils.dataFrameToBinaryBlock(sc, df.asInstanceOf[DataFrame], mcXin, false, true)
     val revLabelMapping = new java.util.HashMap[Int, String]
     val yin = PredictionUtils.fillLabelMapping(df, revLabelMapping)
     val ret = getTrainingScript(isSingleNode)
@@ -142,7 +142,7 @@ trait BaseSystemMLClassifierModel extends BaseSystemMLEstimatorModel {
     val isSingleNode = false
     val ml = new MLContext(sc)
     val mcXin = new MatrixCharacteristics()
-    val Xin = RDDConverterUtils.vectorDataFrameToBinaryBlock(df.rdd.sparkContext, df.asInstanceOf[DataFrame], mcXin, false, "features")
+    val Xin = RDDConverterUtils.dataFrameToBinaryBlock(df.rdd.sparkContext, df.asInstanceOf[DataFrame], mcXin, false, true)
     val script = getPredictionScript(mloutput, isSingleNode)
     val Xin_bin = new BinaryBlockMatrix(Xin, mcXin)
     val modelPredict = ml.execute(script._1.in(script._2, Xin_bin))
@@ -150,11 +150,11 @@ trait BaseSystemMLClassifierModel extends BaseSystemMLEstimatorModel {
     val predictedDF = PredictionUtils.updateLabels(isSingleNode, predLabelOut.getDataFrame("Prediction"), null, "C1", labelMapping).select("__INDEX", "prediction")
     if(outputProb) {
       val prob = modelPredict.getDataFrame(probVar, true).withColumnRenamed("C1", "probability").select("__INDEX", "probability")
-      val dataset = RDDConverterUtils.addIDToDataFrame(df.asInstanceOf[DataFrame], df.sqlContext, "__INDEX")
+      val dataset = RDDConverterUtilsExt.addIDToDataFrame(df.asInstanceOf[DataFrame], df.sqlContext, "__INDEX")
       return PredictionUtils.joinUsingID(dataset, PredictionUtils.joinUsingID(prob, predictedDF))
     }
     }
     else {
-      val dataset = RDDConverterUtils.addIDToDataFrame(df.asInstanceOf[DataFrame], df.sqlContext, "__INDEX")
+      val dataset = RDDConverterUtilsExt.addIDToDataFrame(df.asInstanceOf[DataFrame], df.sqlContext, "__INDEX")
       return PredictionUtils.joinUsingID(dataset, predictedDF)
     }
     

http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/f9fbcb76/src/main/scala/org/apache/sysml/api/ml/BaseSystemMLRegressor.scala
----------------------------------------------------------------------
diff --git a/src/main/scala/org/apache/sysml/api/ml/BaseSystemMLRegressor.scala b/src/main/scala/org/apache/sysml/api/ml/BaseSystemMLRegressor.scala
index 73bf9be..ed0fabb 100644
--- a/src/main/scala/org/apache/sysml/api/ml/BaseSystemMLRegressor.scala
+++ b/src/main/scala/org/apache/sysml/api/ml/BaseSystemMLRegressor.scala
@@ -29,7 +29,7 @@ import org.apache.spark.ml.param.{ Params, Param, ParamMap, DoubleParam }
 import org.apache.sysml.runtime.matrix.MatrixCharacteristics
 import org.apache.sysml.runtime.matrix.data.MatrixBlock
 import org.apache.sysml.runtime.DMLRuntimeException
-import org.apache.sysml.runtime.instructions.spark.utils.{ RDDConverterUtilsExt => RDDConverterUtils }
+import org.apache.sysml.runtime.instructions.spark.utils.{ RDDConverterUtilsExt, RDDConverterUtils }
 import org.apache.sysml.api.mlcontext._
 import org.apache.sysml.api.mlcontext.ScriptFactory._
 
@@ -47,7 +47,7 @@ trait BaseSystemMLRegressor extends BaseSystemMLEstimator {
     val isSingleNode = false
     val ml = new MLContext(df.rdd.sparkContext)
     val mcXin = new MatrixCharacteristics()
-    val Xin = RDDConverterUtils.vectorDataFrameToBinaryBlock(sc, df.asInstanceOf[DataFrame], mcXin, false, "features")
+    val Xin = RDDConverterUtils.dataFrameToBinaryBlock(sc, df.asInstanceOf[DataFrame], mcXin, false, true)
     val yin = df.select("label")
     val ret = getTrainingScript(isSingleNode)
     val Xbin = new BinaryBlockMatrix(Xin, mcXin)
@@ -75,12 +75,12 @@ trait BaseSystemMLRegressorModel extends BaseSystemMLEstimatorModel {
     val isSingleNode = false
     val ml = new MLContext(sc)
     val mcXin = new MatrixCharacteristics()
-    val Xin = RDDConverterUtils.vectorDataFrameToBinaryBlock(df.rdd.sparkContext, df.asInstanceOf[DataFrame], mcXin, false, "features")
+    val Xin = RDDConverterUtils.dataFrameToBinaryBlock(df.rdd.sparkContext, df.asInstanceOf[DataFrame], mcXin, false, true)
     val script = getPredictionScript(mloutput, isSingleNode)
     val Xin_bin = new BinaryBlockMatrix(Xin, mcXin)
     val modelPredict = ml.execute(script._1.in(script._2, Xin_bin))
     val predictedDF = modelPredict.getDataFrame(predictionVar).select("__INDEX", "C1").withColumnRenamed("C1", "prediction")
-    val dataset = RDDConverterUtils.addIDToDataFrame(df.asInstanceOf[DataFrame], df.sqlContext, "__INDEX")
+    val dataset = RDDConverterUtilsExt.addIDToDataFrame(df.asInstanceOf[DataFrame], df.sqlContext, "__INDEX")
     return PredictionUtils.joinUsingID(dataset, predictedDF)
   }
 }
\ No newline at end of file
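
Taken together, the Scala changes above split the converter usage: the generic RDDConverterUtils.dataFrameToBinaryBlock now performs the DataFrame-to-binary-block conversion (replacing the removed vectorDataFrameToBinaryBlock call), while RDDConverterUtilsExt is kept only for addIDToDataFrame. A condensed sketch of the resulting pattern, reusing the names from the diff (df, sc, mcXin) and therefore not runnable on its own:

    import org.apache.sysml.runtime.instructions.spark.utils.{ RDDConverterUtilsExt, RDDConverterUtils }
    import org.apache.sysml.runtime.matrix.MatrixCharacteristics

    val mcXin = new MatrixCharacteristics()
    // Generic converter builds the binary-block input matrix from the feature DataFrame.
    val Xin = RDDConverterUtils.dataFrameToBinaryBlock(sc, df.asInstanceOf[DataFrame], mcXin, false, true)
    // The Ext utility is still used to append the "__INDEX" id column before joining predictions.
    val dataset = RDDConverterUtilsExt.addIDToDataFrame(df.asInstanceOf[DataFrame], df.sqlContext, "__INDEX")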
