Repository: spark
Updated Branches:
  refs/heads/branch-2.0 dd70a115c -> 22b4072e7


[SPARK-16245][ML] model loading backward compatibility for ml.feature.PCA

## What changes were proposed in this pull request?
Add model loading backward compatibility for ml.feature.PCA, so that PCAModel instances saved by Spark 1.6 (which use the old mllib matrix format and have no explainedVariance field) load correctly in Spark 2.0.

## How was this patch tested?
Existing unit tests, plus a manual test that loads models saved by Spark 1.6.

Author: Yanbo Liang <yblia...@gmail.com>

Closes #13937 from yanboliang/spark-16245.

(cherry picked from commit 0df5ce1bc1387a58b33cd185008f4022bd3dcc69)
Signed-off-by: Xiangrui Meng <m...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/22b4072e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/22b4072e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/22b4072e

Branch: refs/heads/branch-2.0
Commit: 22b4072e704f9a68a605e9a4cebf54d2122fe448
Parents: dd70a11
Author: Yanbo Liang <yblia...@gmail.com>
Authored: Tue Jun 28 19:53:07 2016 -0700
Committer: Xiangrui Meng <m...@databricks.com>
Committed: Tue Jun 28 19:53:16 2016 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/ml/feature/PCA.scala   | 18 ++++++++----------
 1 file changed, 8 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/22b4072e/mllib/src/main/scala/org/apache/spark/ml/feature/PCA.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/feature/PCA.scala b/mllib/src/main/scala/org/apache/spark/ml/feature/PCA.scala
index 72167b5..ef8b085 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/feature/PCA.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/feature/PCA.scala
@@ -206,24 +206,22 @@ object PCAModel extends MLReadable[PCAModel] {
     override def load(path: String): PCAModel = {
       val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
 
-      // explainedVariance field is not present in Spark <= 1.6
-      val versionRegex = "([0-9]+)\\.([0-9]+).*".r
-      val hasExplainedVariance = metadata.sparkVersion match {
-        case versionRegex(major, minor) =>
-          major.toInt >= 2 || (major.toInt == 1 && minor.toInt > 6)
-        case _ => false
-      }
+      val versionRegex = "([0-9]+)\\.(.+)".r
+      val versionRegex(major, _) = metadata.sparkVersion
 
       val dataPath = new Path(path, "data").toString
-      val model = if (hasExplainedVariance) {
+      val model = if (major.toInt >= 2) {
         val Row(pc: DenseMatrix, explainedVariance: DenseVector) =
           sparkSession.read.parquet(dataPath)
             .select("pc", "explainedVariance")
             .head()
         new PCAModel(metadata.uid, pc, explainedVariance)
       } else {
-        val Row(pc: DenseMatrix) = sparkSession.read.parquet(dataPath).select("pc").head()
-        new PCAModel(metadata.uid, pc, Vectors.dense(Array.empty[Double]).asInstanceOf[DenseVector])
+        // pc field is the old matrix format in Spark <= 1.6
+        // explainedVariance field is not present in Spark <= 1.6
+        val Row(pc: OldDenseMatrix) = sparkSession.read.parquet(dataPath).select("pc").head()
+        new PCAModel(metadata.uid, pc.asML,
+          Vectors.dense(Array.empty[Double]).asInstanceOf[DenseVector])
       }
       DefaultParamsReader.getAndSetParams(model, metadata)
       model
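
A note on the version gate above: it extracts the major version from metadata.sparkVersion with a Scala regex extractor pattern and branches on it, since the explainedVariance column only exists in models written by Spark >= 2.0. A minimal, self-contained sketch of that idiom, runnable without Spark (the object name and sample version strings are illustrative, not part of the patch):

object SparkVersionGate {
  def main(args: Array[String]): Unit = {
    val versionRegex = "([0-9]+)\\.(.+)".r
    for (version <- Seq("1.6.3", "2.0.0", "2.1.0-SNAPSHOT")) {
      // A pattern val with a regex extractor binds the capture groups;
      // it throws a MatchError if the string does not match.
      val versionRegex(major, _) = version
      val hasExplainedVariance = major.toInt >= 2
      println(s"$version -> explainedVariance present: $hasExplainedVariance")
    }
  }
}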


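The pre-2.0 branch also bridges the matrix types: models saved by Spark <= 1.6 store an org.apache.spark.mllib.linalg.DenseMatrix, while PCAModel in 2.0 expects the new org.apache.spark.ml.linalg types, hence the OldDenseMatrix alias and the asML call. A hedged sketch of that conversion, assuming the Spark 2.0 asML conversion method (the matrix values are made up for illustration):

import org.apache.spark.mllib.linalg.{DenseMatrix => OldDenseMatrix}
import org.apache.spark.ml.linalg.DenseMatrix

object MatrixConversionSketch {
  def main(args: Array[String]): Unit = {
    // A 2x2 matrix in column-major order, in the old mllib format.
    val oldMatrix = new OldDenseMatrix(2, 2, Array(1.0, 3.0, 2.0, 4.0))
    // asML re-wraps the same values in the ml.linalg type introduced in 2.0.
    val newMatrix: DenseMatrix = oldMatrix.asML
    println(newMatrix)
  }
}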