Repository: spark
Updated Branches:
  refs/heads/master de8a03e68 -> 3a43ae7c0


[SPARK-18613][ML] make spark.mllib LDA dependencies in spark.ml LDA private

## What changes were proposed in this pull request?
The spark.ml *LDAModel classes exposed the underlying spark.mllib LDA models via protected 
methods. This change makes those methods private to the `clustering` package.
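
For illustration, here is a minimal standalone sketch (hypothetical package and class names, not the Spark sources) of what the visibility change means: a `private[clustering]` member remains callable from anywhere inside the `clustering` package, but it no longer appears in the protected/public API surface or the generated docs.

```scala
// Standalone sketch of Scala package-qualified visibility (hypothetical names).
package org.example.clustering {
  abstract class Model {
    // before: protected def oldLocalModel: String
    private[clustering] def oldLocalModel: String  // after: visible only within org.example.clustering
  }

  class LocalModel extends Model {
    override private[clustering] def oldLocalModel: String = "local"
  }

  object SamePackageCaller {
    // Compiles: the caller lives in the same package as the declaration.
    def peek(m: Model): String = m.oldLocalModel
  }
}

package org.example.user {
  object OutsideCaller {
    // Would not compile if uncommented: oldLocalModel is not visible outside the package.
    // def peek(m: org.example.clustering.Model): String = m.oldLocalModel
  }
}
```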

## How was this patch tested?
```
build/sbt doc  # "mllib.clustering" no longer appears in the docs for *LDA* classes
build/sbt compile  # compiles
build/sbt
> mllib/testOnly   # tests pass
```

Author: sueann <sue...@databricks.com>

Closes #16860 from sueann/SPARK-18613.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3a43ae7c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3a43ae7c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3a43ae7c

Branch: refs/heads/master
Commit: 3a43ae7c0bbce8eda98f50a97a0138f860197a98
Parents: de8a03e
Author: sueann <sue...@databricks.com>
Authored: Fri Feb 10 11:50:23 2017 -0800
Committer: Joseph K. Bradley <jos...@databricks.com>
Committed: Fri Feb 10 11:50:23 2017 -0800

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/ml/clustering/LDA.scala | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/3a43ae7c/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala b/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala
index 03f4ac5..bbcef35 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala
@@ -418,11 +418,11 @@ abstract class LDAModel private[ml] (
    * If this model was produced by EM, then this local representation may be built lazily.
    */
   @Since("1.6.0")
-  protected def oldLocalModel: OldLocalLDAModel
+  private[clustering] def oldLocalModel: OldLocalLDAModel
 
   /** Returns underlying spark.mllib model, which may be local or distributed */
   @Since("1.6.0")
-  protected def getModel: OldLDAModel
+  private[clustering] def getModel: OldLDAModel
 
   private[ml] def getEffectiveDocConcentration: Array[Double] = getModel.docConcentration.toArray
 
@@ -563,7 +563,7 @@ abstract class LDAModel private[ml] (
 class LocalLDAModel private[ml] (
     uid: String,
     vocabSize: Int,
-    @Since("1.6.0") override protected val oldLocalModel: OldLocalLDAModel,
+    @Since("1.6.0") override private[clustering] val oldLocalModel: OldLocalLDAModel,
     sparkSession: SparkSession)
   extends LDAModel(uid, vocabSize, sparkSession) {
 
@@ -573,7 +573,7 @@ class LocalLDAModel private[ml] (
     copyValues(copied, extra).setParent(parent).asInstanceOf[LocalLDAModel]
   }
 
-  override protected def getModel: OldLDAModel = oldLocalModel
+  override private[clustering] def getModel: OldLDAModel = oldLocalModel
 
   @Since("1.6.0")
   override def isDistributed: Boolean = false
@@ -656,14 +656,14 @@ class DistributedLDAModel private[ml] (
     private var oldLocalModelOption: Option[OldLocalLDAModel])
   extends LDAModel(uid, vocabSize, sparkSession) {
 
-  override protected def oldLocalModel: OldLocalLDAModel = {
+  override private[clustering] def oldLocalModel: OldLocalLDAModel = {
     if (oldLocalModelOption.isEmpty) {
       oldLocalModelOption = Some(oldDistributedModel.toLocal)
     }
     oldLocalModelOption.get
   }
 
-  override protected def getModel: OldLDAModel = oldDistributedModel
+  override private[clustering] def getModel: OldLDAModel = oldDistributedModel
 
   /**
    * Convert this distributed model to a local representation.  This discards info about the

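As a side note on the unchanged logic visible in the last hunk, `DistributedLDAModel.oldLocalModel` builds the local representation lazily and caches it, so the distributed-to-local conversion runs at most once. A minimal sketch of that pattern (hypothetical types, not the Spark classes):

```scala
// Sketch of the lazy-conversion-with-caching pattern (hypothetical names).
class LocalModel
class DistributedModel {
  // Stands in for the costly distributed-to-local conversion.
  def toLocal: LocalModel = new LocalModel
}

class ModelWrapper(distributed: DistributedModel) {
  private var localOption: Option[LocalModel] = None

  // First call performs the conversion; later calls reuse the cached result.
  def local: LocalModel = {
    if (localOption.isEmpty) {
      localOption = Some(distributed.toLocal)
    }
    localOption.get
  }
}
```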
