spark git commit: [SPARK-10231] [MLLIB] update @Since annotation for mllib.classification

2015-08-25 Thread dbtsai
Repository: spark
Updated Branches:
  refs/heads/branch-1.5 c740f5dd2 -> 5a32ed75c


[SPARK-10231] [MLLIB] update @Since annotation for mllib.classification

Update `Since` annotation in `mllib.classification`:

1. add version to classes, objects, constructors, and public variables declared in constructors (see the sketch after this list)
2. correct some versions
3. remove `Since` on `toString`
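
A rough sketch of the annotation pattern described above (hypothetical class and field names, not code from this patch; @Since is Spark's internal annotation, so a class like this would live inside Spark's own source tree):

package org.apache.spark.mllib.classification

import org.apache.spark.annotation.Since
import org.apache.spark.mllib.linalg.Vector

@Since("1.0.0")                                  // version on the class itself
class ExampleModel @Since("1.2.0") (             // version on the primary constructor
    @Since("1.0.0") val weights: Vector,         // versions on public vals declared in the constructor
    @Since("1.2.0") val numClasses: Int) {

  // no @Since on toString: overriding java.lang.Object adds no new public API
  override def toString: String = s"ExampleModel(numClasses=$numClasses)"
}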

MechCoder dbtsai

Author: Xiangrui Meng 

Closes #8421 from mengxr/SPARK-10231 and squashes the following commits:

b2dce80 [Xiangrui Meng] update @Since annotation for mllib.classification

(cherry picked from commit 16a2be1a84c0a274a60c0a584faaf58b55d4942b)
Signed-off-by: DB Tsai 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5a32ed75
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5a32ed75
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5a32ed75

Branch: refs/heads/branch-1.5
Commit: 5a32ed75c939dc42886ea940aba2b14b89e9f40e
Parents: c740f5d
Author: Xiangrui Meng 
Authored: Tue Aug 25 12:16:23 2015 -0700
Committer: DB Tsai 
Committed: Tue Aug 25 12:16:41 2015 -0700

--
 .../classification/ClassificationModel.scala    |  7 ++---
 .../classification/LogisticRegression.scala     | 20 +-
 .../spark/mllib/classification/NaiveBayes.scala | 28 +++-
 .../apache/spark/mllib/classification/SVM.scala | 15 +++
 .../StreamingLogisticRegressionWithSGD.scala    |  9 ++-
 5 files changed, 58 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/5a32ed75/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
--
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
index a29b425..85a4132 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
@@ -30,6 +30,7 @@ import org.apache.spark.rdd.RDD
  * belongs. The categories are represented by double values: 0.0, 1.0, 2.0, etc.
  */
 @Experimental
+@Since("0.8.0")
 trait ClassificationModel extends Serializable {
   /**
    * Predict values for the given data set using the model trained.
@@ -37,7 +38,7 @@ trait ClassificationModel extends Serializable {
    * @param testData RDD representing data points to be predicted
    * @return an RDD[Double] where each entry contains the corresponding prediction
    */
-  @Since("0.8.0")
+  @Since("1.0.0")
   def predict(testData: RDD[Vector]): RDD[Double]
 
   /**
@@ -46,7 +47,7 @@ trait ClassificationModel extends Serializable {
    * @param testData array representing a single data point
    * @return predicted category from the trained model
    */
-  @Since("0.8.0")
+  @Since("1.0.0")
   def predict(testData: Vector): Double
 
   /**
@@ -54,7 +55,7 @@ trait ClassificationModel extends Serializable {
    * @param testData JavaRDD representing data points to be predicted
    * @return a JavaRDD[java.lang.Double] where each entry contains the corresponding prediction
    */
-  @Since("0.8.0")
+  @Since("1.0.0")
   def predict(testData: JavaRDD[Vector]): JavaRDD[java.lang.Double] =
     predict(testData.rdd).toJavaRDD().asInstanceOf[JavaRDD[java.lang.Double]]
 }
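
For context only (not part of the patch): a minimal usage sketch of the predict overloads documented above, assuming Spark 1.x MLlib and an already-created SparkContext `sc` (as in spark-shell):

import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint

// Tiny toy training set; in practice this would come from real data.
val training = sc.parallelize(Seq(
  LabeledPoint(0.0, Vectors.dense(0.0, 1.0)),
  LabeledPoint(1.0, Vectors.dense(1.0, 0.0))))

// LogisticRegressionModel mixes in ClassificationModel, so both predict overloads apply.
val model = new LogisticRegressionWithLBFGS().run(training)
val one: Double = model.predict(Vectors.dense(1.0, 0.0))    // predict(testData: Vector): Double
val many = model.predict(training.map(_.features))          // predict(testData: RDD[Vector]): RDD[Double]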

http://git-wip-us.apache.org/repos/asf/spark/blob/5a32ed75/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
--
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
index e03e662..5ceff5b 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
@@ -41,11 +41,12 @@ import org.apache.spark.rdd.RDD
  *   Multinomial Logistic Regression. By default, it is binary logistic regression
  *   so numClasses will be set to 2.
  */
-class LogisticRegressionModel (
-    override val weights: Vector,
-    override val intercept: Double,
-    val numFeatures: Int,
-    val numClasses: Int)
+@Since("0.8.0")
+class LogisticRegressionModel @Since("1.3.0") (
+    @Since("1.0.0") override val weights: Vector,
+    @Since("1.0.0") override val intercept: Double,
+    @Since("1.3.0") val numFeatures: Int,
+    @Since("1.3.0") val numClasses: Int)
   extends GeneralizedLinearModel(weights, intercept) with ClassificationModel with Serializable
   with Saveable with PMMLExportable {
 
@@ -75,6 

spark git commit: [SPARK-10231] [MLLIB] update @Since annotation for mllib.classification

2015-08-25 Thread dbtsai
Repository: spark
Updated Branches:
  refs/heads/master 881208a8e -> 16a2be1a8


[SPARK-10231] [MLLIB] update @Since annotation for mllib.classification

Update `Since` annotation in `mllib.classification`:

1. add version to classes, objects, constructors, and public variables declared in constructors
2. correct some versions
3. remove `Since` on `toString`

MechCoder dbtsai

Author: Xiangrui Meng 

Closes #8421 from mengxr/SPARK-10231 and squashes the following commits:

b2dce80 [Xiangrui Meng] update @Since annotation for mllib.classification


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/16a2be1a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/16a2be1a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/16a2be1a

Branch: refs/heads/master
Commit: 16a2be1a84c0a274a60c0a584faaf58b55d4942b
Parents: 881208a
Author: Xiangrui Meng 
Authored: Tue Aug 25 12:16:23 2015 -0700
Committer: DB Tsai 
Committed: Tue Aug 25 12:16:23 2015 -0700

--
 .../classification/ClassificationModel.scala    |  7 ++---
 .../classification/LogisticRegression.scala     | 20 +-
 .../spark/mllib/classification/NaiveBayes.scala | 28 +++-
 .../apache/spark/mllib/classification/SVM.scala | 15 +++
 .../StreamingLogisticRegressionWithSGD.scala    |  9 ++-
 5 files changed, 58 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/16a2be1a/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
--
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
index a29b425..85a4132 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
@@ -30,6 +30,7 @@ import org.apache.spark.rdd.RDD
  * belongs. The categories are represented by double values: 0.0, 1.0, 2.0, etc.
  */
 @Experimental
+@Since("0.8.0")
 trait ClassificationModel extends Serializable {
   /**
    * Predict values for the given data set using the model trained.
@@ -37,7 +38,7 @@ trait ClassificationModel extends Serializable {
    * @param testData RDD representing data points to be predicted
    * @return an RDD[Double] where each entry contains the corresponding prediction
    */
-  @Since("0.8.0")
+  @Since("1.0.0")
   def predict(testData: RDD[Vector]): RDD[Double]
 
   /**
@@ -46,7 +47,7 @@ trait ClassificationModel extends Serializable {
    * @param testData array representing a single data point
    * @return predicted category from the trained model
    */
-  @Since("0.8.0")
+  @Since("1.0.0")
   def predict(testData: Vector): Double
 
   /**
@@ -54,7 +55,7 @@ trait ClassificationModel extends Serializable {
    * @param testData JavaRDD representing data points to be predicted
    * @return a JavaRDD[java.lang.Double] where each entry contains the corresponding prediction
    */
-  @Since("0.8.0")
+  @Since("1.0.0")
   def predict(testData: JavaRDD[Vector]): JavaRDD[java.lang.Double] =
     predict(testData.rdd).toJavaRDD().asInstanceOf[JavaRDD[java.lang.Double]]
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/16a2be1a/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
--
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
index e03e662..5ceff5b 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
@@ -41,11 +41,12 @@ import org.apache.spark.rdd.RDD
  *   Multinomial Logistic Regression. By default, it is binary logistic regression
  *   so numClasses will be set to 2.
  */
-class LogisticRegressionModel (
-    override val weights: Vector,
-    override val intercept: Double,
-    val numFeatures: Int,
-    val numClasses: Int)
+@Since("0.8.0")
+class LogisticRegressionModel @Since("1.3.0") (
+    @Since("1.0.0") override val weights: Vector,
+    @Since("1.0.0") override val intercept: Double,
+    @Since("1.3.0") val numFeatures: Int,
+    @Since("1.3.0") val numClasses: Int)
   extends GeneralizedLinearModel(weights, intercept) with ClassificationModel with Serializable
   with Saveable with PMMLExportable {
 
@@ -75,6 +76,7 @@ class LogisticRegressionModel (
   /**
    * Constructs a [[LogisticRegressionModel]] with we