Repository: incubator-predictionio
Updated Branches:
  refs/heads/develop 81b1b18fc -> a92edd983 (forced update)


[PIO-12] Add a rule for checking Scala documentation style

Closes apache/incubator-predictionio#264


Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/a92edd98
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/a92edd98
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/a92edd98

Branch: refs/heads/develop
Commit: a92edd98314928428780ea03ca2ac221c9614a42
Parents: 4bdd1da
Author: Hyukjin Kwon <gurwls...@gmail.com>
Authored: Tue Aug 2 09:12:09 2016 -0700
Committer: Donald Szeto <don...@apache.org>
Committed: Tue Aug 2 09:12:09 2016 -0700

----------------------------------------------------------------------
 .../predictionio/data/storage/EntityMap.scala   |  4 +-
 .../predictionio/data/storage/Utils.scala       | 12 ++--
 .../predictionio/data/view/DataView.scala       |  4 +-
 .../e2/engine/CategoricalNaiveBayes.scala       | 64 +++++++++-----------
 .../predictionio/e2/engine/MarkovChain.scala    | 36 +++++------
 scalastyle-config.xml                           | 15 +++++
 6 files changed, 67 insertions(+), 68 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/a92edd98/data/src/main/scala/org/apache/predictionio/data/storage/EntityMap.scala
----------------------------------------------------------------------
diff --git 
a/data/src/main/scala/org/apache/predictionio/data/storage/EntityMap.scala 
b/data/src/main/scala/org/apache/predictionio/data/storage/EntityMap.scala
index 81ce6bf..6d51353 100644
--- a/data/src/main/scala/org/apache/predictionio/data/storage/EntityMap.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/EntityMap.scala
@@ -23,9 +23,7 @@ import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
-/**
- * :: Experimental ::
- */
+/** :: Experimental :: */
 @Experimental
 class EntityIdIxMap(val idToIx: BiMap[String, Long]) extends Serializable {
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/a92edd98/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala
----------------------------------------------------------------------
diff --git 
a/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala 
b/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala
index 34c40a3..66f389e 100644
--- a/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala
@@ -23,9 +23,8 @@ import org.joda.time.format.ISODateTimeFormat
 
 /** Backend-agnostic storage utilities. */
 private[predictionio] object Utils {
-  /**
-   * Add prefix to custom attribute keys.
-   */
+
+  /** Add prefix to custom attribute keys. */
   def addPrefixToAttributeKeys[T](
       attributes: Map[String, T],
       prefix: String = "ca_"): Map[String, T] = {
@@ -39,10 +38,9 @@ private[predictionio] object Utils {
     attributes map { case (k, v) => (k.stripPrefix(prefix), v) }
   }
 
-  /**
-   * Appends App ID to any ID.
-   * Used for distinguishing different app's data within a single collection.
-   */
+  /** Appends App ID to any ID.
+    * Used for distinguishing different app's data within a single collection.
+    */
   def idWithAppid(appid: Int, id: String): String = appid + "_" + id
 
   def stringToDateTime(dt: String): DateTime =

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/a92edd98/data/src/main/scala/org/apache/predictionio/data/view/DataView.scala
----------------------------------------------------------------------
diff --git 
a/data/src/main/scala/org/apache/predictionio/data/view/DataView.scala 
b/data/src/main/scala/org/apache/predictionio/data/view/DataView.scala
index 9536029..4866b5d 100644
--- a/data/src/main/scala/org/apache/predictionio/data/view/DataView.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/view/DataView.scala
@@ -34,9 +34,7 @@ import scala.reflect.ClassTag
 import scala.reflect.runtime.universe._
 import scala.util.hashing.MurmurHash3
 
-/**
- * :: Experimental ::
- */
+/** :: Experimental :: */
 @Experimental
 object DataView {
   /**

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/a92edd98/e2/src/main/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayes.scala
----------------------------------------------------------------------
diff --git 
a/e2/src/main/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayes.scala
 
b/e2/src/main/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayes.scala
index 460a940..9dc6f9d 100644
--- 
a/e2/src/main/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayes.scala
+++ 
b/e2/src/main/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayes.scala
@@ -20,15 +20,13 @@ package org.apache.predictionio.e2.engine
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
-/**
- * Class for training a naive Bayes model with categorical variables
- */
+/** Class for training a naive Bayes model with categorical variables */
 object CategoricalNaiveBayes {
-  /**
-   * Train with data points and return the model
-   *
-   * @param points training data points
-   */
+
+  /** Train with data points and return the model
+    *
+    * @param points training data points
+    */
   def train(points: RDD[LabeledPoint]): CategoricalNaiveBayesModel = {
     val labelCountFeatureLikelihoods = points.map { p =>
       (p.label, p.features)
@@ -82,27 +80,25 @@ object CategoricalNaiveBayes {
   }
 }
 
-/**
- * Model for naive Bayes classifiers with categorical variables.
- *
- * @param priors log prior probabilities
- * @param likelihoods log likelihood probabilities
- */
+/** Model for naive Bayes classifiers with categorical variables.
+  *
+  * @param priors log prior probabilities
+  * @param likelihoods log likelihood probabilities
+  */
 case class CategoricalNaiveBayesModel(
   priors: Map[String, Double],
   likelihoods: Map[String, Array[Map[String, Double]]]) extends Serializable {
 
   val featureCount = likelihoods.head._2.size
 
-  /**
-   * Calculate the log score of having the given features and label
-   *
-   * @param point label and features
-   * @param defaultLikelihood a function that calculates the likelihood when a
-   *                          feature value is not present. The input to the
-   *                          function is the other feature value likelihoods.
-   * @return log score when label is present. None otherwise.
-   */
+  /** Calculate the log score of having the given features and label
+    *
+    * @param point label and features
+    * @param defaultLikelihood a function that calculates the likelihood when a
+    *                          feature value is not present. The input to the
+    *                          function is the other feature value likelihoods.
+    * @return log score when label is present. None otherwise.
+    */
   def logScore(
     point: LabeledPoint,
     defaultLikelihood: (Seq[Double]) => Double = ls => Double.NegativeInfinity
@@ -137,12 +133,11 @@ case class CategoricalNaiveBayesModel(
     prior + likelihoodScores.sum
   }
 
-  /**
-   * Return the label that yields the highest score
-   *
-   * @param features features for classification
-   *
-   */
+  /** Return the label that yields the highest score
+    *
+    * @param features features for classification
+    *
+    */
   def predict(features: Array[String]): String = {
     priors.keySet.map { label =>
       (label, logScoreInternal(label, features))
@@ -154,12 +149,11 @@ case class CategoricalNaiveBayesModel(
   }
 }
 
-/**
- * Class that represents the features and labels of a data point.
- *
- * @param label Label of this data point
- * @param features Features of this data point
- */
+/** Class that represents the features and labels of a data point.
+  *
+  * @param label Label of this data point
+  * @param features Features of this data point
+  */
 case class LabeledPoint(label: String, features: Array[String]) {
   override def toString: String = {
     val featuresString = features.mkString("[", ",", "]")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/a92edd98/e2/src/main/scala/org/apache/predictionio/e2/engine/MarkovChain.scala
----------------------------------------------------------------------
diff --git 
a/e2/src/main/scala/org/apache/predictionio/e2/engine/MarkovChain.scala 
b/e2/src/main/scala/org/apache/predictionio/e2/engine/MarkovChain.scala
index dc496ad..3c3ac34 100644
--- a/e2/src/main/scala/org/apache/predictionio/e2/engine/MarkovChain.scala
+++ b/e2/src/main/scala/org/apache/predictionio/e2/engine/MarkovChain.scala
@@ -22,16 +22,14 @@ import org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
 import org.apache.spark.mllib.linalg.{SparseVector, Vectors}
 import org.apache.spark.rdd.RDD
 
-/**
- * Class for training a Markov Chain model
- */
+/** Class for training a Markov Chain model */
 object MarkovChain {
-  /**
-   * Train a Markov Chain model
-   *
-   * @param matrix Tally of all state transitions
-   * @param topN Use the top-N tally for each state
-   */
+
+  /** Train a Markov Chain model
+    *
+    * @param matrix Tally of all state transitions
+    * @param topN Use the top-N tally for each state
+    */
   def train(matrix: CoordinateMatrix, topN: Int): MarkovChainModel = {
     val noOfStates = matrix.numCols().toInt
     val transitionVectors = matrix.entries
@@ -57,21 +55,19 @@ object MarkovChain {
   }
 }
 
-/**
- * Markov Chain model
- *
- * @param transitionVectors transition vectors
- * @param n top N used to construct the model
- */
+/** Markov Chain model
+  *
+  * @param transitionVectors transition vectors
+  * @param n top N used to construct the model
+  */
 case class MarkovChainModel(
   transitionVectors: RDD[(Int, SparseVector)],
   n: Int) {
 
-  /**
-   * Calculate the probabilities of the next state
-   *
-   * @param currentState probabilities of the current state
-   */
+  /** Calculate the probabilities of the next state
+    *
+    * @param currentState probabilities of the current state
+    */
   def predict(currentState: Seq[Double]): Seq[Double] = {
     // multiply the input with transition matrix row by row
     val nextStateVectors = transitionVectors.map { case (rowIndex, vector) =>

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/a92edd98/scalastyle-config.xml
----------------------------------------------------------------------
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index 740f1c2..9791822 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -110,4 +110,19 @@ limitations under the License.
     <check level="error"
            class="org.scalastyle.scalariform.SpaceAfterCommentStartChecker"
            enabled="true"/>
+    <check customId="NoJavaDoc" class="org.scalastyle.file.RegexChecker"
+           enabled="true"
+           level="error">
+        <parameters>
+            <parameter name="regex">(?m)^(\s*)/[*][*].*$(\r|)\n^\1 [*]</parameter>
+        </parameters>
+        <customMessage>Use Scaladoc style indentation for multiline comments</customMessage>
+    </check>
+
+    <!-- Do not enable ScalaDocChecker below. This forces all the classes,-->
+    <!-- traits, methods, types and properties to have documentation which-->
+    <!-- is overwhelming.-->
+    <check level="error"
+           class="org.scalastyle.scalariform.ScalaDocChecker"
+           enabled="false"/>
 </scalastyle>

Reply via email to