Repository: spark
Updated Branches:
  refs/heads/master 3a423f5a0 -> c64a8ff39


[SPARK-18049][MLLIB][TEST] Add missing tests for truePositiveRate and 
weightedTruePositiveRate

## What changes were proposed in this pull request?
Add missing tests for `truePositiveRate` and `weightedTruePositiveRate` in 
`MulticlassMetricsSuite`

## How was this patch tested?
Added tests.

Author: Zheng RuiFeng <[email protected]>

Closes #15585 from zhengruifeng/mc_missing_test.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c64a8ff3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c64a8ff3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c64a8ff3

Branch: refs/heads/master
Commit: c64a8ff39794d60c596c0d34130019c09c9c8012
Parents: 3a423f5
Author: Zheng RuiFeng <[email protected]>
Authored: Mon Oct 24 10:25:24 2016 +0100
Committer: Sean Owen <[email protected]>
Committed: Mon Oct 24 10:25:24 2016 +0100

----------------------------------------------------------------------
 .../spark/api/python/WriteInputFormatTestDataGenerator.scala | 2 +-
 .../src/main/scala/org/apache/spark/ml/util/ReadWrite.scala  | 2 +-
 .../apache/spark/mllib/evaluation/RegressionMetrics.scala    | 2 +-
 .../apache/spark/mllib/linalg/distributed/BlockMatrix.scala  | 4 ++--
 .../spark/mllib/evaluation/MulticlassMetricsSuite.scala      | 8 ++++++++
 .../spark/mllib/evaluation/MultilabelMetricsSuite.scala      | 2 +-
 6 files changed, 14 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c64a8ff3/core/src/main/scala/org/apache/spark/api/python/WriteInputFormatTestDataGenerator.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/api/python/WriteInputFormatTestDataGenerator.scala
 
b/core/src/main/scala/org/apache/spark/api/python/WriteInputFormatTestDataGenerator.scala
index 34cb7c6..86965db 100644
--- 
a/core/src/main/scala/org/apache/spark/api/python/WriteInputFormatTestDataGenerator.scala
+++ 
b/core/src/main/scala/org/apache/spark/api/python/WriteInputFormatTestDataGenerator.scala
@@ -144,7 +144,7 @@ object WriteInputFormatTestDataGenerator {
 
     // Create test data for ArrayWritable
     val data = Seq(
-      (1, Array()),
+      (1, Array.empty[Double]),
       (2, Array(3.0, 4.0, 5.0)),
       (3, Array(4.0, 5.0, 6.0))
     )

http://git-wip-us.apache.org/repos/asf/spark/blob/c64a8ff3/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala 
b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
index 4413fef..bc4f9e6 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
@@ -474,7 +474,7 @@ private[ml] object MetaAlgorithmReadWrite {
       case ovr: OneVsRest => Array(ovr.getClassifier)
       case ovrModel: OneVsRestModel => Array(ovrModel.getClassifier) ++ 
ovrModel.models
       case rformModel: RFormulaModel => Array(rformModel.pipelineModel)
-      case _: Params => Array()
+      case _: Params => Array.empty[Params]
     }
     val subStageMaps = subStages.flatMap(getUidMapImpl)
     List((instance.uid, instance)) ++ subStageMaps

http://git-wip-us.apache.org/repos/asf/spark/blob/c64a8ff3/mllib/src/main/scala/org/apache/spark/mllib/evaluation/RegressionMetrics.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/main/scala/org/apache/spark/mllib/evaluation/RegressionMetrics.scala
 
b/mllib/src/main/scala/org/apache/spark/mllib/evaluation/RegressionMetrics.scala
index ce44215..8f777cc 100644
--- 
a/mllib/src/main/scala/org/apache/spark/mllib/evaluation/RegressionMetrics.scala
+++ 
b/mllib/src/main/scala/org/apache/spark/mllib/evaluation/RegressionMetrics.scala
@@ -73,7 +73,7 @@ class RegressionMetrics @Since("2.0.0") (
 
   /**
    * Returns the variance explained by regression.
-   * explainedVariance = $\sum_i (\hat{y_i} - \bar{y})^2 / n$
+   * explainedVariance = $\sum_i (\hat{y_i} - \bar{y})^2^ / n$
    * @see [[https://en.wikipedia.org/wiki/Fraction_of_variance_unexplained]]
    */
   @Since("1.2.0")

http://git-wip-us.apache.org/repos/asf/spark/blob/c64a8ff3/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala
 
b/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala
index ff10684..377be6b 100644
--- 
a/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala
+++ 
b/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala
@@ -429,14 +429,14 @@ class BlockMatrix @Since("1.3.0") (
 
     val rightCounterpartsHelper = 
rightMatrix.groupBy(_._1).mapValues(_.map(_._2))
     val leftDestinations = leftMatrix.map { case (rowIndex, colIndex) =>
-      val rightCounterparts = rightCounterpartsHelper.getOrElse(colIndex, 
Array())
+      val rightCounterparts = rightCounterpartsHelper.getOrElse(colIndex, 
Array.empty[Int])
       val partitions = rightCounterparts.map(b => 
partitioner.getPartition((rowIndex, b)))
       ((rowIndex, colIndex), partitions.toSet)
     }.toMap
 
     val leftCounterpartsHelper = 
leftMatrix.groupBy(_._2).mapValues(_.map(_._1))
     val rightDestinations = rightMatrix.map { case (rowIndex, colIndex) =>
-      val leftCounterparts = leftCounterpartsHelper.getOrElse(rowIndex, 
Array())
+      val leftCounterparts = leftCounterpartsHelper.getOrElse(rowIndex, 
Array.empty[Int])
       val partitions = leftCounterparts.map(b => partitioner.getPartition((b, 
colIndex)))
       ((rowIndex, colIndex), partitions.toSet)
     }.toMap

http://git-wip-us.apache.org/repos/asf/spark/blob/c64a8ff3/mllib/src/test/scala/org/apache/spark/mllib/evaluation/MulticlassMetricsSuite.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/test/scala/org/apache/spark/mllib/evaluation/MulticlassMetricsSuite.scala
 
b/mllib/src/test/scala/org/apache/spark/mllib/evaluation/MulticlassMetricsSuite.scala
index f316c67..142d1e9 100644
--- 
a/mllib/src/test/scala/org/apache/spark/mllib/evaluation/MulticlassMetricsSuite.scala
+++ 
b/mllib/src/test/scala/org/apache/spark/mllib/evaluation/MulticlassMetricsSuite.scala
@@ -36,6 +36,9 @@ class MulticlassMetricsSuite extends SparkFunSuite with 
MLlibTestSparkContext {
         (1.0, 1.0), (1.0, 1.0), (2.0, 2.0), (2.0, 0.0)), 2)
     val metrics = new MulticlassMetrics(predictionAndLabels)
     val delta = 0.0000001
+    val tpRate0 = 2.0 / (2 + 2)
+    val tpRate1 = 3.0 / (3 + 1)
+    val tpRate2 = 1.0 / (1 + 0)
     val fpRate0 = 1.0 / (9 - 4)
     val fpRate1 = 1.0 / (9 - 4)
     val fpRate2 = 1.0 / (9 - 1)
@@ -53,6 +56,9 @@ class MulticlassMetricsSuite extends SparkFunSuite with 
MLlibTestSparkContext {
     val f2measure2 = (1 + 2 * 2) * precision2 * recall2 / (2 * 2 * precision2 
+ recall2)
 
     
assert(metrics.confusionMatrix.toArray.sameElements(confusionMatrix.toArray))
+    assert(math.abs(metrics.truePositiveRate(0.0) - tpRate0) < delta)
+    assert(math.abs(metrics.truePositiveRate(1.0) - tpRate1) < delta)
+    assert(math.abs(metrics.truePositiveRate(2.0) - tpRate2) < delta)
     assert(math.abs(metrics.falsePositiveRate(0.0) - fpRate0) < delta)
     assert(math.abs(metrics.falsePositiveRate(1.0) - fpRate1) < delta)
     assert(math.abs(metrics.falsePositiveRate(2.0) - fpRate2) < delta)
@@ -75,6 +81,8 @@ class MulticlassMetricsSuite extends SparkFunSuite with 
MLlibTestSparkContext {
     assert(math.abs(metrics.accuracy - metrics.recall) < delta)
     assert(math.abs(metrics.accuracy - metrics.fMeasure) < delta)
     assert(math.abs(metrics.accuracy - metrics.weightedRecall) < delta)
+    assert(math.abs(metrics.weightedTruePositiveRate -
+      ((4.0 / 9) * tpRate0 + (4.0 / 9) * tpRate1 + (1.0 / 9) * tpRate2)) < 
delta)
     assert(math.abs(metrics.weightedFalsePositiveRate -
       ((4.0 / 9) * fpRate0 + (4.0 / 9) * fpRate1 + (1.0 / 9) * fpRate2)) < 
delta)
     assert(math.abs(metrics.weightedPrecision -

http://git-wip-us.apache.org/repos/asf/spark/blob/c64a8ff3/mllib/src/test/scala/org/apache/spark/mllib/evaluation/MultilabelMetricsSuite.scala
----------------------------------------------------------------------
diff --git 
a/mllib/src/test/scala/org/apache/spark/mllib/evaluation/MultilabelMetricsSuite.scala
 
b/mllib/src/test/scala/org/apache/spark/mllib/evaluation/MultilabelMetricsSuite.scala
index f3b19ae..a660492 100644
--- 
a/mllib/src/test/scala/org/apache/spark/mllib/evaluation/MultilabelMetricsSuite.scala
+++ 
b/mllib/src/test/scala/org/apache/spark/mllib/evaluation/MultilabelMetricsSuite.scala
@@ -47,7 +47,7 @@ class MultilabelMetricsSuite extends SparkFunSuite with 
MLlibTestSparkContext {
     val scoreAndLabels: RDD[(Array[Double], Array[Double])] = sc.parallelize(
       Seq((Array(0.0, 1.0), Array(0.0, 2.0)),
         (Array(0.0, 2.0), Array(0.0, 1.0)),
-        (Array(), Array(0.0)),
+        (Array.empty[Double], Array(0.0)),
         (Array(2.0), Array(2.0)),
         (Array(2.0, 0.0), Array(2.0, 0.0)),
         (Array(0.0, 1.0, 2.0), Array(0.0, 1.0)),


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to