spark git commit: [SPARK-21170][CORE] Utils.tryWithSafeFinallyAndFailureCallbacks throws IllegalArgumentException: Self-suppression not permitted

2017-07-01 Thread srowen
Repository: spark
Updated Branches:
  refs/heads/branch-2.2 85fddf406 -> 6fd39ea1c


[SPARK-21170][CORE] Utils.tryWithSafeFinallyAndFailureCallbacks throws 
IllegalArgumentException: Self-suppression not permitted

## What changes were proposed in this pull request?

Do not add the exception to the suppressed list if it is the same instance as 
`originalThrowable`, since `Throwable.addSuppressed` rejects self-suppression.
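
For context, the JVM behavior behind the reported error can be reproduced in a few lines (a standalone sketch, not part of the patch):

```scala
// Throwable.addSuppressed(t) throws IllegalArgumentException when t is the
// receiver itself ("Self-suppression not permitted").
object SelfSuppressionDemo {
  def main(args: Array[String]): Unit = {
    val e = new RuntimeException("boom")
    try {
      e.addSuppressed(e) // same instance: the JVM rejects this
    } catch {
      case iae: IllegalArgumentException =>
        println(s"rejected: ${iae.getMessage}") // "Self-suppression not permitted"
    }
  }
}
```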

## How was this patch tested?

Added new tests to verify this; these tests fail without the source change and 
pass with it.

Author: Devaraj K 

Closes #18384 from devaraj-kavali/SPARK-21170.

(cherry picked from commit 6beca9ce94f484de2f9ffb946bef8334781b3122)
Signed-off-by: Sean Owen 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6fd39ea1
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6fd39ea1
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6fd39ea1

Branch: refs/heads/branch-2.2
Commit: 6fd39ea1c9dbf68763cb394a28d8a13c116341df
Parents: 85fddf4
Author: Devaraj K 
Authored: Sat Jul 1 15:53:49 2017 +0100
Committer: Sean Owen 
Committed: Sat Jul 1 15:54:18 2017 +0100

--
 .../scala/org/apache/spark/util/Utils.scala | 30 +++
 .../org/apache/spark/util/UtilsSuite.scala  | 88 +++-
 2 files changed, 99 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/6fd39ea1/core/src/main/scala/org/apache/spark/util/Utils.scala
--
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 67497bb..999486c 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1345,14 +1345,10 @@ private[spark] object Utils extends Logging {
       try {
         finallyBlock
       } catch {
-        case t: Throwable =>
-          if (originalThrowable != null) {
-            originalThrowable.addSuppressed(t)
-            logWarning(s"Suppressing exception in finally: " + t.getMessage, t)
-            throw originalThrowable
-          } else {
-            throw t
-          }
+        case t: Throwable if (originalThrowable != null && originalThrowable != t) =>
+          originalThrowable.addSuppressed(t)
+          logWarning(s"Suppressing exception in finally: ${t.getMessage}", t)
+          throw originalThrowable
       }
     }
   }
@@ -1384,22 +1380,20 @@ private[spark] object Utils extends Logging {
           catchBlock
         } catch {
           case t: Throwable =>
-            originalThrowable.addSuppressed(t)
-            logWarning(s"Suppressing exception in catch: " + t.getMessage, t)
+            if (originalThrowable != t) {
+              originalThrowable.addSuppressed(t)
+              logWarning(s"Suppressing exception in catch: ${t.getMessage}", t)
+            }
         }
         throw originalThrowable
     } finally {
       try {
         finallyBlock
       } catch {
-        case t: Throwable =>
-          if (originalThrowable != null) {
-            originalThrowable.addSuppressed(t)
-            logWarning(s"Suppressing exception in finally: " + t.getMessage, t)
-            throw originalThrowable
-          } else {
-            throw t
-          }
+        case t: Throwable if (originalThrowable != null && originalThrowable != t) =>
+          originalThrowable.addSuppressed(t)
+          logWarning(s"Suppressing exception in finally: ${t.getMessage}", t)
+          throw originalThrowable
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/6fd39ea1/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
--
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 3339d5b..d130a1d 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -38,7 +38,7 @@ import org.apache.commons.math3.stat.inference.ChiSquareTest
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 
-import org.apache.spark.{SparkConf, SparkFunSuite}
+import org.apache.spark.{SparkConf, SparkFunSuite, TaskContext}
 import org.apache.spark.internal.Logging
 import org.apache.spark.network.util.ByteUnit
 
@@ -1025,4 +1025,90 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
     assert(redactedConf("spark.sensitive.property") === Utils.REDACTION_REPLACEMENT_TEXT)
 
   }
+
+  test("tryWithSafeFinally") {
+    var e = new Error("Block0")
+    val finallyBlockError = new Error("Finally Block")

spark git commit: [SPARK-21170][CORE] Utils.tryWithSafeFinallyAndFailureCallbacks throws IllegalArgumentException: Self-suppression not permitted

2017-07-01 Thread srowen
Repository: spark
Updated Branches:
  refs/heads/master e0b047eaf -> 6beca9ce9


[SPARK-21170][CORE] Utils.tryWithSafeFinallyAndFailureCallbacks throws 
IllegalArgumentException: Self-suppression not permitted

## What changes were proposed in this pull request?

Do not add the exception to the suppressed list if it is the same instance as 
`originalThrowable`, since `Throwable.addSuppressed` rejects self-suppression.
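
The essence of the fix, as a standalone sketch (simplified from `Utils.tryWithSafeFinally`; `tryWithCleanup` is a hypothetical name used only for illustration):

```scala
object GuardedSuppressionSketch {
  // Only suppress the cleanup exception when it is a *different* instance
  // than the original one; Throwable.addSuppressed rejects self-suppression.
  def tryWithCleanup[T](block: => T)(cleanup: => Unit): T = {
    var original: Throwable = null
    try {
      block
    } catch {
      case t: Throwable =>
        original = t
        throw t
    } finally {
      try {
        cleanup
      } catch {
        // the `original != t` guard is what SPARK-21170 adds; without it, a
        // cleanup that rethrows the same instance would hit
        // "IllegalArgumentException: Self-suppression not permitted"
        case t: Throwable if original != null && original != t =>
          original.addSuppressed(t)
          throw original
      }
    }
  }

  def main(args: Array[String]): Unit = {
    val e = new RuntimeException("work failed")
    try {
      tryWithCleanup[Unit] { throw e } { throw e } // cleanup rethrows the same instance
    } catch {
      case t: Throwable => println(s"caught $t with ${t.getSuppressed.length} suppressed")
    }
  }
}
```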

## How was this patch tested?

Added new tests to verify this; these tests fail without the source change and 
pass with it.

Author: Devaraj K 

Closes #18384 from devaraj-kavali/SPARK-21170.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6beca9ce
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6beca9ce
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6beca9ce

Branch: refs/heads/master
Commit: 6beca9ce94f484de2f9ffb946bef8334781b3122
Parents: e0b047e
Author: Devaraj K 
Authored: Sat Jul 1 15:53:49 2017 +0100
Committer: Sean Owen 
Committed: Sat Jul 1 15:53:49 2017 +0100

--
 .../scala/org/apache/spark/util/Utils.scala | 30 +++
 .../org/apache/spark/util/UtilsSuite.scala  | 88 +++-
 2 files changed, 99 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/6beca9ce/core/src/main/scala/org/apache/spark/util/Utils.scala
--
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index bbb7999..26f61e2 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1348,14 +1348,10 @@ private[spark] object Utils extends Logging {
       try {
         finallyBlock
       } catch {
-        case t: Throwable =>
-          if (originalThrowable != null) {
-            originalThrowable.addSuppressed(t)
-            logWarning(s"Suppressing exception in finally: " + t.getMessage, t)
-            throw originalThrowable
-          } else {
-            throw t
-          }
+        case t: Throwable if (originalThrowable != null && originalThrowable != t) =>
+          originalThrowable.addSuppressed(t)
+          logWarning(s"Suppressing exception in finally: ${t.getMessage}", t)
+          throw originalThrowable
       }
     }
   }
@@ -1387,22 +1383,20 @@ private[spark] object Utils extends Logging {
           catchBlock
         } catch {
           case t: Throwable =>
-            originalThrowable.addSuppressed(t)
-            logWarning(s"Suppressing exception in catch: " + t.getMessage, t)
+            if (originalThrowable != t) {
+              originalThrowable.addSuppressed(t)
+              logWarning(s"Suppressing exception in catch: ${t.getMessage}", t)
+            }
         }
         throw originalThrowable
     } finally {
       try {
         finallyBlock
       } catch {
-        case t: Throwable =>
-          if (originalThrowable != null) {
-            originalThrowable.addSuppressed(t)
-            logWarning(s"Suppressing exception in finally: " + t.getMessage, t)
-            throw originalThrowable
-          } else {
-            throw t
-          }
+        case t: Throwable if (originalThrowable != null && originalThrowable != t) =>
+          originalThrowable.addSuppressed(t)
+          logWarning(s"Suppressing exception in finally: ${t.getMessage}", t)
+          throw originalThrowable
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/6beca9ce/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
--
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index f7bc8f8..4ce143f 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -38,7 +38,7 @@ import org.apache.commons.math3.stat.inference.ChiSquareTest
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 
-import org.apache.spark.{SparkConf, SparkFunSuite}
+import org.apache.spark.{SparkConf, SparkFunSuite, TaskContext}
 import org.apache.spark.internal.Logging
 import org.apache.spark.network.util.ByteUnit
 
@@ -1024,4 +1024,90 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
     assert(redactedConf("spark.sensitive.property") === Utils.REDACTION_REPLACEMENT_TEXT)
 
   }
+
+  test("tryWithSafeFinally") {
+    var e = new Error("Block0")
+    val finallyBlockError = new Error("Finally Block")
+    var isErrorOccurred = false
+    // if the try and finally blocks throw different exception instances

spark git commit: [SPARK-18518][ML] HasSolver supports override

2017-07-01 Thread yliang
Repository: spark
Updated Branches:
  refs/heads/master 37ef32e51 -> e0b047eaf


[SPARK-18518][ML] HasSolver supports override

## What changes were proposed in this pull request?
1. Make shared params support non-final fields via a `finalFields` option in the codegen
2. Generate `HasSolver` with `finalFields = false`
3. Override `solver` in LinearRegression and GeneralizedLinearRegression, and make MultilayerPerceptronClassifier inherit `HasSolver` (a sketch of the override pattern follows this list)
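
A minimal sketch of that override pattern (trait names below, such as `HasSolverLike`, are illustrative stand-ins, not the generated code):

```scala
import org.apache.spark.ml.param.{Param, Params, ParamValidators}

// generated with finalFields = false: the val is non-final, so subtypes may override it
trait HasSolverLike extends Params {
  val solver: Param[String] = new Param[String](this, "solver",
    "the solver algorithm for optimization")
  final def getSolver: String = $(solver)
}

// an estimator narrows the allowed values by overriding the non-final param
trait MlpSolverParams extends HasSolverLike {
  override val solver: Param[String] = new Param[String](this, "solver",
    "Supported options: l-bfgs, gd. (Default l-bfgs)",
    ParamValidators.inArray[String](Array("l-bfgs", "gd")))
}
```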

## How was this patch tested?
Existing tests.

Author: Ruifeng Zheng 
Author: Zheng RuiFeng 

Closes #16028 from zhengruifeng/param_non_final.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e0b047ea
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e0b047ea
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e0b047ea

Branch: refs/heads/master
Commit: e0b047eafed92eadf6842a9df964438095e12d41
Parents: 37ef32e
Author: Ruifeng Zheng 
Authored: Sat Jul 1 15:37:41 2017 +0800
Committer: Yanbo Liang 
Committed: Sat Jul 1 15:37:41 2017 +0800

--
 .../MultilayerPerceptronClassifier.scala| 19 
 .../ml/param/shared/SharedParamsCodeGen.scala   | 11 +++--
 .../spark/ml/param/shared/sharedParams.scala|  8 ++--
 .../GeneralizedLinearRegression.scala   | 21 -
 .../spark/ml/regression/LinearRegression.scala  | 46 +++-
 python/pyspark/ml/classification.py | 18 +---
 python/pyspark/ml/regression.py |  5 +++
 7 files changed, 82 insertions(+), 46 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/e0b047ea/mllib/src/main/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifier.scala
--
diff --git a/mllib/src/main/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifier.scala b/mllib/src/main/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifier.scala
index ec39f96..ceba11e 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifier.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifier.scala
@@ -27,13 +27,16 @@ import org.apache.spark.ml.ann.{FeedForwardTopology, FeedForwardTrainer}
 import org.apache.spark.ml.feature.LabeledPoint
 import org.apache.spark.ml.linalg.{Vector, Vectors}
 import org.apache.spark.ml.param._
-import org.apache.spark.ml.param.shared.{HasMaxIter, HasSeed, HasStepSize, HasTol}
+import org.apache.spark.ml.param.shared._
 import org.apache.spark.ml.util._
 import org.apache.spark.sql.Dataset
 
 /** Params for Multilayer Perceptron. */
 private[classification] trait MultilayerPerceptronParams extends PredictorParams
-  with HasSeed with HasMaxIter with HasTol with HasStepSize {
+  with HasSeed with HasMaxIter with HasTol with HasStepSize with HasSolver {
+
+  import MultilayerPerceptronClassifier._
+
   /**
    * Layer sizes including input size and output size.
    *
@@ -78,14 +81,10 @@ private[classification] trait MultilayerPerceptronParams extends PredictorParams
    * @group expertParam
    */
   @Since("2.0.0")
-  final val solver: Param[String] = new Param[String](this, "solver",
+  final override val solver: Param[String] = new Param[String](this, "solver",
     "The solver algorithm for optimization. Supported options: " +
-      s"${MultilayerPerceptronClassifier.supportedSolvers.mkString(", ")}. (Default l-bfgs)",
-    ParamValidators.inArray[String](MultilayerPerceptronClassifier.supportedSolvers))
-
-  /** @group expertGetParam */
-  @Since("2.0.0")
-  final def getSolver: String = $(solver)
+      s"${supportedSolvers.mkString(", ")}. (Default l-bfgs)",
+    ParamValidators.inArray[String](supportedSolvers))
 
   /**
    * The initial weights of the model.
@@ -101,7 +100,7 @@ private[classification] trait MultilayerPerceptronParams extends PredictorParams
   final def getInitialWeights: Vector = $(initialWeights)
 
   setDefault(maxIter -> 100, tol -> 1e-6, blockSize -> 128,
-    solver -> MultilayerPerceptronClassifier.LBFGS, stepSize -> 0.03)
+    solver -> LBFGS, stepSize -> 0.03)
 }
 
 /** Label to vector converter. */

http://git-wip-us.apache.org/repos/asf/spark/blob/e0b047ea/mllib/src/main/scala/org/apache/spark/ml/param/shared/SharedParamsCodeGen.scala
--
diff --git a/mllib/src/main/scala/org/apache/spark/ml/param/shared/SharedParamsCodeGen.scala b/mllib/src/main/scala/org/apache/spark/ml/param/shared/SharedParamsCodeGen.scala
index 013817a..23e0d45 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/param/shared/SharedParamsCodeGen.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/param/shared/SharedParamsCodeGen.scala
@@ -80,8 +80,7 @@ 

spark git commit: [SPARK-21275][ML] Update GLM test to use supportedFamilyNames

2017-07-01 Thread yliang
Repository: spark
Updated Branches:
  refs/heads/master b1d719e7c -> 37ef32e51


[SPARK-21275][ML] Update GLM test to use supportedFamilyNames

## What changes were proposed in this pull request?
Update GLM test to use supportedFamilyNames as suggested here:
https://github.com/apache/spark/pull/16699#discussion-diff-100574976R855
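
The pattern being adopted, sketched standalone (the stand-in list below mirrors what `GeneralizedLinearRegression.supportedFamilyNames` provides):

```scala
// Drive the loop from the estimator's own family list, sorted for a
// deterministic order, so the hard-coded expected values stay aligned
// even when new families are added.
val supportedFamilyNames = Array("gaussian", "binomial", "poisson", "gamma", "tweedie")
for (family <- supportedFamilyNames.sortWith(_ < _)) {
  println(s"fitting a GLM with family = $family")
}
```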

Author: actuaryzhang 

Closes #18495 from actuaryzhang/mlGlmTest2.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/37ef32e5
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/37ef32e5
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/37ef32e5

Branch: refs/heads/master
Commit: 37ef32e515ea071afe63b56ba0d4299bb76e8a75
Parents: b1d719e
Author: actuaryzhang 
Authored: Sat Jul 1 14:57:57 2017 +0800
Committer: Yanbo Liang 
Committed: Sat Jul 1 14:57:57 2017 +0800

--
 .../GeneralizedLinearRegressionSuite.scala  | 33 ++--
 1 file changed, 16 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/37ef32e5/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala
--
diff --git a/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala
index 83f1344..a47bd17 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala
@@ -749,15 +749,15 @@ class GeneralizedLinearRegressionSuite
   library(statmod)
   y <- c(1.0, 0.5, 0.7, 0.3)
   w <- c(1, 2, 3, 4)
-      for (fam in list(gaussian(), poisson(), binomial(), Gamma(), tweedie(1.6))) {
+      for (fam in list(binomial(), Gamma(), gaussian(), poisson(), tweedie(1.6))) {
 model1 <- glm(y ~ 1, family = fam)
 model2 <- glm(y ~ 1, family = fam, weights = w)
 print(as.vector(c(coef(model1), coef(model2
   }
-  [1] 0.625 0.530
-  [1] -0.4700036 -0.6348783
   [1] 0.5108256 0.1201443
   [1] 1.60 1.886792
+  [1] 0.625 0.530
+  [1] -0.4700036 -0.6348783
   [1] 1.325782 1.463641
  */
 
@@ -768,13 +768,13 @@ class GeneralizedLinearRegressionSuite
   Instance(0.3, 4.0, Vectors.zeros(0))
 ).toDF()
 
-    val expected = Seq(0.625, 0.530, -0.4700036, -0.6348783, 0.5108256, 0.1201443,
-      1.60, 1.886792, 1.325782, 1.463641)
+    val expected = Seq(0.5108256, 0.1201443, 1.60, 1.886792, 0.625, 0.530,
+      -0.4700036, -0.6348783, 1.325782, 1.463641)
 
 import GeneralizedLinearRegression._
 
 var idx = 0
-    for (family <- Seq("gaussian", "poisson", "binomial", "gamma", "tweedie")) {
+    for (family <- GeneralizedLinearRegression.supportedFamilyNames.sortWith(_ < _)) {
   for (useWeight <- Seq(false, true)) {
 val trainer = new GeneralizedLinearRegression().setFamily(family)
 if (useWeight) trainer.setWeightCol("weight")
@@ -807,7 +807,7 @@ class GeneralizedLinearRegressionSuite
 0.5, 2.1, 0.5, 1.0, 2.0,
 0.9, 0.4, 1.0, 2.0, 1.0,
 0.7, 0.7, 0.0, 3.0, 3.0), 4, 5, byrow = TRUE))
-  families <- list(gaussian, binomial, poisson, Gamma, tweedie(1.5))
+  families <- list(binomial, Gamma, gaussian, poisson, tweedie(1.5))
   f1 <- V1 ~ -1 + V4 + V5
   f2 <- V1 ~ V4 + V5
   for (f in c(f1, f2)) {
@@ -816,15 +816,15 @@ class GeneralizedLinearRegressionSuite
   print(as.vector(coef(model)))
 }
   }
-  [1]  0.5169222 -0.334
   [1]  0.9419107 -0.6864404
-  [1]  0.1812436 -0.6568422
   [1] -0.2869094  0.7857710
+  [1]  0.5169222 -0.334
+  [1]  0.1812436 -0.6568422
   [1] 0.1055254 0.2979113
-  [1] -0.05990345  0.53188982 -0.32118415
   [1] -0.2147117  0.9911750 -0.6356096
-  [1] -1.5616130  0.6646470 -0.3192581
   [1]  0.3390397 -0.3406099  0.6870259
+  [1] -0.05990345  0.53188982 -0.32118415
+  [1] -1.5616130  0.6646470 -0.3192581
   [1] 0.3665034 0.1039416 0.1484616
 */
 val dataset = Seq(
@@ -835,23 +835,22 @@ class GeneralizedLinearRegressionSuite
 ).toDF()
 
 val expected = Seq(
-  Vectors.dense(0, 0.5169222, -0.334),
   Vectors.dense(0, 0.9419107, -0.6864404),
-  Vectors.dense(0, 0.1812436, -0.6568422),
   Vectors.dense(0, -0.2869094, 0.785771),
+  Vectors.dense(0, 0.5169222, -0.334),
+  Vectors.dense(0, 0.1812436, -0.6568422),
   Vectors.dense(0, 0.1055254, 0.2979113),
-  Vectors.dense(-0.05990345, 0.53188982, -0.32118415),