GitHub user debasish83 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/17862#discussion_r115741206
  
    --- Diff: mllib/src/test/scala/org/apache/spark/ml/classification/LinearSVCSuite.scala ---
    @@ -154,22 +159,23 @@ class LinearSVCSuite extends SparkFunSuite with MLlibTestSparkContext with Defau
     
       test("linearSVC with sample weights") {
         def modelEquals(m1: LinearSVCModel, m2: LinearSVCModel): Unit = {
    -      assert(m1.coefficients ~== m2.coefficients absTol 0.05)
    +      assert(m1.coefficients ~== m2.coefficients absTol 0.07)
           assert(m1.intercept ~== m2.intercept absTol 0.05)
         }
    -
    -    val estimator = new LinearSVC().setRegParam(0.01).setTol(0.01)
    -    val dataset = smallBinaryDataset
    -    MLTestingUtils.testArbitrarilyScaledWeights[LinearSVCModel, LinearSVC](
    -      dataset.as[LabeledPoint], estimator, modelEquals)
    -    MLTestingUtils.testOutliersWithSmallWeights[LinearSVCModel, LinearSVC](
    -      dataset.as[LabeledPoint], estimator, 2, modelEquals, outlierRatio = 3)
    -    MLTestingUtils.testOversamplingVsWeighting[LinearSVCModel, LinearSVC](
    -      dataset.as[LabeledPoint], estimator, modelEquals, 42L)
    +    LinearSVC.supportedOptimizers.foreach { opt =>
    +      val estimator = new LinearSVC().setRegParam(0.02).setTol(0.01).setSolver(opt)
    +      val dataset = smallBinaryDataset
    +      MLTestingUtils.testArbitrarilyScaledWeights[LinearSVCModel, LinearSVC](
    +        dataset.as[LabeledPoint], estimator, modelEquals)
    +      MLTestingUtils.testOutliersWithSmallWeights[LinearSVCModel, LinearSVC](
    +        dataset.as[LabeledPoint], estimator, 2, modelEquals, outlierRatio = 3)
    +      MLTestingUtils.testOversamplingVsWeighting[LinearSVCModel, LinearSVC](
    +        dataset.as[LabeledPoint], estimator, modelEquals, 42L)
    +    }
       }
     
    -  test("linearSVC comparison with R e1071 and scikit-learn") {
    -    val trainer1 = new LinearSVC()
    +  test("linearSVC OWLQN comparison with R e1071 and scikit-learn") {
    +    val trainer1 = new LinearSVC().setSolver(LinearSVC.OWLQN)
           .setRegParam(0.00002) // set regParam = 2.0 / datasize / c
    --- End diff --
    
    The hinge loss is not differentiable, so how are you smoothing it before applying a quasi-Newton solver? Since the papers smooth the max, a Newton/quasi-Newton solver should work well on the smoothed objective; if you are keeping the non-differentiable loss, it would be better to use a sub-gradient solver, as suggested by the talk. I will evaluate the formulation.
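    For context, here is a minimal standalone Scala sketch (not part of this PR; names and the particular smoothing are illustrative assumptions) contrasting the raw hinge loss, whose kink at margin 1 only admits a subgradient, with a smoothed (Huberized) hinge whose gradient is continuous and therefore better suited to Newton/quasi-Newton updates:

    ```scala
    // Margin z = y * (w . x). Illustrative only; not the loss LinearSVC uses in this PR.
    object HingeLossSketch {

      /** Raw hinge loss max(0, 1 - z) and one valid subgradient w.r.t. z.
       *  At the kink z = 1 any value in [-1, 0] is a subgradient; we return 0. */
      def hinge(z: Double): (Double, Double) =
        if (z < 1.0) (1.0 - z, -1.0) else (0.0, 0.0)

      /** Smoothed (Huberized) hinge: linear for z <= 0, quadratic around the kink,
       *  zero for z >= 1, so the gradient is continuous everywhere. */
      def smoothedHinge(z: Double): (Double, Double) =
        if (z <= 0.0) (0.5 - z, -1.0)
        else if (z < 1.0) { val d = 1.0 - z; (0.5 * d * d, -d) }
        else (0.0, 0.0)

      def main(args: Array[String]): Unit = {
        // Around z = 1 the raw hinge gradient jumps from -1 to 0,
        // while the smoothed version moves through that point continuously.
        Seq(0.5, 0.99, 1.0, 1.5).foreach { z =>
          println(s"z=$z  hinge=${hinge(z)}  smoothed=${smoothedHinge(z)}")
        }
      }
    }
    ```

    A sub-gradient method can consume the first definition directly, while a quasi-Newton solver assumes something like the second (or another smoothing of the max), which is the distinction the question above is getting at.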

