Github user actuaryzhang commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16699#discussion_r124403889
  
    --- Diff: 
mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala
 ---
    @@ -798,77 +798,184 @@ class GeneralizedLinearRegressionSuite
         }
       }
     
    -  test("glm summary: gaussian family with weight") {
    +  test("generalized linear regression with offset") {
         /*
    -       R code:
    +      R code:
    +      library(statmod)
     
    -       A <- matrix(c(0, 1, 2, 3, 5, 7, 11, 13), 4, 2)
    -       b <- c(17, 19, 23, 29)
    -       w <- c(1, 2, 3, 4)
    -       df <- as.data.frame(cbind(A, b))
    -     */
    -    val datasetWithWeight = Seq(
    -      Instance(17.0, 1.0, Vectors.dense(0.0, 5.0).toSparse),
    -      Instance(19.0, 2.0, Vectors.dense(1.0, 7.0)),
    -      Instance(23.0, 3.0, Vectors.dense(2.0, 11.0)),
    -      Instance(29.0, 4.0, Vectors.dense(3.0, 13.0))
    +      df <- as.data.frame(matrix(c(
    +        0.2, 1.0, 2.0, 0.0, 5.0,
    +        0.5, 2.1, 0.5, 1.0, 2.0,
    +        0.9, 0.4, 1.0, 2.0, 1.0,
    +        0.7, 0.7, 0.0, 3.0, 3.0), 4, 5, byrow = TRUE))
    +      families <- list(gaussian, binomial, poisson, Gamma, tweedie(1.5))
    +      f1 <- V1 ~ -1 + V4 + V5
    +      f2 <- V1 ~ V4 + V5
    +      for (f in c(f1, f2)) {
    +        for (fam in families) {
    +          model <- glm(f, df, family = fam, weights = V2, offset = V3)
    +          print(as.vector(coef(model)))
    +        }
    +      }
    +      [1]  0.5169222 -0.3344444
    +      [1]  0.9419107 -0.6864404
    +      [1]  0.1812436 -0.6568422
    +      [1] -0.2869094  0.7857710
    +      [1] 0.1055254 0.2979113
    +      [1] -0.05990345  0.53188982 -0.32118415
    +      [1] -0.2147117  0.9911750 -0.6356096
    +      [1] -1.5616130  0.6646470 -0.3192581
    +      [1]  0.3390397 -0.3406099  0.6870259
    +      [1] 0.3665034 0.1039416 0.1484616
    +    */
    +    val dataset = Seq(
    +      OffsetInstance(0.2, 1.0, 2.0, Vectors.dense(0.0, 5.0)),
    +      OffsetInstance(0.5, 2.1, 0.5, Vectors.dense(1.0, 2.0)),
    +      OffsetInstance(0.9, 0.4, 1.0, Vectors.dense(2.0, 1.0)),
    +      OffsetInstance(0.7, 0.7, 0.0, Vectors.dense(3.0, 3.0))
         ).toDF()
    +
    +    val expected = Seq(
    +      Vectors.dense(0, 0.5169222, -0.3344444),
    +      Vectors.dense(0, 0.9419107, -0.6864404),
    +      Vectors.dense(0, 0.1812436, -0.6568422),
    +      Vectors.dense(0, -0.2869094, 0.785771),
    +      Vectors.dense(0, 0.1055254, 0.2979113),
    +      Vectors.dense(-0.05990345, 0.53188982, -0.32118415),
    +      Vectors.dense(-0.2147117, 0.991175, -0.6356096),
    +      Vectors.dense(-1.561613, 0.664647, -0.3192581),
    +      Vectors.dense(0.3390397, -0.3406099, 0.6870259),
    +      Vectors.dense(0.3665034, 0.1039416, 0.1484616))
    +
    +    import GeneralizedLinearRegression._
    +
    +    var idx = 0
    +
    +    for (fitIntercept <- Seq(false, true)) {
    +      for (family <- Seq("gaussian", "binomial", "poisson", "gamma", 
"tweedie")) {
    +        val trainer = new GeneralizedLinearRegression().setFamily(family)
    +          .setFitIntercept(fitIntercept).setOffsetCol("offset")
    +          .setWeightCol("weight").setLinkPredictionCol("linkPrediction")
    +        if (family == "tweedie") trainer.setVariancePower(1.5)
    +        val model = trainer.fit(dataset)
    +        val actual = Vectors.dense(model.intercept, model.coefficients(0), 
model.coefficients(1))
    +        assert(actual ~= expected(idx) absTol 1e-4, s"Model mismatch: GLM 
with family = $family," +
    +          s" and fitIntercept = $fitIntercept.")
    +
    +        val familyLink = FamilyAndLink(trainer)
    +        model.transform(dataset).select("features", "offset", 
"prediction", "linkPrediction")
    +          .collect().foreach {
    +          case Row(features: DenseVector, offset: Double, prediction1: 
Double,
    +          linkPrediction1: Double) =>
    +            val eta = BLAS.dot(features, model.coefficients) + 
model.intercept + offset
    +            val prediction2 = familyLink.fitted(eta)
    +            val linkPrediction2 = eta
    +            assert(prediction1 ~= prediction2 relTol 1E-5, "Prediction 
mismatch: GLM with " +
    +              s"family = $family, and fitIntercept = $fitIntercept.")
    +            assert(linkPrediction1 ~= linkPrediction2 relTol 1E-5, "Link 
Prediction mismatch: " +
    +              s"GLM with family = $family, and fitIntercept = 
$fitIntercept.")
    +        }
    +
    +        idx += 1
    +      }
    +    }
    +  }
    +
    +  test("generalized linear regression: predict with no offset") {
    +    val trainData = Seq(
    +      OffsetInstance(2.0, 1.0, 2.0, Vectors.dense(0.0, 5.0)),
    +      OffsetInstance(8.0, 2.0, 3.0, Vectors.dense(1.0, 7.0)),
    +      OffsetInstance(3.0, 3.0, 1.0, Vectors.dense(2.0, 11.0)),
    +      OffsetInstance(9.0, 4.0, 4.0, Vectors.dense(3.0, 13.0))
    +    ).toDF()
    +    val testData = trainData.select("weight", "features")
    +
    +    val trainer = new GeneralizedLinearRegression()
    +      .setFamily("poisson")
    +      .setWeightCol("weight")
    +      .setOffsetCol("offset")
    +      .setLinkPredictionCol("linkPrediction")
    +
    +    val model = trainer.fit(trainData)
    +    model.transform(testData).select("features", "linkPrediction")
    +      .collect().foreach {
    +      case Row(features: DenseVector, linkPrediction1: Double) =>
    +        val linkPrediction2 = BLAS.dot(features, model.coefficients) + 
model.intercept
    +        assert(linkPrediction1 ~= linkPrediction2 relTol 1E-5, "Link 
Prediction mismatch")
    +    }
    +  }
    +
    +  test("glm summary: gaussian family with weight and offset") {
         /*
    -       R code:
    +      R code:
     
    -       model <- glm(formula = "b ~ .", family="gaussian", data = df, 
weights = w)
    -       summary(model)
    +      A <- matrix(c(0, 1, 2, 3, 5, 7, 11, 13), 4, 2)
    +      b <- c(17, 19, 23, 29)
    +      w <- c(1, 2, 3, 4)
    +      off <- c(2, 3, 1, 4)
    +      df <- as.data.frame(cbind(A, b))
    +     */
    +    val dataset = Seq(
    +      OffsetInstance(17.0, 1.0, 2.0, Vectors.dense(0.0, 5.0).toSparse),
    +      OffsetInstance(19.0, 2.0, 3.0, Vectors.dense(1.0, 7.0)),
    +      OffsetInstance(23.0, 3.0, 1.0, Vectors.dense(2.0, 11.0)),
    +      OffsetInstance(29.0, 4.0, 4.0, Vectors.dense(3.0, 13.0))
    +    ).toDF()
    +    /*
    +      R code:
     
    -       Deviance Residuals:
    -           1       2       3       4
    -       1.920  -1.358  -1.109   0.960
    +      model <- glm(formula = "b ~ .", family = "gaussian", data = df,
    +                   weights = w, offset = off)
    +      summary(model)
     
    -       Coefficients:
    -                   Estimate Std. Error t value Pr(>|t|)
    -       (Intercept)   18.080      9.608   1.882    0.311
    -       V1             6.080      5.556   1.094    0.471
    -       V2            -0.600      1.960  -0.306    0.811
    +      Deviance Residuals:
    +            1        2        3        4
    +       0.9600  -0.6788  -0.5543   0.4800
     
    -       (Dispersion parameter for gaussian family taken to be 7.68)
    +      Coefficients:
    +                  Estimate Std. Error t value Pr(>|t|)
    +      (Intercept)   5.5400     4.8040   1.153    0.455
    +      V1           -0.9600     2.7782  -0.346    0.788
    +      V2            1.7000     0.9798   1.735    0.333
     
    -           Null deviance: 202.00  on 3  degrees of freedom
    -       Residual deviance:   7.68  on 1  degrees of freedom
    -       AIC: 18.783
    +      (Dispersion parameter for gaussian family taken to be 1.92)
     
    -       Number of Fisher Scoring iterations: 2
    +          Null deviance: 152.10  on 3  degrees of freedom
    +      Residual deviance:   1.92  on 1  degrees of freedom
    +      AIC: 13.238
     
    -       residuals(model, type="pearson")
    -              1         2         3         4
    -       1.920000 -1.357645 -1.108513  0.960000
    +      Number of Fisher Scoring iterations: 2
     
    -       residuals(model, type="working")
    +      residuals(model, type = "pearson")
    +               1          2          3          4
    +      0.9600000 -0.6788225 -0.5542563  0.4800000
    +      residuals(model, type = "working")
               1     2     3     4
    -       1.92 -0.96 -0.64  0.48
    -
    -       residuals(model, type="response")
    +      0.96 -0.48 -0.32  0.24
    --- End diff --
    
    They don't have the same indentation as the old code, but within the new
block of code, every line is still aligned. Won't change.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to