Github user hhbyyh commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19020#discussion_r147323528
  
    --- Diff: mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala ---
    @@ -69,25 +70,103 @@ private[regression] trait LinearRegressionParams extends PredictorParams
         "The solver algorithm for optimization. Supported options: " +
           s"${supportedSolvers.mkString(", ")}. (Default auto)",
         ParamValidators.inArray[String](supportedSolvers))
    +
    +  /**
    +   * The loss function to be optimized.
    +   * Supported options: "squaredError" and "huber".
    +   * Default: "squaredError"
    +   *
    +   * @group param
    +   */
    +  @Since("2.3.0")
    +  final override val loss: Param[String] = new Param[String](this, "loss", "The loss function to" +
    +    s" be optimized. Supported options: ${supportedLosses.mkString(", ")}. (Default squaredError)",
    +    ParamValidators.inArray[String](supportedLosses))
    +
    +  /**
    +   * The shape parameter to control the amount of robustness. Must be > 1.0.
    +   * At larger values of epsilon, the huber criterion becomes more similar to least squares
    +   * regression; for small values of epsilon, the criterion is more similar to L1 regression.
    +   * Default is 1.35 to get as much robustness as possible while retaining
    +   * 95% statistical efficiency for normally distributed data.
    +   * Only valid when "loss" is "huber".
    +   *
    +   * @group expertParam
    +   */
    +  @Since("2.3.0")
    +  final val epsilon = new DoubleParam(this, "epsilon", "The shape parameter to control the " +
    +    "amount of robustness. Must be > 1.0.", ParamValidators.gt(1.0))
    +
    +  /** @group getExpertParam */
    +  @Since("2.3.0")
    +  def getEpsilon: Double = $(epsilon)
    +
    +  override protected def validateAndTransformSchema(
    +      schema: StructType,
    +      fitting: Boolean,
    +      featuresDataType: DataType): StructType = {
    +    if ($(loss) == Huber) {
    --- End diff --
    
    minor: should this condition also check `&& fitting`?
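
    For reference, a minimal sketch of what that suggestion might look like (assuming `Huber` is the loss constant defined in this trait, that the huber-specific checks only matter when fitting, and that the method delegates to super for the shared validation; the body of the huber branch is omitted since it is not shown in the quoted diff):

        override protected def validateAndTransformSchema(
            schema: StructType,
            fitting: Boolean,
            featuresDataType: DataType): StructType = {
          // Only run the huber-specific validation while fitting, as suggested above.
          if ($(loss) == Huber && fitting) {
            // huber-specific parameter checks from the PR would go here
          }
          super.validateAndTransformSchema(schema, fitting, featuresDataType)
        }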


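    For anyone following along, a rough usage sketch of the two new params, assuming the PR also wires up the matching `setLoss` / `setEpsilon` setters on `LinearRegression` (`training` is a placeholder DataFrame with the usual "features" and "label" columns):

        import org.apache.spark.ml.regression.LinearRegression

        // Robust linear regression with the proposed huber loss; epsilon (default 1.35)
        // controls how large a residual must be before it is penalized linearly
        // instead of quadratically.
        val lr = new LinearRegression()
          .setLoss("huber")
          .setEpsilon(1.35)
        val model = lr.fit(training)
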
---
