HyukjinKwon commented on code in PR #48347: URL: https://github.com/apache/spark/pull/48347#discussion_r1830044348
########## mllib/src/main/scala/org/apache/spark/ml/feature/TargetEncoder.scala: ########## @@ -0,0 +1,472 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.ml.feature + +import org.apache.hadoop.fs.Path + +import org.apache.spark.SparkException +import org.apache.spark.annotation.Since +import org.apache.spark.ml.{Estimator, Model} +import org.apache.spark.ml.attribute.NominalAttribute +import org.apache.spark.ml.param._ +import org.apache.spark.ml.param.shared._ +import org.apache.spark.ml.util._ +import org.apache.spark.sql.{Column, DataFrame, Dataset, Row} +import org.apache.spark.sql.functions._ +import org.apache.spark.sql.types._ + +/** Private trait for params and common methods for TargetEncoder and TargetEncoderModel */ +private[ml] trait TargetEncoderBase extends Params with HasLabelCol + with HasInputCol with HasInputCols with HasOutputCol with HasOutputCols with HasHandleInvalid { + + /** + * Param for how to handle invalid data during transform(). + * Options are 'keep' (invalid data presented as an extra categorical feature) or + * 'error' (throw an error). + * Note that this Param is only used during transform; during fitting, invalid data + * will result in an error. 
+ * Default: "error" + * @group param + */ + @Since("4.0.0") + override val handleInvalid: Param[String] = new Param[String](this, "handleInvalid", + "How to handle invalid data during transform(). " + + "Options are 'keep' (invalid data presented as an extra categorical feature) " + + "or 'error' (throw an error). Note that this Param is only used during transform; " + + "during fitting, invalid data will result in an error.", + ParamValidators.inArray(TargetEncoder.supportedHandleInvalids)) + + setDefault(handleInvalid -> TargetEncoder.ERROR_INVALID) + + @Since("4.0.0") + val targetType: Param[String] = new Param[String](this, "targetType", + "Type of label considered during fit(). " + + "Options are 'binary' and 'continuous'. When 'binary', estimates are calculated as " + + "conditional probability of the target given each category. When 'continuous', " + + "estimates are calculated as the average of the target given each category" + + "Note that this Param is only used during fitting.", + ParamValidators.inArray(TargetEncoder.supportedTargetTypes)) + + setDefault(targetType -> TargetEncoder.TARGET_BINARY) + + final def getTargetType: String = $(targetType) + + @Since("4.0.0") + val smoothing: DoubleParam = new DoubleParam(this, "smoothing", + "Smoothing factor for encodings. 
Smoothing blends in-class estimates with overall estimates " + + "according to the relative size of the particular class on the whole dataset, reducing the " + + "risk of overfitting due to unreliable estimates", + ParamValidators.gtEq(0.0)) + + setDefault(smoothing -> 0.0) + + final def getSmoothing: Double = $(smoothing) + + private[feature] lazy val inputFeatures = if (isSet(inputCol)) { + Array($(inputCol)) + } else if (isSet(inputCols)) { + $(inputCols) + } else { + Array.empty[String] + } Review Comment: ```suggestion private[feature] lazy val inputFeatures = if (isSet(inputCol)) { Array($(inputCol)) } else if (isSet(inputCols)) { $(inputCols) } else { Array.empty[String] } ``` ########## examples/src/main/java/org/apache/spark/examples/ml/JavaTargetEncoderExample.java: ########## @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.examples.ml; + +import org.apache.spark.sql.SparkSession; + +// $example on$ +import org.apache.spark.ml.feature.TargetEncoder; +import org.apache.spark.ml.feature.TargetEncoderModel; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.RowFactory; +import org.apache.spark.sql.types.DataTypes; +import org.apache.spark.sql.types.Metadata; +import org.apache.spark.sql.types.StructField; +import org.apache.spark.sql.types.StructType; + +import java.util.Arrays; +import java.util.List; +// $example off$ + +public class JavaTargetEncoderExample { + public static void main(String[] args) { + SparkSession spark = SparkSession + .builder() + .appName("JavaTargetEncoderExample") + .getOrCreate(); + + // Note: categorical features are usually first encoded with StringIndexer + // $example on$ + List<Row> data = Arrays.asList( + RowFactory.create(0.0, 1.0, 0, 10.0), + RowFactory.create(1.0, 0.0, 1, 20.0), + RowFactory.create(2.0, 1.0, 0, 30.0), + RowFactory.create(0.0, 2.0, 1, 40.0), + RowFactory.create(0.0, 1.0, 0, 50.0), + RowFactory.create(2.0, 0.0, 1, 60.0) + ); + + StructType schema = new StructType(new StructField[]{ + new StructField("categoryIndex1", DataTypes.DoubleType, false, Metadata.empty()), + new StructField("categoryIndex2", DataTypes.DoubleType, false, Metadata.empty()), + new StructField("binaryLabel", DataTypes.DoubleType, false, Metadata.empty()), + new StructField("continuousLabel", DataTypes.DoubleType, false, Metadata.empty()) + }); + + Dataset<Row> df = spark.createDataFrame(data, schema); + + // binary target + TargetEncoder bin_encoder = new TargetEncoder() + .setInputCols(new String[] {"categoryIndex1", "categoryIndex2"}) + .setOutputCols(new String[] {"categoryIndex1Target", "categoryIndex2Target"}) + .setLabelCol("binaryLabel") + .setTargetType("binary"); + + TargetEncoderModel bin_model = bin_encoder.fit(df); + Dataset<Row> bin_encoded = bin_model.transform(df); + 
bin_encoded.show(); + + // continuous target + TargetEncoder cont_encoder = new TargetEncoder() + .setInputCols(new String[] {"categoryIndex1", "categoryIndex2"}) + .setOutputCols(new String[] {"categoryIndex1Target", "categoryIndex2Target"}) + .setLabelCol("continuousLabel") + .setTargetType("continuous"); Review Comment: ```suggestion TargetEncoder cont_encoder = new TargetEncoder() .setInputCols(new String[] {"categoryIndex1", "categoryIndex2"}) .setOutputCols(new String[] {"categoryIndex1Target", "categoryIndex2Target"}) .setLabelCol("continuousLabel") .setTargetType("continuous"); ``` ########## mllib/src/test/java/org/apache/spark/ml/feature/JavaTargetEncoderSuite.java: ########## @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.ml.feature; + +import org.apache.spark.SharedSparkSession; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.RowFactory; +import org.apache.spark.sql.types.StructField; +import org.apache.spark.sql.types.StructType; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.spark.sql.types.DataTypes.*; + +public class JavaTargetEncoderSuite extends SharedSparkSession { + + @Test + public void testTargetEncoderBinary() { + + // checkstyle.off: LineLength + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 4, 5.0, 1.0, 2.0/3, 1.0, 1.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 3, 6.0, 0.0, 2.0/3, 0.0, 2.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 3, 7.0, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9)), + RowFactory.create((short)1, 4, 8.0, 1.0, 2.0/3, 1.0, 1.0, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (1.0/2)+(1-1.0/2)*(4.0/9)), + RowFactory.create((short)2, 3, null, 0.0, 1.0/3, 0.0, 0.0, 
(3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9))); + // checkstyle.off: LineLength + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); + + Dataset<Row> dataset = spark.createDataFrame(data, schema); + + TargetEncoder encoder = new TargetEncoder() + .setInputCols(new String[]{"input1", "input2", "input3"}) + .setOutputCols(new String[]{"output1", "output2", "output3"}) + .setTargetType("binary"); Review Comment: ```suggestion TargetEncoder encoder = new TargetEncoder() .setInputCols(new String[]{"input1", "input2", "input3"}) .setOutputCols(new String[]{"output1", "output2", "output3"}) .setTargetType("binary"); ``` ########## mllib/src/test/java/org/apache/spark/ml/feature/JavaTargetEncoderSuite.java: ########## @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.ml.feature; + +import org.apache.spark.SharedSparkSession; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.RowFactory; +import org.apache.spark.sql.types.StructField; +import org.apache.spark.sql.types.StructType; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.spark.sql.types.DataTypes.*; + +public class JavaTargetEncoderSuite extends SharedSparkSession { + + @Test + public void testTargetEncoderBinary() { + + // checkstyle.off: LineLength + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 4, 5.0, 1.0, 2.0/3, 1.0, 1.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 3, 6.0, 0.0, 2.0/3, 0.0, 2.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 3, 7.0, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9)), + RowFactory.create((short)1, 4, 8.0, 1.0, 2.0/3, 1.0, 1.0, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), 
(1.0/2)+(1-1.0/2)*(4.0/9)), + RowFactory.create((short)2, 3, null, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9))); + // checkstyle.off: LineLength + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); + + Dataset<Row> dataset = spark.createDataFrame(data, schema); + + TargetEncoder encoder = new TargetEncoder() + .setInputCols(new String[]{"input1", "input2", "input3"}) + .setOutputCols(new String[]{"output1", "output2", "output3"}) + .setTargetType("binary"); + TargetEncoderModel model = encoder.fit(dataset); + + Dataset<Row> output = model.transform(dataset); + Assertions.assertEquals( + output.select("output1", "output2", "output3").collectAsList(), + output.select("expected1", "expected2", "expected3").collectAsList()); + + Dataset<Row> output_smoothing = model.setSmoothing(1.0).transform(dataset); + Assertions.assertEquals( + output_smoothing.select("output1", "output2", "output3").collectAsList(), + output_smoothing.select("smoothing1", "smoothing2", "smoothing3").collectAsList()); + + } + + @Test + public void testTargetEncoderContinuous() { + + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 10.0, 40.0, 50.0, 20.0, 42.5, 50.0, 27.5), + RowFactory.create((short)1, 4, 5.0, 20.0, 50.0, 50.0, 20.0, 50.0, 50.0, 27.5), + RowFactory.create((short)2, 3, 5.0, 30.0, 60.0, 50.0, 20.0, 57.5, 50.0, 27.5), + RowFactory.create((short)0, 4, 6.0, 40.0, 40.0, 50.0, 50.0, 42.5, 50.0, 
50.0), + RowFactory.create((short)1, 3, 6.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0), + RowFactory.create((short)2, 4, 6.0, 60.0, 60.0, 50.0, 50.0, 57.5, 50.0, 50.0), + RowFactory.create((short)0, 3, 7.0, 70.0, 40.0, 50.0, 70.0, 42.5, 50.0, 60.0), + RowFactory.create((short)1, 4, 8.0, 80.0, 50.0, 50.0, 80.0, 50.0, 50.0, 65.0), + RowFactory.create((short)2, 3, null, 90.0, 60.0, 50.0, 90.0, 57.5, 50.0, 70.0)); Review Comment: ```suggestion List<Row> data = Arrays.asList( RowFactory.create((short)0, 3, 5.0, 10.0, 40.0, 50.0, 20.0, 42.5, 50.0, 27.5), RowFactory.create((short)1, 4, 5.0, 20.0, 50.0, 50.0, 20.0, 50.0, 50.0, 27.5), RowFactory.create((short)2, 3, 5.0, 30.0, 60.0, 50.0, 20.0, 57.5, 50.0, 27.5), RowFactory.create((short)0, 4, 6.0, 40.0, 40.0, 50.0, 50.0, 42.5, 50.0, 50.0), RowFactory.create((short)1, 3, 6.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0), RowFactory.create((short)2, 4, 6.0, 60.0, 60.0, 50.0, 50.0, 57.5, 50.0, 50.0), RowFactory.create((short)0, 3, 7.0, 70.0, 40.0, 50.0, 70.0, 42.5, 50.0, 60.0), RowFactory.create((short)1, 4, 8.0, 80.0, 50.0, 50.0, 80.0, 50.0, 50.0, 65.0), RowFactory.create((short)2, 3, null, 90.0, 60.0, 50.0, 90.0, 57.5, 50.0, 70.0)); ``` ########## mllib/src/test/java/org/apache/spark/ml/feature/JavaTargetEncoderSuite.java: ########## @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.ml.feature; + +import org.apache.spark.SharedSparkSession; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.RowFactory; +import org.apache.spark.sql.types.StructField; +import org.apache.spark.sql.types.StructType; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.spark.sql.types.DataTypes.*; + +public class JavaTargetEncoderSuite extends SharedSparkSession { + + @Test + public void testTargetEncoderBinary() { + + // checkstyle.off: LineLength + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 4, 5.0, 1.0, 2.0/3, 1.0, 1.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 3, 6.0, 0.0, 2.0/3, 0.0, 2.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + 
(4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 3, 7.0, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9)), + RowFactory.create((short)1, 4, 8.0, 1.0, 2.0/3, 1.0, 1.0, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (1.0/2)+(1-1.0/2)*(4.0/9)), + RowFactory.create((short)2, 3, null, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9))); + // checkstyle.off: LineLength + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); + + Dataset<Row> dataset = spark.createDataFrame(data, schema); + + TargetEncoder encoder = new TargetEncoder() + .setInputCols(new String[]{"input1", "input2", "input3"}) + .setOutputCols(new String[]{"output1", "output2", "output3"}) + .setTargetType("binary"); + TargetEncoderModel model = encoder.fit(dataset); + + Dataset<Row> output = model.transform(dataset); + Assertions.assertEquals( + output.select("output1", "output2", "output3").collectAsList(), + output.select("expected1", "expected2", "expected3").collectAsList()); + + Dataset<Row> output_smoothing = model.setSmoothing(1.0).transform(dataset); + Assertions.assertEquals( + output_smoothing.select("output1", "output2", "output3").collectAsList(), + output_smoothing.select("smoothing1", "smoothing2", "smoothing3").collectAsList()); + + } + + @Test + public void testTargetEncoderContinuous() { + + List<Row> data = 
Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 10.0, 40.0, 50.0, 20.0, 42.5, 50.0, 27.5), + RowFactory.create((short)1, 4, 5.0, 20.0, 50.0, 50.0, 20.0, 50.0, 50.0, 27.5), + RowFactory.create((short)2, 3, 5.0, 30.0, 60.0, 50.0, 20.0, 57.5, 50.0, 27.5), + RowFactory.create((short)0, 4, 6.0, 40.0, 40.0, 50.0, 50.0, 42.5, 50.0, 50.0), + RowFactory.create((short)1, 3, 6.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0), + RowFactory.create((short)2, 4, 6.0, 60.0, 60.0, 50.0, 50.0, 57.5, 50.0, 50.0), + RowFactory.create((short)0, 3, 7.0, 70.0, 40.0, 50.0, 70.0, 42.5, 50.0, 60.0), + RowFactory.create((short)1, 4, 8.0, 80.0, 50.0, 50.0, 80.0, 50.0, 50.0, 65.0), + RowFactory.create((short)2, 3, null, 90.0, 60.0, 50.0, 90.0, 57.5, 50.0, 70.0)); + + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); + + Dataset<Row> dataset = spark.createDataFrame(data, schema); + + TargetEncoder encoder = new TargetEncoder() + .setInputCols(new String[]{"input1", "input2", "input3"}) + .setOutputCols(new String[]{"output1", "output2", "output3"}) + .setTargetType("continuous"); + TargetEncoderModel model = encoder.fit(dataset); + + Dataset<Row> output = model.transform(dataset); + Assertions.assertEquals( + output.select("output1", "output2", "output3").collectAsList(), + output.select("expected1", "expected2", "expected3").collectAsList()); + + Dataset<Row> output_smoothing = model.setSmoothing(1.0).transform(dataset); + Assertions.assertEquals( + 
output_smoothing.select("output1", "output2", "output3").collectAsList(), + output_smoothing.select("smoothing1", "smoothing2", "smoothing3").collectAsList()); Review Comment: ```suggestion Assertions.assertEquals( output_smoothing.select("output1", "output2", "output3").collectAsList(), output_smoothing.select("smoothing1", "smoothing2", "smoothing3").collectAsList()); ``` ########## mllib/src/main/scala/org/apache/spark/ml/feature/TargetEncoder.scala: ########## @@ -0,0 +1,472 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.ml.feature + +import org.apache.hadoop.fs.Path + +import org.apache.spark.SparkException +import org.apache.spark.annotation.Since +import org.apache.spark.ml.{Estimator, Model} +import org.apache.spark.ml.attribute.NominalAttribute +import org.apache.spark.ml.param._ +import org.apache.spark.ml.param.shared._ +import org.apache.spark.ml.util._ +import org.apache.spark.sql.{Column, DataFrame, Dataset, Row} +import org.apache.spark.sql.functions._ +import org.apache.spark.sql.types._ + +/** Private trait for params and common methods for TargetEncoder and TargetEncoderModel */ +private[ml] trait TargetEncoderBase extends Params with HasLabelCol + with HasInputCol with HasInputCols with HasOutputCol with HasOutputCols with HasHandleInvalid { + + /** + * Param for how to handle invalid data during transform(). + * Options are 'keep' (invalid data presented as an extra categorical feature) or + * 'error' (throw an error). + * Note that this Param is only used during transform; during fitting, invalid data + * will result in an error. + * Default: "error" + * @group param + */ + @Since("4.0.0") + override val handleInvalid: Param[String] = new Param[String](this, "handleInvalid", + "How to handle invalid data during transform(). " + + "Options are 'keep' (invalid data presented as an extra categorical feature) " + + "or 'error' (throw an error). Note that this Param is only used during transform; " + + "during fitting, invalid data will result in an error.", + ParamValidators.inArray(TargetEncoder.supportedHandleInvalids)) + + setDefault(handleInvalid -> TargetEncoder.ERROR_INVALID) + + @Since("4.0.0") + val targetType: Param[String] = new Param[String](this, "targetType", + "Type of label considered during fit(). " + + "Options are 'binary' and 'continuous'. When 'binary', estimates are calculated as " + + "conditional probability of the target given each category. 
When 'continuous', " + + "estimates are calculated as the average of the target given each category" + + "Note that this Param is only used during fitting.", + ParamValidators.inArray(TargetEncoder.supportedTargetTypes)) + + setDefault(targetType -> TargetEncoder.TARGET_BINARY) + + final def getTargetType: String = $(targetType) + + @Since("4.0.0") + val smoothing: DoubleParam = new DoubleParam(this, "smoothing", + "Smoothing factor for encodings. Smoothing blends in-class estimates with overall estimates " + + "according to the relative size of the particular class on the whole dataset, reducing the " + + "risk of overfitting due to unreliable estimates", + ParamValidators.gtEq(0.0)) + + setDefault(smoothing -> 0.0) + + final def getSmoothing: Double = $(smoothing) + + private[feature] lazy val inputFeatures = if (isSet(inputCol)) { + Array($(inputCol)) + } else if (isSet(inputCols)) { + $(inputCols) + } else { + Array.empty[String] + } + + private[feature] lazy val outputFeatures = if (isSet(outputCol)) { + Array($(outputCol)) + } else if (isSet(outputCols)) { + $(outputCols) + } else { + inputFeatures.map{field: String => s"${field}_indexed"} + } + + private[feature] def validateSchema( + schema: StructType, + fitting: Boolean): StructType = { Review Comment: ```suggestion private[feature] def validateSchema(schema: StructType, fitting: Boolean): StructType = { ``` ########## mllib/src/test/java/org/apache/spark/ml/feature/JavaTargetEncoderSuite.java: ########## @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.ml.feature; + +import org.apache.spark.SharedSparkSession; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.RowFactory; +import org.apache.spark.sql.types.StructField; +import org.apache.spark.sql.types.StructType; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.spark.sql.types.DataTypes.*; + +public class JavaTargetEncoderSuite extends SharedSparkSession { + + @Test + public void testTargetEncoderBinary() { + + // checkstyle.off: LineLength + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 4, 5.0, 1.0, 2.0/3, 1.0, 1.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 3, 6.0, 0.0, 2.0/3, 0.0, 2.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + 
(4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 3, 7.0, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9)), + RowFactory.create((short)1, 4, 8.0, 1.0, 2.0/3, 1.0, 1.0, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (1.0/2)+(1-1.0/2)*(4.0/9)), + RowFactory.create((short)2, 3, null, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9))); + // checkstyle.off: LineLength + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); Review Comment: ```suggestion StructType schema = createStructType(new StructField[]{ createStructField("input1", ShortType, true), createStructField("input2", IntegerType, true), createStructField("input3", DoubleType, true), createStructField("label", DoubleType, false), createStructField("expected1", DoubleType, false), createStructField("expected2", DoubleType, false), createStructField("expected3", DoubleType, false), createStructField("smoothing1", DoubleType, false), createStructField("smoothing2", DoubleType, false), createStructField("smoothing3", DoubleType, false) }); ``` ########## mllib/src/test/java/org/apache/spark/ml/feature/JavaTargetEncoderSuite.java: ########## @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.ml.feature; + +import org.apache.spark.SharedSparkSession; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.RowFactory; +import org.apache.spark.sql.types.StructField; +import org.apache.spark.sql.types.StructType; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.spark.sql.types.DataTypes.*; + +public class JavaTargetEncoderSuite extends SharedSparkSession { + + @Test + public void testTargetEncoderBinary() { + + // checkstyle.off: LineLength + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 4, 5.0, 1.0, 2.0/3, 1.0, 1.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), 
(3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 3, 6.0, 0.0, 2.0/3, 0.0, 2.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 3, 7.0, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9)), + RowFactory.create((short)1, 4, 8.0, 1.0, 2.0/3, 1.0, 1.0, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (1.0/2)+(1-1.0/2)*(4.0/9)), + RowFactory.create((short)2, 3, null, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9))); + // checkstyle.off: LineLength + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); + + Dataset<Row> dataset = spark.createDataFrame(data, schema); + + TargetEncoder encoder = new TargetEncoder() + .setInputCols(new String[]{"input1", "input2", "input3"}) + .setOutputCols(new String[]{"output1", "output2", "output3"}) + .setTargetType("binary"); + TargetEncoderModel model = encoder.fit(dataset); + + Dataset<Row> output = model.transform(dataset); + Assertions.assertEquals( + output.select("output1", "output2", "output3").collectAsList(), + output.select("expected1", "expected2", "expected3").collectAsList()); Review Comment: ```suggestion Assertions.assertEquals( 
output.select("output1", "output2", "output3").collectAsList(), output.select("expected1", "expected2", "expected3").collectAsList()); ``` ########## mllib/src/test/java/org/apache/spark/ml/feature/JavaTargetEncoderSuite.java: ########## @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.ml.feature; + +import org.apache.spark.SharedSparkSession; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.RowFactory; +import org.apache.spark.sql.types.StructField; +import org.apache.spark.sql.types.StructType; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.spark.sql.types.DataTypes.*; + +public class JavaTargetEncoderSuite extends SharedSparkSession { + + @Test + public void testTargetEncoderBinary() { + + // checkstyle.off: LineLength + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 4, 5.0, 1.0, 2.0/3, 1.0, 1.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 3, 6.0, 0.0, 2.0/3, 0.0, 2.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 3, 7.0, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9)), + RowFactory.create((short)1, 4, 8.0, 1.0, 2.0/3, 1.0, 1.0, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (1.0/2)+(1-1.0/2)*(4.0/9)), + RowFactory.create((short)2, 3, null, 0.0, 1.0/3, 0.0, 0.0, 
(3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9))); + // checkstyle.off: LineLength + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); + + Dataset<Row> dataset = spark.createDataFrame(data, schema); + + TargetEncoder encoder = new TargetEncoder() + .setInputCols(new String[]{"input1", "input2", "input3"}) + .setOutputCols(new String[]{"output1", "output2", "output3"}) + .setTargetType("binary"); + TargetEncoderModel model = encoder.fit(dataset); + + Dataset<Row> output = model.transform(dataset); + Assertions.assertEquals( + output.select("output1", "output2", "output3").collectAsList(), + output.select("expected1", "expected2", "expected3").collectAsList()); + + Dataset<Row> output_smoothing = model.setSmoothing(1.0).transform(dataset); + Assertions.assertEquals( + output_smoothing.select("output1", "output2", "output3").collectAsList(), + output_smoothing.select("smoothing1", "smoothing2", "smoothing3").collectAsList()); Review Comment: ```suggestion Assertions.assertEquals( output_smoothing.select("output1", "output2", "output3").collectAsList(), output_smoothing.select("smoothing1", "smoothing2", "smoothing3").collectAsList()); ``` ########## mllib/src/main/scala/org/apache/spark/ml/feature/TargetEncoder.scala: ########## @@ -0,0 +1,472 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.ml.feature + +import org.apache.hadoop.fs.Path + +import org.apache.spark.SparkException +import org.apache.spark.annotation.Since +import org.apache.spark.ml.{Estimator, Model} +import org.apache.spark.ml.attribute.NominalAttribute +import org.apache.spark.ml.param._ +import org.apache.spark.ml.param.shared._ +import org.apache.spark.ml.util._ +import org.apache.spark.sql.{Column, DataFrame, Dataset, Row} +import org.apache.spark.sql.functions._ +import org.apache.spark.sql.types._ + +/** Private trait for params and common methods for TargetEncoder and TargetEncoderModel */ +private[ml] trait TargetEncoderBase extends Params with HasLabelCol + with HasInputCol with HasInputCols with HasOutputCol with HasOutputCols with HasHandleInvalid { + + /** + * Param for how to handle invalid data during transform(). + * Options are 'keep' (invalid data presented as an extra categorical feature) or + * 'error' (throw an error). + * Note that this Param is only used during transform; during fitting, invalid data + * will result in an error. + * Default: "error" + * @group param + */ + @Since("4.0.0") + override val handleInvalid: Param[String] = new Param[String](this, "handleInvalid", + "How to handle invalid data during transform(). 
" + + "Options are 'keep' (invalid data presented as an extra categorical feature) " + + "or 'error' (throw an error). Note that this Param is only used during transform; " + + "during fitting, invalid data will result in an error.", + ParamValidators.inArray(TargetEncoder.supportedHandleInvalids)) + + setDefault(handleInvalid -> TargetEncoder.ERROR_INVALID) + + @Since("4.0.0") + val targetType: Param[String] = new Param[String](this, "targetType", + "Type of label considered during fit(). " + + "Options are 'binary' and 'continuous'. When 'binary', estimates are calculated as " + + "conditional probability of the target given each category. When 'continuous', " + + "estimates are calculated as the average of the target given each category" + + "Note that this Param is only used during fitting.", + ParamValidators.inArray(TargetEncoder.supportedTargetTypes)) + + setDefault(targetType -> TargetEncoder.TARGET_BINARY) + + final def getTargetType: String = $(targetType) + + @Since("4.0.0") + val smoothing: DoubleParam = new DoubleParam(this, "smoothing", + "Smoothing factor for encodings. 
Smoothing blends in-class estimates with overall estimates " + + "according to the relative size of the particular class on the whole dataset, reducing the " + + "risk of overfitting due to unreliable estimates", + ParamValidators.gtEq(0.0)) + + setDefault(smoothing -> 0.0) + + final def getSmoothing: Double = $(smoothing) + + private[feature] lazy val inputFeatures = if (isSet(inputCol)) { + Array($(inputCol)) + } else if (isSet(inputCols)) { + $(inputCols) + } else { + Array.empty[String] + } + + private[feature] lazy val outputFeatures = if (isSet(outputCol)) { + Array($(outputCol)) + } else if (isSet(outputCols)) { + $(outputCols) + } else { + inputFeatures.map{field: String => s"${field}_indexed"} + } Review Comment: ```suggestion private[feature] lazy val outputFeatures = if (isSet(outputCol)) { Array($(outputCol)) } else if (isSet(outputCols)) { $(outputCols) } else { inputFeatures.map{field: String => s"${field}_indexed"} } ``` ########## mllib/src/test/java/org/apache/spark/ml/feature/JavaTargetEncoderSuite.java: ########## @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.ml.feature; + +import org.apache.spark.SharedSparkSession; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.RowFactory; +import org.apache.spark.sql.types.StructField; +import org.apache.spark.sql.types.StructType; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.spark.sql.types.DataTypes.*; + +public class JavaTargetEncoderSuite extends SharedSparkSession { + + @Test + public void testTargetEncoderBinary() { + + // checkstyle.off: LineLength + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 4, 5.0, 1.0, 2.0/3, 1.0, 1.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 3, 6.0, 0.0, 2.0/3, 0.0, 2.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 3, 7.0, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9)), + RowFactory.create((short)1, 4, 8.0, 1.0, 2.0/3, 1.0, 1.0, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (1.0/2)+(1-1.0/2)*(4.0/9)), + RowFactory.create((short)2, 3, null, 0.0, 1.0/3, 0.0, 0.0, 
(3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9))); Review Comment: ```suggestion RowFactory.create((short) 0, 3, 5.0, 0.0, 1.0 / 3, 0.0, 1.0 / 3, (3.0 / 4) * (1.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9), (1 - 5.0 / 6) * (4.0 / 9), (3.0 / 4) * (1.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9)), RowFactory.create((short) 1, 4, 5.0, 1.0, 2.0 / 3, 1.0, 1.0 / 3, (3.0 / 4) * (2.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9), (4.0 / 5) * 1 + (1 - 4.0 / 5) * (4.0 / 9), (3.0 / 4) * (1.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9)), RowFactory.create((short) 2, 3, 5.0, 0.0, 1.0 / 3, 0.0, 1.0 / 3, (3.0 / 4) * (1.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9), (1 - 5.0 / 6) * (4.0 / 9), (3.0 / 4) * (1.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9)), RowFactory.create((short) 0, 4, 6.0, 1.0, 1.0 / 3, 1.0, 2.0 / 3, (3.0 / 4) * (1.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9), (4.0 / 5) * 1 + (1 - 4.0 / 5) * (4.0 / 9), (3.0 / 4) * (2.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9)), RowFactory.create((short) 1, 3, 6.0, 0.0, 2.0 / 3, 0.0, 2.0 / 3, (3.0 / 4) * (2.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9), (1 - 5.0 / 6) * (4.0 / 9), (3.0 / 4) * (2.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9)), RowFactory.create((short) 2, 4, 6.0, 1.0, 1.0 / 3, 1.0, 2.0 / 3, (3.0 / 4) * (1.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9), (4.0 / 5) * 1 + (1 - 4.0 / 5) * (4.0 / 9), (3.0 / 4) * (2.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9)), RowFactory.create((short) 0, 3, 7.0, 0.0, 1.0 / 3, 0.0, 0.0, (3.0 / 4) * (1.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9), (1 - 5.0 / 6) * (4.0 / 9), (1 - 1.0 / 2) * (4.0 / 9)), RowFactory.create((short) 1, 4, 8.0, 1.0, 2.0 / 3, 1.0, 1.0, (3.0 / 4) * (2.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9), (4.0 / 5) * 1 + (1 - 4.0 / 5) * (4.0 / 9), (1.0 / 2) + (1 - 1.0 / 2) * (4.0 / 9)), RowFactory.create((short) 2, 3, null, 0.0, 1.0 / 3, 0.0, 0.0, (3.0 / 4) * (1.0 / 3) + (1 - 3.0 / 4) * (4.0 / 9), (1 - 5.0 / 6) * (4.0 / 9), (1 - 1.0 / 2) * (4.0 / 9))); ``` ########## mllib/src/test/java/org/apache/spark/ml/feature/JavaTargetEncoderSuite.java: ########## @@ -0,0 +1,140 @@ +/* + * Licensed to 
the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.ml.feature; + +import org.apache.spark.SharedSparkSession; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.RowFactory; +import org.apache.spark.sql.types.StructField; +import org.apache.spark.sql.types.StructType; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.spark.sql.types.DataTypes.*; + +public class JavaTargetEncoderSuite extends SharedSparkSession { + + @Test + public void testTargetEncoderBinary() { + + // checkstyle.off: LineLength + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 4, 5.0, 1.0, 2.0/3, 1.0, 1.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, 
(3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 3, 6.0, 0.0, 2.0/3, 0.0, 2.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 3, 7.0, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9)), + RowFactory.create((short)1, 4, 8.0, 1.0, 2.0/3, 1.0, 1.0, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (1.0/2)+(1-1.0/2)*(4.0/9)), + RowFactory.create((short)2, 3, null, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9))); + // checkstyle.off: LineLength + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); + + Dataset<Row> dataset = spark.createDataFrame(data, schema); + + TargetEncoder encoder = new TargetEncoder() + .setInputCols(new String[]{"input1", "input2", "input3"}) + .setOutputCols(new String[]{"output1", "output2", "output3"}) + .setTargetType("binary"); + TargetEncoderModel model = encoder.fit(dataset); + + Dataset<Row> output = model.transform(dataset); + Assertions.assertEquals( + output.select("output1", "output2", "output3").collectAsList(), + output.select("expected1", "expected2", "expected3").collectAsList()); + + 
Dataset<Row> output_smoothing = model.setSmoothing(1.0).transform(dataset); + Assertions.assertEquals( + output_smoothing.select("output1", "output2", "output3").collectAsList(), + output_smoothing.select("smoothing1", "smoothing2", "smoothing3").collectAsList()); + + } + + @Test + public void testTargetEncoderContinuous() { + + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 10.0, 40.0, 50.0, 20.0, 42.5, 50.0, 27.5), + RowFactory.create((short)1, 4, 5.0, 20.0, 50.0, 50.0, 20.0, 50.0, 50.0, 27.5), + RowFactory.create((short)2, 3, 5.0, 30.0, 60.0, 50.0, 20.0, 57.5, 50.0, 27.5), + RowFactory.create((short)0, 4, 6.0, 40.0, 40.0, 50.0, 50.0, 42.5, 50.0, 50.0), + RowFactory.create((short)1, 3, 6.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0), + RowFactory.create((short)2, 4, 6.0, 60.0, 60.0, 50.0, 50.0, 57.5, 50.0, 50.0), + RowFactory.create((short)0, 3, 7.0, 70.0, 40.0, 50.0, 70.0, 42.5, 50.0, 60.0), + RowFactory.create((short)1, 4, 8.0, 80.0, 50.0, 50.0, 80.0, 50.0, 50.0, 65.0), + RowFactory.create((short)2, 3, null, 90.0, 60.0, 50.0, 90.0, 57.5, 50.0, 70.0)); + + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); + + Dataset<Row> dataset = spark.createDataFrame(data, schema); + + TargetEncoder encoder = new TargetEncoder() + .setInputCols(new String[]{"input1", "input2", "input3"}) + .setOutputCols(new String[]{"output1", "output2", "output3"}) + .setTargetType("continuous"); Review Comment: ```suggestion TargetEncoder encoder = 
new TargetEncoder() .setInputCols(new String[]{"input1", "input2", "input3"}) .setOutputCols(new String[]{"output1", "output2", "output3"}) .setTargetType("continuous"); ``` ########## mllib/src/test/java/org/apache/spark/ml/feature/JavaTargetEncoderSuite.java: ########## @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.ml.feature; + +import org.apache.spark.SharedSparkSession; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.RowFactory; +import org.apache.spark.sql.types.StructField; +import org.apache.spark.sql.types.StructType; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.spark.sql.types.DataTypes.*; + +public class JavaTargetEncoderSuite extends SharedSparkSession { + + @Test + public void testTargetEncoderBinary() { + + // checkstyle.off: LineLength + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 4, 5.0, 1.0, 2.0/3, 1.0, 1.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 3, 6.0, 0.0, 2.0/3, 0.0, 2.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 3, 7.0, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9)), + RowFactory.create((short)1, 4, 8.0, 1.0, 2.0/3, 1.0, 1.0, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (1.0/2)+(1-1.0/2)*(4.0/9)), + RowFactory.create((short)2, 3, null, 0.0, 1.0/3, 0.0, 0.0, 
(3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9))); + // checkstyle.off: LineLength + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); + + Dataset<Row> dataset = spark.createDataFrame(data, schema); + + TargetEncoder encoder = new TargetEncoder() + .setInputCols(new String[]{"input1", "input2", "input3"}) + .setOutputCols(new String[]{"output1", "output2", "output3"}) + .setTargetType("binary"); + TargetEncoderModel model = encoder.fit(dataset); + + Dataset<Row> output = model.transform(dataset); + Assertions.assertEquals( + output.select("output1", "output2", "output3").collectAsList(), + output.select("expected1", "expected2", "expected3").collectAsList()); + + Dataset<Row> output_smoothing = model.setSmoothing(1.0).transform(dataset); + Assertions.assertEquals( + output_smoothing.select("output1", "output2", "output3").collectAsList(), + output_smoothing.select("smoothing1", "smoothing2", "smoothing3").collectAsList()); + + } + + @Test + public void testTargetEncoderContinuous() { + + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 10.0, 40.0, 50.0, 20.0, 42.5, 50.0, 27.5), + RowFactory.create((short)1, 4, 5.0, 20.0, 50.0, 50.0, 20.0, 50.0, 50.0, 27.5), + RowFactory.create((short)2, 3, 5.0, 30.0, 60.0, 50.0, 20.0, 57.5, 50.0, 27.5), + RowFactory.create((short)0, 4, 6.0, 40.0, 40.0, 50.0, 50.0, 42.5, 50.0, 50.0), + RowFactory.create((short)1, 3, 6.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0), + 
RowFactory.create((short)2, 4, 6.0, 60.0, 60.0, 50.0, 50.0, 57.5, 50.0, 50.0), + RowFactory.create((short)0, 3, 7.0, 70.0, 40.0, 50.0, 70.0, 42.5, 50.0, 60.0), + RowFactory.create((short)1, 4, 8.0, 80.0, 50.0, 50.0, 80.0, 50.0, 50.0, 65.0), + RowFactory.create((short)2, 3, null, 90.0, 60.0, 50.0, 90.0, 57.5, 50.0, 70.0)); + + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) Review Comment: ```suggestion StructType schema = createStructType(new StructField[]{ createStructField("input1", ShortType, true), createStructField("input2", IntegerType, true), createStructField("input3", DoubleType, true), createStructField("label", DoubleType, false), createStructField("expected1", DoubleType, false), createStructField("expected2", DoubleType, false), createStructField("expected3", DoubleType, false), createStructField("smoothing1", DoubleType, false), createStructField("smoothing2", DoubleType, false), createStructField("smoothing3", DoubleType, false) ``` ########## mllib/src/test/java/org/apache/spark/ml/feature/JavaTargetEncoderSuite.java: ########## @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.ml.feature; + +import org.apache.spark.SharedSparkSession; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.RowFactory; +import org.apache.spark.sql.types.StructField; +import org.apache.spark.sql.types.StructType; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.spark.sql.types.DataTypes.*; + +public class JavaTargetEncoderSuite extends SharedSparkSession { + + @Test + public void testTargetEncoderBinary() { + + // checkstyle.off: LineLength + List<Row> data = Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 4, 5.0, 1.0, 2.0/3, 1.0, 1.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 3, 5.0, 0.0, 1.0/3, 0.0, 1.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)1, 3, 6.0, 0.0, 2.0/3, 0.0, 2.0/3, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)2, 4, 6.0, 1.0, 1.0/3, 1.0, 2.0/3, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + 
(4.0/5)*1+(1-4.0/5)*(4.0/9), (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9)), + RowFactory.create((short)0, 3, 7.0, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9)), + RowFactory.create((short)1, 4, 8.0, 1.0, 2.0/3, 1.0, 1.0, (3.0/4)*(2.0/3)+(1-3.0/4)*(4.0/9), + (4.0/5)*1+(1-4.0/5)*(4.0/9), (1.0/2)+(1-1.0/2)*(4.0/9)), + RowFactory.create((short)2, 3, null, 0.0, 1.0/3, 0.0, 0.0, (3.0/4)*(1.0/3)+(1-3.0/4)*(4.0/9), + (1-5.0/6)*(4.0/9), (1-1.0/2)*(4.0/9))); + // checkstyle.on: LineLength + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); + + Dataset<Row> dataset = spark.createDataFrame(data, schema); + + TargetEncoder encoder = new TargetEncoder() + .setInputCols(new String[]{"input1", "input2", "input3"}) + .setOutputCols(new String[]{"output1", "output2", "output3"}) + .setTargetType("binary"); + TargetEncoderModel model = encoder.fit(dataset); + + Dataset<Row> output = model.transform(dataset); + Assertions.assertEquals( + output.select("output1", "output2", "output3").collectAsList(), + output.select("expected1", "expected2", "expected3").collectAsList()); + + Dataset<Row> output_smoothing = model.setSmoothing(1.0).transform(dataset); + Assertions.assertEquals( + output_smoothing.select("output1", "output2", "output3").collectAsList(), + output_smoothing.select("smoothing1", "smoothing2", "smoothing3").collectAsList()); + + } + + @Test + public void testTargetEncoderContinuous() { + + List<Row> data = 
Arrays.asList( + RowFactory.create((short)0, 3, 5.0, 10.0, 40.0, 50.0, 20.0, 42.5, 50.0, 27.5), + RowFactory.create((short)1, 4, 5.0, 20.0, 50.0, 50.0, 20.0, 50.0, 50.0, 27.5), + RowFactory.create((short)2, 3, 5.0, 30.0, 60.0, 50.0, 20.0, 57.5, 50.0, 27.5), + RowFactory.create((short)0, 4, 6.0, 40.0, 40.0, 50.0, 50.0, 42.5, 50.0, 50.0), + RowFactory.create((short)1, 3, 6.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0), + RowFactory.create((short)2, 4, 6.0, 60.0, 60.0, 50.0, 50.0, 57.5, 50.0, 50.0), + RowFactory.create((short)0, 3, 7.0, 70.0, 40.0, 50.0, 70.0, 42.5, 50.0, 60.0), + RowFactory.create((short)1, 4, 8.0, 80.0, 50.0, 50.0, 80.0, 50.0, 50.0, 65.0), + RowFactory.create((short)2, 3, null, 90.0, 60.0, 50.0, 90.0, 57.5, 50.0, 70.0)); + + StructType schema = createStructType(new StructField[]{ + createStructField("input1", ShortType, true), + createStructField("input2", IntegerType, true), + createStructField("input3", DoubleType, true), + createStructField("label", DoubleType, false), + createStructField("expected1", DoubleType, false), + createStructField("expected2", DoubleType, false), + createStructField("expected3", DoubleType, false), + createStructField("smoothing1", DoubleType, false), + createStructField("smoothing2", DoubleType, false), + createStructField("smoothing3", DoubleType, false) + }); + + Dataset<Row> dataset = spark.createDataFrame(data, schema); + + TargetEncoder encoder = new TargetEncoder() + .setInputCols(new String[]{"input1", "input2", "input3"}) + .setOutputCols(new String[]{"output1", "output2", "output3"}) + .setTargetType("continuous"); + TargetEncoderModel model = encoder.fit(dataset); + + Dataset<Row> output = model.transform(dataset); + Assertions.assertEquals( + output.select("output1", "output2", "output3").collectAsList(), + output.select("expected1", "expected2", "expected3").collectAsList()); Review Comment: ```suggestion Assertions.assertEquals( output.select("output1", "output2", "output3").collectAsList(), 
output.select("expected1", "expected2", "expected3").collectAsList()); ``` -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected] --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
