[GitHub] [flink-ml] yunfengzhou-hub commented on a diff in pull request #160: [FLINK-29434] Add AlgoOperator for Splitter

2022-11-02 Thread GitBox


yunfengzhou-hub commented on code in PR #160:
URL: https://github.com/apache/flink-ml/pull/160#discussion_r1012423267


##
flink-ml-python/pyflink/ml/lib/feature/randomsplitter.py:
##
@@ -0,0 +1,80 @@
+
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing
+from typing import Tuple
+from pyflink.ml.core.param import Param, FloatArrayParam, ParamValidator
+from pyflink.ml.core.wrapper import JavaWithParams
+from pyflink.ml.lib.feature.common import JavaFeatureTransformer
+
+
+class _SplitterParams(
+    JavaWithParams
+):
+    """
+    Checks the weights parameter.
+    """
+    def weights_validator(self) -> ParamValidator[Tuple[float]]:
+        class WeightsValidator(ParamValidator[Tuple[float]]):
+            def validate(self, weights: Tuple[float]) -> bool:
+                for val in weights:
+                    if val <= 0.0 or val >= 1.0:
+                        return False
+                weights_set = set(weights)
+                if len(weights_set) != len(weights):
+                    return False
+                return len(weights_set) != 0
+        return WeightsValidator()
+
+    """
+    Params for :class:`RandomSplitter`.
+    """
+    WEIGHTS: Param[Tuple[float]] = FloatArrayParam(
+        "weights",
+        "The weights of data splitting.",
+        [0.5],

Review Comment:
   The default value should be [1.0, 1.0].
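For context, a plain-Python illustration (independent of the pyflink.ml API) of what the suggested default means: `[1.0, 1.0]` describes two equally weighted output splits, whereas a single-element value such as `[0.5]` describes only one output.

```python
# Hypothetical illustration, not the pyflink.ml API: normalizing the
# suggested default weights gives the expected share of each output split.
suggested_default = [1.0, 1.0]
total = sum(suggested_default)
expected_shares = [w / total for w in suggested_default]
print(expected_shares)  # [0.5, 0.5] -> two splits of equal expected size
```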



##
flink-ml-python/pyflink/ml/lib/feature/randomsplitter.py:
##
@@ -0,0 +1,80 @@
+
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing
+from typing import Tuple
+from pyflink.ml.core.param import Param, FloatArrayParam, ParamValidator
+from pyflink.ml.core.wrapper import JavaWithParams
+from pyflink.ml.lib.feature.common import JavaFeatureTransformer
+
+
+class _SplitterParams(
+    JavaWithParams
+):
+    """
+    Checks the weights parameter.
+    """
+    def weights_validator(self) -> ParamValidator[Tuple[float]]:
+        class WeightsValidator(ParamValidator[Tuple[float]]):
+            def validate(self, weights: Tuple[float]) -> bool:
+                for val in weights:
+                    if val <= 0.0 or val >= 1.0:
+                        return False
+                weights_set = set(weights)
+                if len(weights_set) != len(weights):
+                    return False
+                return len(weights_set) != 0
+        return WeightsValidator()

Review Comment:
   Let's make the validator the same as that in Java.
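For reference, a minimal sketch of what an aligned Python validator might look like, assuming the Java-side `weightsValidator()` requires a non-null array of at least two strictly positive weights (as described in the API discussion further down this thread); the exact checks should be copied from the Java class.

```python
from typing import Tuple

from pyflink.ml.core.param import ParamValidator


class _WeightsValidator(ParamValidator[Tuple[float]]):
    """Hypothetical validator mirroring the Java-side checks discussed in
    this thread: at least two weights, all strictly positive."""

    def validate(self, weights: Tuple[float]) -> bool:
        if weights is None or len(weights) < 2:
            return False
        return all(w > 0.0 for w in weights)
```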



##
flink-ml-lib/src/main/java/org/apache/flink/ml/feature/randomsplitter/RandomSplitter.java:
##
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or 

[GitHub] [flink-ml] yunfengzhou-hub commented on a diff in pull request #160: [FLINK-29434] Add AlgoOperator for Splitter

2022-10-27 Thread GitBox


yunfengzhou-hub commented on code in PR #160:
URL: https://github.com/apache/flink-ml/pull/160#discussion_r1007543761


##
flink-ml-lib/src/main/java/org/apache/flink/ml/feature/randomsplitter/RandomSplitterParams.java:
##
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.feature.randomsplitter;
+
+import org.apache.flink.ml.param.DoubleArrayParam;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.param.ParamValidator;
+import org.apache.flink.ml.param.WithParams;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Params of {@link RandomSplitter}.
+ *
+ * @param <T> The class type of this instance.
+ */
+public interface RandomSplitterParams<T> extends WithParams<T> {
+    Param<Double[]> WEIGHTS =
+            new DoubleArrayParam(
+                    "weights", "The weights of data splitting.", null, weightsValidator());

Review Comment:
   Weight is different from fraction. Weight means the proportion of the number of elements in each output split table, instead of a probability threshold like the fraction in the previous implementation.
   
   `setWeight(1,1)` is equal to `setThreshold(0.5)`, and `setWeight(1,2,2)` is equal to `setThreshold(0.2,0.6)`.
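For illustration, the equivalences above can be checked with a small plain-Python helper (hypothetical, not part of the PR) that converts weights into the cumulative thresholds used by a threshold-based splitter.

```python
from typing import List


def weights_to_thresholds(weights: List[float]) -> List[float]:
    """Normalize the weights and return their partial sums, dropping the
    final 1.0; hypothetical helper for illustration only."""
    total = sum(weights)
    thresholds = []
    acc = 0.0
    for w in weights[:-1]:  # the last split takes whatever remains
        acc += w / total
        thresholds.append(acc)
    return thresholds


print(weights_to_thresholds([1, 1]))     # [0.5]
print(weights_to_thresholds([1, 2, 2]))  # [0.2, 0.6] (up to float rounding)
```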



##
flink-ml-lib/src/main/java/org/apache/flink/ml/feature/randomsplitter/RandomSplitter.java:
##
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.feature.randomsplitter;
+
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.AlgoOperator;
+import org.apache.flink.ml.common.datastream.TableUtils;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
+import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
+import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.OutputTag;
+import org.apache.flink.util.Preconditions;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Random;
+
+/** An AlgoOperator which splits a datastream into N datastreams according to the given weights. */
+public class RandomSplitter
+        implements AlgoOperator<RandomSplitter>, RandomSplitterParams<RandomSplitter> {
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public RandomSplitter() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public Table[] transform(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) inputs[0]).getTableEnvironment();
+        RowTypeInfo outputTypeInfo = TableUtils.getRowTypeInfo(inputs[0].getResolvedSchema());
+
+        final Double[] weights = getWeights();
+        OutputTag[] outputTags = new OutputTag[weights.length];
+        for (int i = 0; i < 

[GitHub] [flink-ml] yunfengzhou-hub commented on a diff in pull request #160: [FLINK-29434] Add AlgoOperator for Splitter

2022-10-24 Thread GitBox


yunfengzhou-hub commented on code in PR #160:
URL: https://github.com/apache/flink-ml/pull/160#discussion_r1003935113


##
flink-ml-lib/src/main/java/org/apache/flink/ml/feature/randomsplitter/RandomSplitterParams.java:
##
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.feature.randomsplitter;
+
+import org.apache.flink.ml.param.DoubleArrayParam;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.param.ParamValidator;
+import org.apache.flink.ml.param.WithParams;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Params of {@link RandomSplitter}.
+ *
+ * @param <T> The class type of this instance.
+ */
+public interface RandomSplitterParams<T> extends WithParams<T> {
+    Param<Double[]> FRACTIONS =
+            new DoubleArrayParam(
+                    "fractions",

Review Comment:
   Do you think it would be more intuitive to use `weights` instead of `fractions`? `fractions` seems to refer to the thresholds of the randomly generated doubles, an implementation detail of this algorithm, and I think `weights` would be simpler and easier to understand than `fractions`.



##
flink-ml-lib/src/main/java/org/apache/flink/ml/feature/randomsplitter/RandomSplitterParams.java:
##
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.feature.randomsplitter;
+
+import org.apache.flink.ml.param.DoubleArrayParam;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.param.ParamValidator;
+import org.apache.flink.ml.param.WithParams;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Params of {@link RandomSplitter}.
+ *
+ * @param <T> The class type of this instance.
+ */
+public interface RandomSplitterParams<T> extends WithParams<T> {
+    Param<Double[]> FRACTIONS =
+            new DoubleArrayParam(
+                    "fractions",
+                    "The fractions of data splitting.",
+                    new Double[] {0.5},

Review Comment:
   The `weights` parameter of Spark's `RDD.randomSplit()` is a required parameter, thus it might be better to make the default value of our `fractions` parameter `null` to match Spark's behavior.
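For comparison, a minimal PySpark sketch (assuming a local Spark installation is available) showing that `randomSplit` takes the weights as a required argument with no default value.

```python
from pyspark.sql import SparkSession

spark = SparkSession.builder.master("local[1]").getOrCreate()
rdd = spark.sparkContext.parallelize(range(100))

# The weights argument is required; there is no default value.
train, test = rdd.randomSplit([0.8, 0.2], seed=42)
print(train.count(), test.count())
```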



##
flink-ml-lib/src/test/java/org/apache/flink/ml/feature/RandomSplitterTest.java:
##
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.feature;
+
+import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.ml.feature.randomsplitter.RandomSplitter;
+import 

[GitHub] [flink-ml] yunfengzhou-hub commented on a diff in pull request #160: [FLINK-29434] Add AlgoOperator for Splitter

2022-10-12 Thread GitBox


yunfengzhou-hub commented on code in PR #160:
URL: https://github.com/apache/flink-ml/pull/160#discussion_r993058402


##
flink-ml-lib/src/main/java/org/apache/flink/ml/feature/splitter/Splitter.java:
##
@@ -0,0 +1,111 @@
+/*

Review Comment:
   In order to align the functionality of this algorithm with Spark's `DataSet.randomSplit`, do you think the following API would be better?
   - Rename the algorithm from `Splitter` to `RandomSplit`.
   - The parameter of this algorithm is a `double[]` whose length >= 2 and contains positive values. The positive values are the weights, proportional to the number of records to be placed in the split datastreams.
     - If the sum of the values in the double array is not 1, they will be normalized.
   - The output is a Table array whose size is equal to the double array parameter's length (see the sketch below).
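The following plain-Python sketch illustrates the semantics proposed above; the function name and signature are hypothetical, and this is not the PR's implementation, which operates on Flink datastreams rather than in-memory sequences.

```python
import random
from typing import List, Sequence


def random_split(records: Sequence, weights: List[float], seed: int = 0) -> List[list]:
    """Distribute records into len(weights) outputs, proportionally to the
    (normalized) weights. Hypothetical helper for illustration only."""
    assert len(weights) >= 2 and all(w > 0 for w in weights)
    total = sum(weights)
    fractions = [w / total for w in weights]  # normalize if the sum is not 1
    rng = random.Random(seed)
    outputs: List[list] = [[] for _ in weights]
    for record in records:
        r = rng.random()
        acc = 0.0
        for i, fraction in enumerate(fractions):
            acc += fraction
            if r < acc or i == len(fractions) - 1:
                outputs[i].append(record)
                break
    return outputs


splits = random_split(range(10000), [2.0, 1.0, 1.0])
print([len(s) for s in splits])  # roughly proportional to 2:1:1
```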



##
flink-ml-lib/src/main/java/org/apache/flink/ml/feature/splitter/Splitter.java:
##
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.feature.splitter;
+
+import org.apache.flink.api.common.functions.FlatMapFunction;
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.AlgoOperator;
+import org.apache.flink.ml.common.datastream.TableUtils;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Random;
+
+/** An AlgoOperator which splits a dataset into two datasets according to a given fraction. */

Review Comment:
   nit: dataset -> datastream.



##
docs/content/docs/operators/feature/splitter.md:
##
@@ -0,0 +1,143 @@
+---
+title: "Splitter"
+weight: 1
+type: docs
+aliases:
+- /operators/feature/splitter.html
+---
+
+
+
+## Splitter
+
+An AlgoOperator which splits a dataset into two datasets according to a given fraction.
+
+### Parameters

Review Comment:
   Let's describe the input and output of this operator as well.


