yunfengzhou-hub commented on code in PR #135:
URL: https://github.com/apache/flink-ml/pull/135#discussion_r935044159


##########
flink-ml-lib/src/test/java/org/apache/flink/ml/feature/ElementwiseProductTest.java:
##########
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.feature;
+
+import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.ml.feature.elementwiseproduct.ElementwiseProduct;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.linalg.SparseVector;
+import org.apache.flink.ml.linalg.Vectors;
+import org.apache.flink.ml.util.TestUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+
+import org.apache.commons.collections.IteratorUtils;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+/** Tests {@link ElementwiseProduct}. */
+public class ElementwiseProductTest extends AbstractTestBase {
+
+    private StreamTableEnvironment tEnv;
+    private Table inputDataTable;
+
+    private static final List<Row> INPUT_DATA =
+            Arrays.asList(
+                    Row.of(
+                            0,
+                            Vectors.dense(2.1, 3.1),
+                            Vectors.sparse(5, new int[] {3}, new double[] {1.0})),
+                    Row.of(
+                            1,
+                            Vectors.dense(1.1, 3.3),
+                            Vectors.sparse(
+                                    5, new int[] {4, 2, 3, 1}, new double[] {4.0, 2.0, 3.0, 1.0})),
+                    Row.of(2, null, null));
+
+    private static final DenseVector EXPECTED_OUTPUT_DATA_1 = Vectors.dense(2.31, 3.41);
+    private static final DenseVector EXPECTED_OUTPUT_DATA_2 = Vectors.dense(1.21, 3.63);
+    private static final SparseVector EXPECTED_OUTPUT_DATA_3 =
+            Vectors.sparse(5, new int[] {3}, new double[] {0.0});
+    private static final SparseVector EXPECTED_OUTPUT_DATA_4 =
+            Vectors.sparse(5, new int[] {1, 2, 3, 4}, new double[] {1.1, 0.0, 0.0, 0.0});
+
+    @Before
+    public void before() {
+        Configuration config = new Configuration();
+        config.set(ExecutionCheckpointingOptions.ENABLE_CHECKPOINTS_AFTER_TASKS_FINISH, true);
+        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(config);
+        env.setParallelism(4);
+        env.enableCheckpointing(100);
+        env.setRestartStrategy(RestartStrategies.noRestart());
+        tEnv = StreamTableEnvironment.create(env);
+        DataStream<Row> dataStream = env.fromCollection(INPUT_DATA);
+        inputDataTable = tEnv.fromDataStream(dataStream).as("id", "vec", "sparseVec");
+    }
+
+    private void verifyOutputResult(Table output, String outputCol, boolean isSparse)

Review Comment:
   Could you please verify that the input data is not affected by this transformation? I found that while the output is `[1.21, 3.63]`, the input is also changed to `[1.21, 3.63]`. There might be a bug here.
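
   For reference, a sketch of the kind of check that could catch this. The setter names `setInputCol`/`setOutputCol`/`setScalingVec` and the scaling vector `Vectors.dense(1.1, 1.1)` are assumptions inferred from the expected outputs in this test file, not verified against the PR:

```java
// Hypothetical test: collect the transformed rows and assert that the input column
// still holds the original vector rather than the scaled result.
@Test
public void testInputVectorNotModified() throws Exception {
    ElementwiseProduct elementwiseProduct =
            new ElementwiseProduct()
                    .setInputCol("vec")
                    .setOutputCol("outputVec")
                    .setScalingVec(Vectors.dense(1.1, 1.1));
    Table output = elementwiseProduct.transform(inputDataTable)[0];
    List<Row> results =
            IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect());
    for (Row result : results) {
        if (Integer.valueOf(1).equals(result.getField("id"))) {
            // The "vec" column should still be the original [1.1, 3.3],
            // not the scaled [1.21, 3.63] reported above.
            assertArrayEquals(
                    new double[] {1.1, 3.3},
                    ((DenseVector) result.getField("vec")).values,
                    1.0e-5);
        }
    }
}
```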



##########
flink-ml-lib/src/test/java/org/apache/flink/ml/feature/ElementwiseProductTest.java:
##########
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.feature;
+
+import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.ml.feature.elementwiseproduct.ElementwiseProduct;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.linalg.SparseVector;
+import org.apache.flink.ml.linalg.Vectors;
+import org.apache.flink.ml.util.TestUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+
+import org.apache.commons.collections.IteratorUtils;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+/** Tests {@link ElementwiseProduct}. */
+public class ElementwiseProductTest extends AbstractTestBase {
+
+    private StreamTableEnvironment tEnv;
+    private Table inputDataTable;
+
+    private static final List<Row> INPUT_DATA =
+            Arrays.asList(
+                    Row.of(
+                            0,
+                            Vectors.dense(2.1, 3.1),
+                            Vectors.sparse(5, new int[] {3}, new double[] {1.0})),
+                    Row.of(
+                            1,
+                            Vectors.dense(1.1, 3.3),
+                            Vectors.sparse(
+                                    5, new int[] {4, 2, 3, 1}, new double[] {4.0, 2.0, 3.0, 1.0})),
+                    Row.of(2, null, null));
+
+    private static final DenseVector EXPECTED_OUTPUT_DATA_1 = Vectors.dense(2.31, 3.41);
+    private static final DenseVector EXPECTED_OUTPUT_DATA_2 = Vectors.dense(1.21, 3.63);
+    private static final SparseVector EXPECTED_OUTPUT_DATA_3 =
+            Vectors.sparse(5, new int[] {3}, new double[] {0.0});
+    private static final SparseVector EXPECTED_OUTPUT_DATA_4 =
+            Vectors.sparse(5, new int[] {1, 2, 3, 4}, new double[] {1.1, 0.0, 0.0, 0.0});
+
+    @Before
+    public void before() {
+        Configuration config = new Configuration();
+        config.set(ExecutionCheckpointingOptions.ENABLE_CHECKPOINTS_AFTER_TASKS_FINISH, true);
+        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(config);
+        env.setParallelism(4);
+        env.enableCheckpointing(100);
+        env.setRestartStrategy(RestartStrategies.noRestart());
+        tEnv = StreamTableEnvironment.create(env);
+        DataStream<Row> dataStream = env.fromCollection(INPUT_DATA);
+        inputDataTable = tEnv.fromDataStream(dataStream).as("id", "vec", "sparseVec");
+    }
+
+    private void verifyOutputResult(Table output, String outputCol, boolean isSparse)
+            throws Exception {
+        DataStream<Row> dataStream = tEnv.toDataStream(output);
+        List<Row> results = IteratorUtils.toList(dataStream.executeAndCollect());
+        assertEquals(3, results.size());
+        for (Row result : results) {
+            if (result.getField(0) == (Object) 0) {
+                if (isSparse) {
+                    assertEquals(EXPECTED_OUTPUT_DATA_3, result.getField(outputCol));
+                } else {
+                    assertArrayEquals(
+                            EXPECTED_OUTPUT_DATA_1.values,

Review Comment:
   Can we use the same comparison method for the different situations, i.e. for SparseVector, DenseVector, and even null values? It would help improve the readability of the test cases.
   
   Besides, let's also try to refactor names like `EXPECTED_OUTPUT_DATA_1` and `EXPECTED_OUTPUT_DATA_3`. These names have low readability.
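
   One possible shape for such a shared helper, sketched for reference (the name, the tolerance, and the use of `Vector#toArray` are illustrative assumptions, and it would need an import of `org.apache.flink.ml.linalg.Vector`):

```java
// Illustrative helper: a single assertion entry point for DenseVector, SparseVector,
// and null values, comparing vectors through their dense array representation.
private static void assertVectorEquals(Vector expected, Vector actual) {
    if (expected == null) {
        assertNull(actual);
        return;
    }
    Assert.assertNotNull(actual);
    assertEquals(expected.size(), actual.size());
    assertArrayEquals(expected.toArray(), actual.toArray(), 1.0e-5);
}
```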



##########
flink-ml-python/pyflink/ml/core/tests/test_stage.py:
##########
@@ -21,14 +21,16 @@
 from pyflink.table import StreamTableEnvironment
 
 from pyflink.ml.core.api import Stage
+from pyflink.ml.core.linalg import Vectors
 from pyflink.ml.core.param import ParamValidators, Param, BooleanParam, IntParam, \
-    FloatParam, StringParam, IntArrayParam, FloatArrayParam, StringArrayParam
+    FloatParam, StringParam, VectorParam, IntArrayParam, FloatArrayParam, StringArrayParam
 from pyflink.ml.tests.test_utils import PyFlinkMLTestCase
 
 BOOLEAN_PARAM = BooleanParam("boolean_param", "Description", False)
 INT_PARAM = IntParam("int_param", "Description", 1, ParamValidators.lt(100))
 FLOAT_PARAM = FloatParam("float_param", "Description", 3.0, ParamValidators.lt(100))
 STRING_PARAM = StringParam('string_param', "Description", "5")
+VECTOR_PARAM = VectorParam('vector_param', "Description", Vectors.dense(1, 2, 3))

Review Comment:
   Just creating `VECTOR_PARAM` might not be enough to verify the correctness of `VectorParam`. Could you please refer to the tests for the other parameters, like `IntParam`, and add corresponding test cases for `VectorParam`?



##########
flink-ml-lib/src/test/java/org/apache/flink/ml/feature/ElementwiseProductTest.java:
##########
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.feature;
+
+import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.ml.feature.elementwiseproduct.ElementwiseProduct;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.linalg.SparseVector;
+import org.apache.flink.ml.linalg.Vectors;
+import org.apache.flink.ml.util.TestUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+
+import org.apache.commons.collections.IteratorUtils;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+/** Tests {@link ElementwiseProduct}. */
+public class ElementwiseProductTest extends AbstractTestBase {

Review Comment:
   Let's also add a `testOutputSchema` test case here, like the ones in the other algorithms' tests.
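
   A sketch of what that test typically looks like, for reference (the setter names and the expected column list are assumptions based on the fixtures in this test file):

```java
// Hypothetical schema test: the output table should keep the input columns
// and append the configured output column.
@Test
public void testOutputSchema() {
    ElementwiseProduct elementwiseProduct =
            new ElementwiseProduct()
                    .setInputCol("vec")
                    .setOutputCol("outputVec")
                    .setScalingVec(Vectors.dense(1.1, 1.1));
    Table output = elementwiseProduct.transform(inputDataTable)[0];
    assertEquals(
            Arrays.asList("id", "vec", "sparseVec", "outputVec"),
            output.getResolvedSchema().getColumnNames());
}
```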



##########
flink-ml-lib/src/main/java/org/apache/flink/ml/feature/elementwiseproduct/ElementwiseProduct.java:
##########
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.feature.elementwiseproduct;
+
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.Transformer;
+import org.apache.flink.ml.common.datastream.TableUtils;
+import org.apache.flink.ml.linalg.BLAS;
+import org.apache.flink.ml.linalg.Vector;
+import org.apache.flink.ml.linalg.typeinfo.VectorTypeInfo;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.lang3.ArrayUtils;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * ElementwiseProduct is a transformer that multiplies each input vector with a given scaling vector
+ * using Hadamard product.
+ *
+ * <p>If input vector is null, then the transformer will return null. If input vector size not

Review Comment:
   It seems that there is still a grammar error in this paragraph. Could you please check and fix it?
   
   I found Grammarly to be a helpful tool for catching grammar errors. You may also try using it to polish the documentation.
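
   While the wording is being revised, a minimal usage sketch of the behavior that paragraph describes may help pin down the intended semantics (the setter names are assumptions based on the params added in this PR):

```java
// Hypothetical usage: each value of the input vector is multiplied by the
// corresponding entry of the scaling vector (Hadamard product); a null input
// vector is expected to produce a null output.
ElementwiseProduct elementwiseProduct =
        new ElementwiseProduct()
                .setInputCol("vec")
                .setOutputCol("outputVec")
                .setScalingVec(Vectors.dense(1.1, 1.1));
Table output = elementwiseProduct.transform(inputDataTable)[0];
// e.g. an input of [2.1, 3.1] with scaling vector [1.1, 1.1] yields [2.31, 3.41].
```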



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
