zhipeng93 commented on a change in pull request #24:
URL: https://github.com/apache/flink-ml/pull/24#discussion_r768451170



##########
File path: 
flink-ml-lib/src/main/java/org/apache/flink/ml/classification/knn/Knn.java
##########
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.knn;
+
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.ml.api.Estimator;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.linalg.DenseMatrix;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An Estimator which implements the KNN algorithm.
+ *
+ * <p>See: https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm.
+ */
+public class Knn implements Estimator<Knn, KnnModel>, KnnParams<Knn> {
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public Knn() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public KnnModel fit(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) 
inputs[0]).getTableEnvironment();
+        /* Tuple3 : <feature, label, norm> */
+        DataStream<Tuple3<DenseVector, Double, Double>> inputDataWithNorm =
+                computeNorm(tEnv.toDataStream(inputs[0]));
+        DataStream<KnnModelData> modelData = genModelData(inputDataWithNorm);
+        KnnModel model = new 
KnnModel().setModelData(tEnv.fromDataStream(modelData));
+        ReadWriteUtils.updateExistingParams(model, getParamMap());
+        return model;
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return paramMap;
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+    }
+
+    public static Knn load(StreamExecutionEnvironment env, String path) throws 
IOException {
+        return ReadWriteUtils.loadStageParam(path);
+    }
+
+    /**
+     * Generates knn model data.

Review comment:
       Can you update the comments here to explain why the KnnModel data is 
stored as `packedFeatures, labels, norms`? 

##########
File path: 
flink-ml-lib/src/main/java/org/apache/flink/ml/classification/knn/Knn.java
##########
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.knn;
+
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.ml.api.Estimator;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.linalg.DenseMatrix;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An Estimator which implements the KNN algorithm.
+ *
+ * <p>See: https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm.
+ */
+public class Knn implements Estimator<Knn, KnnModel>, KnnParams<Knn> {
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public Knn() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public KnnModel fit(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) 
inputs[0]).getTableEnvironment();
+        /* Tuple3 : <feature, label, norm> */
+        DataStream<Tuple3<DenseVector, Double, Double>> inputDataWithNorm =
+                computeNorm(tEnv.toDataStream(inputs[0]));
+        DataStream<KnnModelData> modelData = genModelData(inputDataWithNorm);
+        KnnModel model = new 
KnnModel().setModelData(tEnv.fromDataStream(modelData));
+        ReadWriteUtils.updateExistingParams(model, getParamMap());
+        return model;
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return paramMap;
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+    }
+
+    public static Knn load(StreamExecutionEnvironment env, String path) throws 
IOException {
+        return ReadWriteUtils.loadStageParam(path);
+    }
+
+    /**
+     * Generates knn model data.
+     *
+     * @param inputDataWithNorm Input data with feature norm.
+     * @return Knn model.
+     */
+    private static DataStream<KnnModelData> genModelData(
+            DataStream<Tuple3<DenseVector, Double, Double>> inputDataWithNorm) 
{
+        DataStream<KnnModelData> modelData =
+                DataStreamUtils.mapPartition(
+                        inputDataWithNorm,
+                        new RichMapPartitionFunction<
+                                Tuple3<DenseVector, Double, Double>, 
KnnModelData>() {
+                            @Override
+                            public void mapPartition(
+                                    Iterable<Tuple3<DenseVector, Double, 
Double>> values,
+                                    Collector<KnnModelData> out) {
+                                List<Tuple3<DenseVector, Double, Double>> 
buffer =
+                                        new ArrayList<>(1);
+                                for (Tuple3<DenseVector, Double, Double> value 
: values) {
+                                    buffer.add(value);
+                                }
+                                int featureDim = buffer.get(0).f0.size();
+                                DenseMatrix packedFeatures =
+                                        new DenseMatrix(featureDim, 
buffer.size());
+                                DenseVector featureNorms = new 
DenseVector(buffer.size());
+                                DenseVector labels = new 
DenseVector(buffer.size());
+                                int offset = 0;
+                                for (Tuple3<DenseVector, Double, Double> data 
: buffer) {
+                                    System.arraycopy(
+                                            data.f0.values,
+                                            0,
+                                            packedFeatures.values,
+                                            offset * featureDim,
+                                            featureDim);
+                                    labels.values[offset] = data.f1;
+                                    featureNorms.values[offset++] = data.f2;
+                                }
+                                out.collect(new KnnModelData(packedFeatures, 
featureNorms, labels));
+                            }
+                        });
+        modelData.getTransformation().setParallelism(1);
+        return modelData;
+    }
+
+    /**
+     * For Euclidean distance, distance = sqrt((a - b)^2) = (sqrt(a^2 + b^2 - 
2ab)) So it can
+     * pre-calculate the L2 norm square of the feature vector, and when 
calculating the distance
+     * with another feature vector, only dot product is calculated.
+     *
+     * @param inputData Input data.
+     * @return Input data with norm.
+     */
+    DataStream<Tuple3<DenseVector, Double, Double>> 
computeNorm(DataStream<Row> inputData) {

Review comment:
       This method could be private.

##########
File path: 
flink-ml-lib/src/main/java/org/apache/flink/ml/classification/knn/Knn.java
##########
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.knn;
+
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.ml.api.Estimator;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.linalg.DenseMatrix;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An Estimator which implements the KNN algorithm.
+ *
+ * <p>See: https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm.
+ */
+public class Knn implements Estimator<Knn, KnnModel>, KnnParams<Knn> {
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public Knn() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public KnnModel fit(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) 
inputs[0]).getTableEnvironment();
+        /* Tuple3 : <feature, label, norm> */
+        DataStream<Tuple3<DenseVector, Double, Double>> inputDataWithNorm =
+                computeNorm(tEnv.toDataStream(inputs[0]));
+        DataStream<KnnModelData> modelData = genModelData(inputDataWithNorm);
+        KnnModel model = new 
KnnModel().setModelData(tEnv.fromDataStream(modelData));
+        ReadWriteUtils.updateExistingParams(model, getParamMap());
+        return model;
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return paramMap;
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+    }
+
+    public static Knn load(StreamExecutionEnvironment env, String path) throws 
IOException {
+        return ReadWriteUtils.loadStageParam(path);
+    }
+
+    /**
+     * Generates knn model data.
+     *
+     * @param inputDataWithNorm Input data with feature norm.
+     * @return Knn model.
+     */
+    private static DataStream<KnnModelData> genModelData(
+            DataStream<Tuple3<DenseVector, Double, Double>> inputDataWithNorm) 
{
+        DataStream<KnnModelData> modelData =
+                DataStreamUtils.mapPartition(
+                        inputDataWithNorm,
+                        new RichMapPartitionFunction<
+                                Tuple3<DenseVector, Double, Double>, 
KnnModelData>() {
+                            @Override
+                            public void mapPartition(
+                                    Iterable<Tuple3<DenseVector, Double, 
Double>> values,
+                                    Collector<KnnModelData> out) {
+                                List<Tuple3<DenseVector, Double, Double>> 
buffer =
+                                        new ArrayList<>(1);
+                                for (Tuple3<DenseVector, Double, Double> value 
: values) {
+                                    buffer.add(value);
+                                }
+                                int featureDim = buffer.get(0).f0.size();
+                                DenseMatrix packedFeatures =
+                                        new DenseMatrix(featureDim, 
buffer.size());
+                                DenseVector featureNorms = new 
DenseVector(buffer.size());
+                                DenseVector labels = new 
DenseVector(buffer.size());
+                                int offset = 0;
+                                for (Tuple3<DenseVector, Double, Double> data 
: buffer) {
+                                    System.arraycopy(
+                                            data.f0.values,
+                                            0,
+                                            packedFeatures.values,
+                                            offset * featureDim,
+                                            featureDim);
+                                    labels.values[offset] = data.f1;
+                                    featureNorms.values[offset++] = data.f2;
+                                }
+                                out.collect(new KnnModelData(packedFeatures, 
featureNorms, labels));
+                            }
+                        });
+        modelData.getTransformation().setParallelism(1);
+        return modelData;
+    }
+
+    /**
+     * For Euclidean distance, distance = sqrt((a - b)^2) = (sqrt(a^2 + b^2 - 
2ab)) So it can
+     * pre-calculate the L2 norm square of the feature vector, and when 
calculating the distance
+     * with another feature vector, only dot product is calculated.
+     *
+     * @param inputData Input data.
+     * @return Input data with norm.
+     */
+    DataStream<Tuple3<DenseVector, Double, Double>> 
computeNorm(DataStream<Row> inputData) {
+        return inputData.map(
+                new MapFunction<Row, Tuple3<DenseVector, Double, Double>>() {
+                    @Override
+                    public Tuple3<DenseVector, Double, Double> map(Row value) {
+                        Double label = (Double) value.getField(getLabelCol());
+                        DenseVector feature = (DenseVector) 
value.getField(getFeaturesCol());
+                        double featureNorm = 0;
+                        for (int i = 0; i < feature.size(); ++i) {
+                            featureNorm += feature.values[i] * 
feature.values[i];

Review comment:
       Is this the L2 norm? The L2 norm is $\sqrt{\sum_i x_i^2}$, right?
   

##########
File path: 
flink-ml-lib/src/main/java/org/apache/flink/ml/classification/knn/Knn.java
##########
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.knn;
+
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.ml.api.Estimator;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.linalg.DenseMatrix;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An Estimator which implements the KNN algorithm.
+ *
+ * <p>See: https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm.
+ */
+public class Knn implements Estimator<Knn, KnnModel>, KnnParams<Knn> {
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public Knn() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public KnnModel fit(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) 
inputs[0]).getTableEnvironment();
+        /* Tuple3 : <feature, label, norm> */
+        DataStream<Tuple3<DenseVector, Double, Double>> inputDataWithNorm =
+                computeNorm(tEnv.toDataStream(inputs[0]));
+        DataStream<KnnModelData> modelData = genModelData(inputDataWithNorm);
+        KnnModel model = new 
KnnModel().setModelData(tEnv.fromDataStream(modelData));
+        ReadWriteUtils.updateExistingParams(model, getParamMap());
+        return model;
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return paramMap;
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+    }
+
+    public static Knn load(StreamExecutionEnvironment env, String path) throws 
IOException {
+        return ReadWriteUtils.loadStageParam(path);
+    }
+
+    /**
+     * Generates knn model data.
+     *
+     * @param inputDataWithNorm Input data with feature norm.
+     * @return Knn model.
+     */
+    private static DataStream<KnnModelData> genModelData(
+            DataStream<Tuple3<DenseVector, Double, Double>> inputDataWithNorm) 
{
+        DataStream<KnnModelData> modelData =
+                DataStreamUtils.mapPartition(
+                        inputDataWithNorm,
+                        new RichMapPartitionFunction<
+                                Tuple3<DenseVector, Double, Double>, 
KnnModelData>() {
+                            @Override
+                            public void mapPartition(
+                                    Iterable<Tuple3<DenseVector, Double, 
Double>> values,
+                                    Collector<KnnModelData> out) {
+                                List<Tuple3<DenseVector, Double, Double>> 
buffer =
+                                        new ArrayList<>(1);

Review comment:
       Can we rename `buffer` here to `dataPoints`?

##########
File path: 
flink-ml-lib/src/main/java/org/apache/flink/ml/classification/knn/Knn.java
##########
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.knn;
+
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.ml.api.Estimator;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.linalg.DenseMatrix;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An Estimator which implements the KNN algorithm.
+ *
+ * <p>See: https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm.
+ */
+public class Knn implements Estimator<Knn, KnnModel>, KnnParams<Knn> {
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public Knn() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public KnnModel fit(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) 
inputs[0]).getTableEnvironment();
+        /* Tuple3 : <feature, label, norm> */
+        DataStream<Tuple3<DenseVector, Double, Double>> inputDataWithNorm =
+                computeNorm(tEnv.toDataStream(inputs[0]));
+        DataStream<KnnModelData> modelData = genModelData(inputDataWithNorm);
+        KnnModel model = new 
KnnModel().setModelData(tEnv.fromDataStream(modelData));
+        ReadWriteUtils.updateExistingParams(model, getParamMap());
+        return model;
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return paramMap;
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+    }
+
+    public static Knn load(StreamExecutionEnvironment env, String path) throws 
IOException {
+        return ReadWriteUtils.loadStageParam(path);
+    }
+
+    /**
+     * Generates knn model data.
+     *
+     * @param inputDataWithNorm Input data with feature norm.
+     * @return Knn model.
+     */
+    private static DataStream<KnnModelData> genModelData(
+            DataStream<Tuple3<DenseVector, Double, Double>> inputDataWithNorm) 
{
+        DataStream<KnnModelData> modelData =
+                DataStreamUtils.mapPartition(
+                        inputDataWithNorm,
+                        new RichMapPartitionFunction<
+                                Tuple3<DenseVector, Double, Double>, 
KnnModelData>() {
+                            @Override
+                            public void mapPartition(
+                                    Iterable<Tuple3<DenseVector, Double, 
Double>> values,
+                                    Collector<KnnModelData> out) {
+                                List<Tuple3<DenseVector, Double, Double>> 
buffer =
+                                        new ArrayList<>(1);
+                                for (Tuple3<DenseVector, Double, Double> value 
: values) {

Review comment:
       nits: Can we do the following renaming?
   
   `value` -> `dataPoint`
   `buffer` -> `bufferedDataPoints`
   `values` -> `dataPoints`

##########
File path: flink-ml-core/src/main/java/org/apache/flink/ml/linalg/BLAS.java
##########
@@ -52,4 +52,39 @@ public static double norm2(DenseVector x) {
     public static void scal(double a, DenseVector x) {
         JAVA_BLAS.dscal(x.size(), a, x.values, 1);
     }
+
+    /**
+     * y = alpha * matrix * x + beta * y or y = alpha * (matrix^T) * x + beta 
* y.
+     *
+     * @param matrix Dense matrix with size m x n.
+     * @param transMatrix Whether transposes matrix before multiply.
+     * @param x Dense vector with size n.
+     * @param y Dense vector with size m.

Review comment:
       Can you also add Javadoc for `alpha` and `beta`?

##########
File path: 
flink-ml-lib/src/main/java/org/apache/flink/ml/classification/knn/KnnModel.java
##########
@@ -0,0 +1,211 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.knn;
+
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.Model;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.TableUtils;
+import org.apache.flink.ml.linalg.BLAS;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.lang3.ArrayUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+
+/** A Model which classifies data using the model data computed by {@link 
Knn}. */
+public class KnnModel implements Model<KnnModel>, KnnModelParams<KnnModel> {
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+    private Table modelDataTable;
+
+    public KnnModel() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public KnnModel setModelData(Table... modelData) {
+        modelDataTable = modelData[0];
+        return this;
+    }
+
+    @Override
+    public Table[] getModelData() {
+        return new Table[] {modelDataTable};
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public Table[] transform(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) 
inputs[0]).getTableEnvironment();
+        DataStream<Row> data = tEnv.toDataStream(inputs[0]);
+        DataStream<KnnModelData> knnModel = 
KnnModelData.getModelDataStream(modelDataTable);
+        final String broadcastModelKey = "broadcastModelKey";
+        RowTypeInfo inputTypeInfo = 
TableUtils.getRowTypeInfo(inputs[0].getResolvedSchema());
+        RowTypeInfo outputTypeInfo =
+                new RowTypeInfo(
+                        ArrayUtils.addAll(
+                                inputTypeInfo.getFieldTypes(), 
BasicTypeInfo.DOUBLE_TYPE_INFO),
+                        ArrayUtils.addAll(inputTypeInfo.getFieldNames(), 
getPredictionCol()));
+        DataStream<Row> output =
+                BroadcastUtils.withBroadcastStream(
+                        Collections.singletonList(data),
+                        Collections.singletonMap(broadcastModelKey, knnModel),
+                        inputList -> {
+                            DataStream input = inputList.get(0);
+                            return input.map(
+                                    new PredictLabelFunction(
+                                            broadcastModelKey, getK(), 
getFeaturesCol()),
+                                    outputTypeInfo);
+                        });
+        return new Table[] {tEnv.fromDataStream(output)};
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return paramMap;
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+        ReadWriteUtils.saveModelData(
+                KnnModelData.getModelDataStream(modelDataTable),
+                path,
+                new KnnModelData.ModelDataEncoder());
+    }
+
+    /**
+     * Loads model data from path.
+     *
+     * @param env Stream execution environment.
+     * @param path Model path.
+     * @return Knn model.
+     */
+    public static KnnModel load(StreamExecutionEnvironment env, String path) 
throws IOException {
+        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
+        KnnModel model = ReadWriteUtils.loadStageParam(path);
+        DataStream<KnnModelData> modelData =
+                ReadWriteUtils.loadModelData(env, path, new 
KnnModelData.ModelDataDecoder());
+        return model.setModelData(tEnv.fromDataStream(modelData));
+    }
+
+    /** This operator loads model data and predicts result. */
+    private static class PredictLabelFunction extends RichMapFunction<Row, 
Row> {
+        private final String featureCol;
+        private KnnModelData knnModelData;
+        private final int k;
+        private final String broadcastKey;
+
+        public PredictLabelFunction(String broadcastKey, int k, String 
featureCol) {
+            this.k = k;
+            this.broadcastKey = broadcastKey;
+            this.featureCol = featureCol;
+        }
+
+        @Override
+        public Row map(Row row) {
+            if (knnModelData == null) {
+                knnModelData =
+                        (KnnModelData)
+                                
getRuntimeContext().getBroadcastVariable(broadcastKey).get(0);
+            }
+            DenseVector feature = (DenseVector) row.getField(featureCol);
+            double prediction = predict(feature);
+            return Row.join(row, Row.of(prediction));
+        }
+
+        private double predict(DenseVector inputFeature) {
+            PriorityQueue<Tuple2<Double, Double>> priorityQueue =
+                    new PriorityQueue<>(Comparator.comparingDouble(o -> 
-o.f0));
+            double featureNorm = 0.0;

Review comment:
       It is the squared norm, right? If so, consider renaming `featureNorm` to `squaredNorm`.

##########
File path: 
flink-ml-lib/src/main/java/org/apache/flink/ml/classification/knn/Knn.java
##########
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.knn;
+
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.ml.api.Estimator;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.linalg.DenseMatrix;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An Estimator which implements the KNN algorithm.
+ *
+ * <p>See: https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm.
+ */
+public class Knn implements Estimator<Knn, KnnModel>, KnnParams<Knn> {
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public Knn() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public KnnModel fit(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) 
inputs[0]).getTableEnvironment();
+        /* Tuple3 : <feature, label, norm> */
+        DataStream<Tuple3<DenseVector, Double, Double>> inputDataWithNorm =
+                computeNorm(tEnv.toDataStream(inputs[0]));
+        DataStream<KnnModelData> modelData = genModelData(inputDataWithNorm);
+        KnnModel model = new 
KnnModel().setModelData(tEnv.fromDataStream(modelData));
+        ReadWriteUtils.updateExistingParams(model, getParamMap());
+        return model;
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return paramMap;
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+    }
+
+    public static Knn load(StreamExecutionEnvironment env, String path) throws 
IOException {
+        return ReadWriteUtils.loadStageParam(path);
+    }
+
+    /**
+     * Generates knn model data. For Euclidean distance, distance = sqrt((a - 
b)^2) = (sqrt(a^2 +
+     * b^2 - 2ab)) So it can pre-calculate the L2 norm square of the feature 
vector, and when
+     * calculating the distance with another feature vector, only dot product 
is calculated.
+     *
+     * @param inputDataWithNorm Input data with feature norm.
+     * @return Knn model.
+     */
+    private static DataStream<KnnModelData> genModelData(
+            DataStream<Tuple3<DenseVector, Double, Double>> inputDataWithNorm) 
{
+        DataStream<KnnModelData> modelData =
+                DataStreamUtils.mapPartition(
+                        inputDataWithNorm,
+                        new RichMapPartitionFunction<
+                                Tuple3<DenseVector, Double, Double>, 
KnnModelData>() {
+                            @Override
+                            public void mapPartition(
+                                    Iterable<Tuple3<DenseVector, Double, 
Double>> values,
+                                    Collector<KnnModelData> out) {
+                                List<Tuple3<DenseVector, Double, Double>> 
buffer =
+                                        new ArrayList<>();
+                                for (Tuple3<DenseVector, Double, Double> value 
: values) {
+                                    buffer.add(value);
+                                }
+                                int featureDim = buffer.get(0).f0.size();
+                                DenseMatrix packedFeatures =
+                                        new DenseMatrix(featureDim, 
buffer.size());
+                                DenseVector featureNorms = new 
DenseVector(buffer.size());
+                                DenseVector labels = new 
DenseVector(buffer.size());
+                                int offset = 0;
+                                for (Tuple3<DenseVector, Double, Double> data 
: buffer) {

Review comment:
       Consider renaming `data` to `dataPoint` — each element of the buffer is a single data point.

##########
File path: 
flink-ml-lib/src/main/java/org/apache/flink/ml/classification/knn/KnnModel.java
##########
@@ -0,0 +1,211 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.knn;
+
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.Model;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.TableUtils;
+import org.apache.flink.ml.linalg.BLAS;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.lang3.ArrayUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+
+/** A Model which classifies data using the model data computed by {@link 
Knn}. */
+public class KnnModel implements Model<KnnModel>, KnnModelParams<KnnModel> {
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+    private Table modelDataTable;
+
+    public KnnModel() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public KnnModel setModelData(Table... modelData) {
+        modelDataTable = modelData[0];
+        return this;
+    }
+
+    @Override
+    public Table[] getModelData() {
+        return new Table[] {modelDataTable};
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public Table[] transform(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) 
inputs[0]).getTableEnvironment();
+        DataStream<Row> data = tEnv.toDataStream(inputs[0]);
+        DataStream<KnnModelData> knnModel = 
KnnModelData.getModelDataStream(modelDataTable);
+        final String broadcastModelKey = "broadcastModelKey";
+        RowTypeInfo inputTypeInfo = 
TableUtils.getRowTypeInfo(inputs[0].getResolvedSchema());
+        RowTypeInfo outputTypeInfo =
+                new RowTypeInfo(
+                        ArrayUtils.addAll(
+                                inputTypeInfo.getFieldTypes(), 
BasicTypeInfo.DOUBLE_TYPE_INFO),
+                        ArrayUtils.addAll(inputTypeInfo.getFieldNames(), 
getPredictionCol()));
+        DataStream<Row> output =
+                BroadcastUtils.withBroadcastStream(
+                        Collections.singletonList(data),
+                        Collections.singletonMap(broadcastModelKey, knnModel),
+                        inputList -> {
+                            DataStream input = inputList.get(0);
+                            return input.map(
+                                    new PredictLabelFunction(
+                                            broadcastModelKey, getK(), 
getFeaturesCol()),
+                                    outputTypeInfo);
+                        });
+        return new Table[] {tEnv.fromDataStream(output)};
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return paramMap;
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+        ReadWriteUtils.saveModelData(
+                KnnModelData.getModelDataStream(modelDataTable),
+                path,
+                new KnnModelData.ModelDataEncoder());
+    }
+
+    /**
+     * Loads model data from path.
+     *
+     * @param env Stream execution environment.
+     * @param path Model path.
+     * @return Knn model.
+     */
+    public static KnnModel load(StreamExecutionEnvironment env, String path) 
throws IOException {
+        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
+        KnnModel model = ReadWriteUtils.loadStageParam(path);
+        DataStream<KnnModelData> modelData =
+                ReadWriteUtils.loadModelData(env, path, new 
KnnModelData.ModelDataDecoder());
+        return model.setModelData(tEnv.fromDataStream(modelData));
+    }
+
+    /** This operator loads model data and predicts result. */
+    private static class PredictLabelFunction extends RichMapFunction<Row, 
Row> {
+        private final String featureCol;
+        private KnnModelData knnModelData;
+        private final int k;
+        private final String broadcastKey;
+
+        public PredictLabelFunction(String broadcastKey, int k, String 
featureCol) {
+            this.k = k;
+            this.broadcastKey = broadcastKey;
+            this.featureCol = featureCol;
+        }
+
+        @Override
+        public Row map(Row row) {
+            if (knnModelData == null) {
+                knnModelData =
+                        (KnnModelData)
+                                
getRuntimeContext().getBroadcastVariable(broadcastKey).get(0);
+            }
+            DenseVector feature = (DenseVector) row.getField(featureCol);
+            double prediction = predict(feature);
+            return Row.join(row, Row.of(prediction));
+        }
+
+        private double predict(DenseVector inputFeature) {
+            PriorityQueue<Tuple2<Double, Double>> priorityQueue =
+                    new PriorityQueue<>(Comparator.comparingDouble(o -> 
-o.f0));
+            double featureNorm = 0.0;
+            for (int i = 0; i < inputFeature.size(); ++i) {
+                featureNorm += inputFeature.values[i] * inputFeature.values[i];
+            }
+            Tuple2<DenseVector, Double> sample = Tuple2.of(inputFeature, 
featureNorm);
+            double[] labelValues = knnModelData.labels.values;
+            double[] normValues = knnModelData.featureNorms.values;
+            DenseVector distanceVector = new DenseVector(labelValues.length);

Review comment:
       Can you make `distanceVector` a class member here? It can be large (its length equals the 
number of training data points) and is currently re-allocated anew for every input record.

##########
File path: 
flink-ml-lib/src/main/java/org/apache/flink/ml/classification/knn/KnnModel.java
##########
@@ -0,0 +1,223 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.knn;
+
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.Model;
+import org.apache.flink.ml.classification.knn.KnnModelData.ModelDataDecoder;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.TableUtils;
+import org.apache.flink.ml.linalg.BLAS;
+import org.apache.flink.ml.linalg.DenseMatrix;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.lang3.ArrayUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+
+/** A Model which classifies data using the model data computed by {@link 
Knn}. */
+public class KnnModel implements Model<KnnModel>, KnnModelParams<KnnModel> {
+    protected Map<Param<?>, Object> params = new HashMap<>();
+    private Table modelDataTable;
+
+    public KnnModel() {
+        ParamUtils.initializeMapWithDefaultValues(params, this);
+    }
+
+    @Override
+    public KnnModel setModelData(Table... modelData) {
+        this.modelDataTable = modelData[0];
+        return this;
+    }
+
+    @Override
+    public Table[] getModelData() {
+        return new Table[] {modelDataTable};
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public Table[] transform(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) 
inputs[0]).getTableEnvironment();
+        DataStream<Row> data = tEnv.toDataStream(inputs[0]);
+        DataStream<KnnModelData> knnModel = 
KnnModelData.getModelDataStream(modelDataTable);
+        final String broadcastModelKey = "broadcastModelKey";
+
+        RowTypeInfo inputTypeInfo = 
TableUtils.getRowTypeInfo(inputs[0].getResolvedSchema());
+        RowTypeInfo outputTypeInfo =
+                new RowTypeInfo(
+                        ArrayUtils.addAll(
+                                inputTypeInfo.getFieldTypes(), 
BasicTypeInfo.DOUBLE_TYPE_INFO),
+                        ArrayUtils.addAll(inputTypeInfo.getFieldNames(), 
getPredictionCol()));
+        DataStream<Row> output =
+                BroadcastUtils.withBroadcastStream(
+                        Collections.singletonList(data),
+                        Collections.singletonMap(broadcastModelKey, knnModel),
+                        inputList -> {
+                            DataStream input = inputList.get(0);
+                            return input.map(
+                                    new PredictLabelFunction(
+                                            broadcastModelKey, getK(), 
getFeaturesCol()),
+                                    outputTypeInfo);
+                        });
+        return new Table[] {tEnv.fromDataStream(output)};
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return this.params;
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+        ReadWriteUtils.saveModelData(
+                KnnModelData.getModelDataStream(modelDataTable),
+                path,
+                new KnnModelData.ModelDataEncoder(),
+                0);
+    }
+
+    /**
+     * Loads model data from path.
+     *
+     * @param env Stream execution environment.
+     * @param path Model path.
+     * @return Knn model.
+     */
+    public static KnnModel load(StreamExecutionEnvironment env, String path) 
throws IOException {
+        KnnModel model = ReadWriteUtils.loadStageParam(path);
+        Table modelDataTable = ReadWriteUtils.loadModelData(env, path, new 
ModelDataDecoder(), 0);
+        return model.setModelData(modelDataTable);
+    }
+
+    /** This operator loads model data and predicts result. */
+    private static class PredictLabelFunction extends RichMapFunction<Row, 
Row> {
+        private final String featureCol;
+        private KnnModelData knnModelData;
+        private final int k;
+        private final String broadcastKey;
+
+        public PredictLabelFunction(String broadcastKey, int k, String 
featureCol) {
+            this.k = k;
+            this.broadcastKey = broadcastKey;
+            this.featureCol = featureCol;
+        }
+
+        @Override
+        public Row map(Row row) {
+            if (knnModelData == null) {
+                knnModelData =
+                        (KnnModelData)
+                                
getRuntimeContext().getBroadcastVariable(broadcastKey).get(0);
+            }
+            DenseVector vector = (DenseVector) row.getField(featureCol);
+            if (vector == null) {
+                return Row.join(row, new Row(1));
+            }
+            Tuple2<List<Double>, List<Double>> tuple2 = findNeighbor(vector);
+            double percent = 1.0 / tuple2.f0.size();
+            Map<Double, Double> detail = new HashMap<>(0);
+            for (Double obj : tuple2.f0) {
+                detail.merge(obj, percent, Double::sum);
+            }
+            double max = 0.0;
+            double prediction = 0.0;
+            for (Map.Entry<Double, Double> entry : detail.entrySet()) {
+                if (entry.getValue() > max) {
+                    max = entry.getValue();
+                    prediction = entry.getKey();
+                }
+            }
+            return Row.join(row, Row.of(prediction));
+        }
+
+        /** Finds the nearest k neighbors from whole vectors in matrix. */
+        private Tuple2<List<Double>, List<Double>> findNeighbor(DenseVector 
input) {
+            PriorityQueue<Tuple2<Double, Double>> priorityQueue =

Review comment:
       I also believe `nearestKNeighbors` is a better name. Could you apply the same 
renaming here as well?

##########
File path: 
flink-ml-lib/src/main/java/org/apache/flink/ml/classification/knn/KnnModel.java
##########
@@ -0,0 +1,211 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.knn;
+
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.Model;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.TableUtils;
+import org.apache.flink.ml.linalg.BLAS;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.lang3.ArrayUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+
+/** A Model which classifies data using the model data computed by {@link 
Knn}. */
+public class KnnModel implements Model<KnnModel>, KnnModelParams<KnnModel> {
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+    private Table modelDataTable;
+
+    public KnnModel() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public KnnModel setModelData(Table... modelData) {
+        modelDataTable = modelData[0];
+        return this;
+    }
+
+    @Override
+    public Table[] getModelData() {
+        return new Table[] {modelDataTable};
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public Table[] transform(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) 
inputs[0]).getTableEnvironment();
+        DataStream<Row> data = tEnv.toDataStream(inputs[0]);
+        DataStream<KnnModelData> knnModel = 
KnnModelData.getModelDataStream(modelDataTable);
+        final String broadcastModelKey = "broadcastModelKey";
+        RowTypeInfo inputTypeInfo = 
TableUtils.getRowTypeInfo(inputs[0].getResolvedSchema());
+        RowTypeInfo outputTypeInfo =
+                new RowTypeInfo(
+                        ArrayUtils.addAll(
+                                inputTypeInfo.getFieldTypes(), 
BasicTypeInfo.DOUBLE_TYPE_INFO),
+                        ArrayUtils.addAll(inputTypeInfo.getFieldNames(), 
getPredictionCol()));
+        DataStream<Row> output =
+                BroadcastUtils.withBroadcastStream(
+                        Collections.singletonList(data),
+                        Collections.singletonMap(broadcastModelKey, knnModel),
+                        inputList -> {
+                            DataStream input = inputList.get(0);
+                            return input.map(
+                                    new PredictLabelFunction(
+                                            broadcastModelKey, getK(), 
getFeaturesCol()),
+                                    outputTypeInfo);
+                        });
+        return new Table[] {tEnv.fromDataStream(output)};
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return paramMap;
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+        ReadWriteUtils.saveModelData(
+                KnnModelData.getModelDataStream(modelDataTable),
+                path,
+                new KnnModelData.ModelDataEncoder());
+    }
+
+    /**
+     * Loads model data from path.
+     *
+     * @param env Stream execution environment.
+     * @param path Model path.
+     * @return Knn model.
+     */
+    public static KnnModel load(StreamExecutionEnvironment env, String path) 
throws IOException {
+        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
+        KnnModel model = ReadWriteUtils.loadStageParam(path);
+        DataStream<KnnModelData> modelData =
+                ReadWriteUtils.loadModelData(env, path, new 
KnnModelData.ModelDataDecoder());
+        return model.setModelData(tEnv.fromDataStream(modelData));
+    }
+
+    /** This operator loads model data and predicts result. */
+    private static class PredictLabelFunction extends RichMapFunction<Row, 
Row> {
+        private final String featureCol;
+        private KnnModelData knnModelData;
+        private final int k;
+        private final String broadcastKey;
+
+        public PredictLabelFunction(String broadcastKey, int k, String 
featureCol) {
+            this.k = k;
+            this.broadcastKey = broadcastKey;
+            this.featureCol = featureCol;
+        }
+
+        @Override
+        public Row map(Row row) {
+            if (knnModelData == null) {
+                knnModelData =
+                        (KnnModelData)
+                                
getRuntimeContext().getBroadcastVariable(broadcastKey).get(0);
+            }
+            DenseVector feature = (DenseVector) row.getField(featureCol);
+            double prediction = predict(feature);
+            return Row.join(row, Row.of(prediction));
+        }
+
+        private double predict(DenseVector inputFeature) {
+            PriorityQueue<Tuple2<Double, Double>> priorityQueue =
+                    new PriorityQueue<>(Comparator.comparingDouble(o -> 
-o.f0));
+            double featureNorm = 0.0;
+            for (int i = 0; i < inputFeature.size(); ++i) {
+                featureNorm += inputFeature.values[i] * inputFeature.values[i];
+            }
+            Tuple2<DenseVector, Double> sample = Tuple2.of(inputFeature, 
featureNorm);
+            double[] labelValues = knnModelData.labels.values;
+            double[] normValues = knnModelData.featureNorms.values;
+            DenseVector distanceVector = new DenseVector(labelValues.length);
+            BLAS.gemv(-2.0, knnModelData.packedFeatures, true, sample.f0, 0.0, 
distanceVector);
+            for (int i = 0; i < distanceVector.values.length; i++) {
+                distanceVector.values[i] =
+                        Math.sqrt(Math.abs(distanceVector.values[i] + 
sample.f1 + normValues[i]));
+            }
+            List<Tuple2<Double, Double>> distanceAndLabels = new 
ArrayList<>(labelValues.length);

Review comment:
       Is `distanceAndLabels` necessary here? We can directly access the 
`distanceVector` and `labels`, right?

##########
File path: 
flink-ml-lib/src/main/java/org/apache/flink/ml/classification/knn/KnnModel.java
##########
@@ -0,0 +1,217 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.knn;
+
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.Model;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.TableUtils;
+import org.apache.flink.ml.linalg.BLAS;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.lang3.ArrayUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+
+/** A Model which classifies data using the model data computed by {@link 
Knn}. */
+public class KnnModel implements Model<KnnModel>, KnnModelParams<KnnModel> {
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+    private Table modelDataTable;
+
+    // Creates a KnnModel and seeds paramMap with the default value of every
+    // parameter declared via KnnModelParams.
+    public KnnModel() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public KnnModel setModelData(Table... inputs) {
+        modelDataTable = inputs[0];
+        return this;
+    }
+
+    @Override
+    public Table[] getModelData() {
+        // Returns the single model-data table previously attached via
+        // setModelData (or by Knn#fit).
+        return new Table[] {modelDataTable};
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public Table[] transform(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) 
inputs[0]).getTableEnvironment();
+        DataStream<Row> data = tEnv.toDataStream(inputs[0]);
+        DataStream<KnnModelData> knnModel = 
KnnModelData.getModelDataStream(modelDataTable);
+        final String broadcastModelKey = "broadcastModelKey";
+        RowTypeInfo inputTypeInfo = 
TableUtils.getRowTypeInfo(inputs[0].getResolvedSchema());
+        RowTypeInfo outputTypeInfo =
+                new RowTypeInfo(
+                        ArrayUtils.addAll(
+                                inputTypeInfo.getFieldTypes(), 
BasicTypeInfo.DOUBLE_TYPE_INFO),
+                        ArrayUtils.addAll(inputTypeInfo.getFieldNames(), 
getPredictionCol()));
+        DataStream<Row> output =
+                BroadcastUtils.withBroadcastStream(
+                        Collections.singletonList(data),
+                        Collections.singletonMap(broadcastModelKey, knnModel),
+                        inputList -> {
+                            DataStream input = inputList.get(0);
+                            return input.map(
+                                    new PredictLabelFunction(
+                                            broadcastModelKey, getK(), 
getFeaturesCol()),
+                                    outputTypeInfo);
+                        });
+        return new Table[] {tEnv.fromDataStream(output)};
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        // Exposes the live parameter map; ParamUtils and ReadWriteUtils read
+        // and write parameter values through this map.
+        return paramMap;
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+        ReadWriteUtils.saveModelData(
+                KnnModelData.getModelDataStream(modelDataTable),
+                path,
+                new KnnModelData.ModelDataEncoder());
+    }
+
+    /**
+     * Loads model data from path.
+     *
+     * @param env Stream execution environment.
+     * @param path Model path.
+     * @return Knn model.
+     */
+    public static KnnModel load(StreamExecutionEnvironment env, String path) 
throws IOException {
+        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
+        KnnModel model = ReadWriteUtils.loadStageParam(path);
+        DataStream<KnnModelData> modelData =
+                ReadWriteUtils.loadModelData(env, path, new 
KnnModelData.ModelDataDecoder());
+        return model.setModelData(tEnv.fromDataStream(modelData));
+    }
+
+    /** This operator loads model data and predicts result. */
+    private static class PredictLabelFunction extends RichMapFunction<Row, 
Row> {
+        private final String featureCol;
+        private KnnModelData knnModelData;
+        private final int k;
+        private final String broadcastKey;
+
// Builds a prediction function that resolves the broadcast KNN model under
// {@code broadcastKey}, reads the feature vector from {@code featureCol},
// and votes among the {@code k} nearest neighbors.
public PredictLabelFunction(String broadcastKey, int k, String featureCol) {
    this.broadcastKey = broadcastKey;
    this.k = k;
    this.featureCol = featureCol;
}
+
+        @Override
+        public Row map(Row row) {
+            if (knnModelData == null) {
+                knnModelData =
+                        (KnnModelData)
+                                
getRuntimeContext().getBroadcastVariable(broadcastKey).get(0);
+            }
+            DenseVector feature = (DenseVector) row.getField(featureCol);
+            double prediction = predictLabel(feature);
+            return Row.join(row, Row.of(prediction));
+        }
+
+        private double predictLabel(DenseVector feature) {
+            PriorityQueue<Tuple2<Double, Double>> priorityQueue =
+                    new PriorityQueue<>(Comparator.comparingDouble(o -> 
-o.f0));
+            double featureNorm = 0.0;
+            for (int i = 0; i < feature.size(); ++i) {
+                featureNorm += feature.values[i] * feature.values[i];
+            }
+            Tuple2<DenseVector, Double> featureAndNorm = Tuple2.of(feature, 
featureNorm);
+            double[] labelValues = knnModelData.labels.values;
+            double[] normValues = knnModelData.featureNorms.values;
+            DenseVector distanceVector = new DenseVector(labelValues.length);
+            BLAS.gemv(
+                    -2.0,
+                    knnModelData.packedFeatures,
+                    true,
+                    featureAndNorm.f0,
+                    0.0,
+                    distanceVector);
+            for (int i = 0; i < distanceVector.values.length; i++) {
+                distanceVector.values[i] =
+                        Math.sqrt(
+                                Math.abs(
+                                        distanceVector.values[i]
+                                                + featureAndNorm.f1
+                                                + normValues[i]));
+            }
+            List<Tuple2<Double, Double>> distanceAndLabels = new 
ArrayList<>(labelValues.length);
+            for (int i = 0; i < labelValues.length; i++) {
+                distanceAndLabels.add(Tuple2.of(distanceVector.values[i], 
labelValues[i]));
+            }
+            for (Tuple2<Double, Double> distanceAndLabel : distanceAndLabels) {
+                if (priorityQueue.size() < k) {
+                    priorityQueue.add(distanceAndLabel);
+                } else {
+                    Tuple2<Double, Double> head = priorityQueue.peek();
+                    if (head.f0 > distanceAndLabel.f0) {
+                        priorityQueue.poll();
+                        priorityQueue.add(distanceAndLabel);
+                    }
+                }
+            }
+            List<Double> labels = new ArrayList<>();

Review comment:
       Nit: can you remove the `labels` list here and do the weight aggregation 
directly on the priority-queue entries? Some of this code seems redundant.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to