jiangxin369 commented on code in PR #191: URL: https://github.com/apache/flink-ml/pull/191#discussion_r1060256575
########## docs/content/docs/operators/feature/minhashlsh.md: ########## @@ -0,0 +1,280 @@ +--- +title: "MinHash LSH" +weight: 1 +type: docs +aliases: +- /operators/feature/minhashlsh.html +--- + +<!-- +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +--> + +## MinHash LSH + +MinHash LSH is a Locality Sensitive Hashing (LSH) scheme for Jaccard distance metric. +The input features are sets of natural numbers represented as non-zero indices of vectors, +either dense vectors or sparse vectors. Typically, sparse vectors are more efficient. + +In addition to transforming input feature vectors to multiple hash values, the MinHash LSH +model also supports approximate nearest neighbors search within a dataset regarding a key +vector and approximate similarity join between two datasets. + +### Input Columns + +| Param name | Type | Default | Description | |:-----------|:-------|:----------|:-----------------------| | inputCol | Vector | `"input"` | Features to be mapped. | + +### Output Columns + +| Param name | Type | Default | Description | |:-----------|:--------------|:-----------|:-------------| | outputCol | DenseVector[] | `"output"` | Hash values. | + +### Parameters

Review Comment: Since `MinHashLSH` has some parameters that cannot be set on `MinHashLSHModel`, let's separate the parameters into two tables, just like in `vectorindexer.md`.

########## flink-ml-python/pyflink/ml/lib/feature/lsh.py: ########## @@ -0,0 +1,191 @@ +################################################################################ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+################################################################################ + +import typing +from abc import ABC +from pyflink.java_gateway import get_gateway +from pyflink.table import Table +from pyflink.util.java_utils import to_jarray + +from pyflink.ml.core.linalg import Vector, DenseVector, SparseVector +from pyflink.ml.core.param import Param, IntParam, ParamValidators +from pyflink.ml.core.wrapper import JavaWithParams +from pyflink.ml.lib.feature.common import JavaFeatureEstimator, JavaFeatureModel +from pyflink.ml.lib.param import HasInputCol, HasOutputCol, HasSeed + + +class _LSHModelParams(JavaWithParams, + HasInputCol, + HasOutputCol): + """ + Params for :class:`LSHModel` + """ + + def __init__(self, java_params): + super(_LSHModelParams, self).__init__(java_params) + + +class _LSHParams(_LSHModelParams): + """ + Params for :class:`LSH` + """ + + NUM_HASH_TABLES: Param[int] = IntParam( + "num_hash_tables", "Number of hash tables.", 1, ParamValidators.gt_eq(1) + ) + + NUM_HASH_FUNCTIONS_PER_TABLE: Param[int] = IntParam( + "num_hash_functions_per_table", + "Number of hash functions per table.", + 1, + ParamValidators.gt_eq(1.))

Review Comment: nit: let's unify the argument passed to `gt_eq` in `NUM_HASH_TABLES` and `NUM_HASH_FUNCTIONS_PER_TABLE` to either `1` or `1.`.

########## flink-ml-examples/src/main/java/org/apache/flink/ml/examples/feature/MinHashLSHExample.java: ########## @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.ml.examples.feature; + +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.common.typeinfo.Types; +import org.apache.flink.ml.feature.lsh.MinHashLSH; +import org.apache.flink.ml.feature.lsh.MinHashLSHModel; +import org.apache.flink.ml.linalg.DenseVector; +import org.apache.flink.ml.linalg.SparseVector; +import org.apache.flink.ml.linalg.Vector; +import org.apache.flink.ml.linalg.Vectors; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; +import org.apache.flink.types.Row; + +import org.apache.commons.collections.IteratorUtils; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.flink.table.api.Expressions.$; + +/** + * Simple program that trains a MinHashLSH model and uses it for approximate nearest neighbors and + * similarity join.
+ */ +public class MinHashLSHExample { + public static void main(String[] args) throws Exception { + + // Creates a new StreamExecutionEnvironment + StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); + + // Creates a StreamTableEnvironment + StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); + + // Generates two datasets + Table data = + tEnv.fromDataStream( + env.fromCollection( + Arrays.asList( + Row.of( + 0, + Vectors.sparse( + 6, + new int[] {0, 1, 2}, + new double[] {1., 1., 1.})), + Row.of( + 1, + Vectors.sparse( + 6, + new int[] {2, 3, 4}, + new double[] {1., 1., 1.})), + Row.of( + 2, + Vectors.sparse( + 6, + new int[] {0, 2, 4}, + new double[] {1., 1., 1.}))), + Types.ROW_NAMED( + new String[] {"id", "vec"}, + Types.INT, + TypeInformation.of(SparseVector.class)))); + + Table dataB = + tEnv.fromDataStream( + env.fromCollection( + Arrays.asList( + Row.of( + 3, + Vectors.sparse( + 6, + new int[] {1, 3, 5}, + new double[] {1., 1., 1.})), + Row.of( + 4, + Vectors.sparse( + 6, + new int[] {2, 3, 5}, + new double[] {1., 1., 1.})), + Row.of( + 5, + Vectors.sparse( + 6, + new int[] {1, 2, 4}, + new double[] {1., 1., 1.}))), + Types.ROW_NAMED( + new String[] {"id", "vec"}, + Types.INT, + TypeInformation.of(SparseVector.class)))); + + // Creates a MinHashLSH estimator object and initializes its parameters + MinHashLSH lsh = + new MinHashLSH() + .setInputCol("vec") + .setOutputCol("hashes") + .setSeed(2022) + .setNumHashTables(5); + + // Trains the MinHashLSH model + MinHashLSHModel model = lsh.fit(data); + + // Uses the MinHashLSH model for transformation + Table output = model.transform(data)[0]; + + // Extracts and displays the results + List<String> fieldNames = output.getResolvedSchema().getColumnNames(); + for (Row result : + (List<Row>) IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect())) { + Vector inputValue = result.getFieldAs(fieldNames.indexOf(lsh.getInputCol())); + DenseVector[] outputValue = result.getFieldAs(fieldNames.indexOf(lsh.getOutputCol())); + System.out.printf( + "Vector: %s \tHash values: %s\n", inputValue, Arrays.toString(outputValue)); + } + + // Finds approximate nearest neighbors of the key + Vector key = Vectors.sparse(6, new int[] {1, 3}, new double[] {1., 1.}); + output = model.approxNearestNeighbors(data, key, 2).select($("id"), $("distCol")); + for (Row result : + (List<Row>) IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect())) { + int idValue = result.getFieldAs(fieldNames.indexOf("id")); + double distValue = result.getFieldAs(result.getArity() - 1); + System.out.printf("ID: %d \tDistance: %f\n", idValue, distValue); + } + + // Approximately finds pairs from two datasets with distances smaller than the threshold + output = model.approxSimilarityJoin(data, dataB, .6, "id"); + for (Row result : + (List<Row>) IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect())) { + int idAValue = result.getFieldAs(0); + int idBValue = result.getFieldAs(1); + double distValue = result.getFieldAs(2); + System.out.printf( + "ID from left: %d \tID from right: %d \t Distance: %f\n", + idAValue, idAValue, distValue); Review Comment: Should this be "idAValue, idBValue, distValue"? ########## flink-ml-examples/src/main/java/org/apache/flink/ml/examples/feature/MinHashLSHExample.java: ########## @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.ml.examples.feature; + +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.common.typeinfo.Types; +import org.apache.flink.ml.feature.lsh.MinHashLSH; +import org.apache.flink.ml.feature.lsh.MinHashLSHModel; +import org.apache.flink.ml.linalg.DenseVector; +import org.apache.flink.ml.linalg.SparseVector; +import org.apache.flink.ml.linalg.Vector; +import org.apache.flink.ml.linalg.Vectors; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; +import org.apache.flink.types.Row; + +import org.apache.commons.collections.IteratorUtils; + +import java.util.Arrays; +import java.util.List; + +import static org.apache.flink.table.api.Expressions.$; + +/** + * Simple program that trains a MinHashLSH model and uses it for approximate nearest neighbors and + * similarity join. + */ +public class MinHashLSHExample { + public static void main(String[] args) throws Exception { + + // Creates a new StreamExecutionEnvironment + StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); + + // Creates a StreamTableEnvironment + StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); + + // Generates two datasets + Table data = + tEnv.fromDataStream( + env.fromCollection( + Arrays.asList( + Row.of( + 0, + Vectors.sparse( + 6, + new int[] {0, 1, 2}, + new double[] {1., 1., 1.})), + Row.of( + 1, + Vectors.sparse( + 6, + new int[] {2, 3, 4}, + new double[] {1., 1., 1.})), + Row.of( + 2, + Vectors.sparse( + 6, + new int[] {0, 2, 4}, + new double[] {1., 1., 1.}))), + Types.ROW_NAMED( + new String[] {"id", "vec"}, + Types.INT, + TypeInformation.of(SparseVector.class)))); + + Table dataB =

Review Comment: Since the examples are expected to serve as best practices for users, the variable naming should be stricter. Would it be better to rename `data` to `inputTable` and `dataB` to `similarityJoinTable`? The same applies to the Python example.

########## flink-ml-lib/src/main/java/org/apache/flink/ml/feature/lsh/LSHModel.java: ########## @@ -0,0 +1,457 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.ml.feature.lsh; + +import org.apache.flink.api.common.functions.AggregateFunction; +import org.apache.flink.api.common.functions.FlatMapFunction; +import org.apache.flink.api.common.functions.RichFlatMapFunction; +import org.apache.flink.api.common.functions.RichMapFunction; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.common.typeinfo.Types; +import org.apache.flink.api.java.functions.KeySelector; +import org.apache.flink.api.java.tuple.Tuple2; +import org.apache.flink.api.java.typeutils.RowTypeInfo; +import org.apache.flink.ml.api.Model; +import org.apache.flink.ml.common.broadcast.BroadcastUtils; +import org.apache.flink.ml.common.datastream.DataStreamUtils; +import org.apache.flink.ml.common.datastream.EndOfStreamWindows; +import org.apache.flink.ml.common.datastream.TableUtils; +import org.apache.flink.ml.linalg.DenseVector; +import org.apache.flink.ml.linalg.Vector; +import org.apache.flink.ml.param.Param; +import org.apache.flink.ml.util.ParamUtils; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; +import org.apache.flink.table.api.internal.TableImpl; +import org.apache.flink.types.Row; +import org.apache.flink.util.Collector; +import org.apache.flink.util.Preconditions; + +import org.apache.commons.lang3.ArrayUtils; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.PriorityQueue; + +/** + * Base class for LSH model. + * + * <p>In addition to transforming input feature vectors to multiple hash values, it also supports + * approximate nearest neighbors search within a dataset regarding a key vector and approximate + * similarity join between two datasets. + * + * @param <T> class type of the LSHModel implementation itself. + */ +abstract class LSHModel<T extends LSHModel<T>> implements Model<T>, LSHModelParams<T> { + private static final String MODEL_DATA_BC_KEY = "modelData"; + + private final Map<Param<?>, Object> paramMap = new HashMap<>(); + + /** Stores the corresponding model data class of T. */ + private final Class<? extends LSHModelData> modelDataClass; + + protected Table modelDataTable; + + public LSHModel(Class<? extends LSHModelData> modelDataClass) { + this.modelDataClass = modelDataClass; + ParamUtils.initializeMapWithDefaultValues(paramMap, this); + } + + @Override + public T setModelData(Table... inputs) { + modelDataTable = inputs[0]; Review Comment: Here we can add an argument check. ``` Preconditions.checkArgument(inputs.length == 1); ``` ########## flink-ml-python/pyflink/ml/lib/feature/lsh.py: ########## @@ -0,0 +1,191 @@ +################################################################################ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +import typing +from abc import ABC +from pyflink.java_gateway import get_gateway +from pyflink.table import Table +from pyflink.util.java_utils import to_jarray + +from pyflink.ml.core.linalg import Vector, DenseVector, SparseVector +from pyflink.ml.core.param import Param, IntParam, ParamValidators +from pyflink.ml.core.wrapper import JavaWithParams +from pyflink.ml.lib.feature.common import JavaFeatureEstimator, JavaFeatureModel +from pyflink.ml.lib.param import HasInputCol, HasOutputCol, HasSeed + + +class _LSHModelParams(JavaWithParams, + HasInputCol, + HasOutputCol): + """ + Params for :class:`LSHModel` + """ + + def __init__(self, java_params): + super(_LSHModelParams, self).__init__(java_params) + + +class _LSHParams(_LSHModelParams): + """ + Params for :class:`LSH` + """ + + NUM_HASH_TABLES: Param[int] = IntParam( + "num_hash_tables", "Number of hash tables.", 1, ParamValidators.gt_eq(1) + ) + + NUM_HASH_FUNCTIONS_PER_TABLE: Param[int] = IntParam( + "num_hash_functions_per_table", + "Number of hash functions per table.", + 1, + ParamValidators.gt_eq(1.)) + + def __init__(self, java_params): + super(_LSHParams, self).__init__(java_params) + + def set_num_hash_tables(self, value: int): + return typing.cast(_LSHParams, self.set(self.NUM_HASH_TABLES, value)) + + def get_num_hash_tables(self): + return self.get(self.NUM_HASH_TABLES) + + @property + def num_hash_tables(self): + return self.get_num_hash_tables() + + def set_num_hash_functions_per_table(self, value: int): + return typing.cast(_LSHParams, self.set(self.NUM_HASH_FUNCTIONS_PER_TABLE, value)) + + def get_num_hash_functions_per_table(self): + return self.get(self.NUM_HASH_FUNCTIONS_PER_TABLE) + + @property + def num_hash_functions_per_table(self): + return self.get_num_hash_functions_per_table() + + +class _LSH(JavaFeatureEstimator, ABC): + """ + Base class for estimators which implement LSH (Locality-sensitive hashing) algorithms. + """ + + def __init__(self): + super(_LSH, self).__init__() + + @classmethod + def _java_estimator_package_name(cls) -> str: + return "lsh" + + +class _LSHModel(JavaFeatureModel, ABC): + """ + Base class for LSH model. + """ + + def __init__(self, java_model): + super(_LSHModel, self).__init__(java_model) + + @classmethod + def _java_model_package_name(cls) -> str: + return "lsh" + + def approx_nearest_neighbors(self, dataset: Table, key: Vector, k: int, + dist_col: str = 'distCol'): + """ + Given a dataset and an item, approximately find at most k items which have the closest + distance to the item. If the `outputCol` is missing in the given dataset, this method + transforms the dataset with the model at first. + + :param dataset: The dataset in which to to search for nearest neighbors. + :param key: The item to search for. + :param k: The maximum number of nearest neighbors. 
+ :param dist_col: The output column storing the distance between each neighbor and the + key. + :return: A dataset containing at most k items closest to the key with a column named + `distCol` appended.

Review Comment: To keep the coding style consistent with PyFlink and flink-ml-python, it's recommended to align the beginning of each continuation line. E.g.,
```
:param dist_col: The output column storing the distance between each neighbor and the
                 key.
:return: A dataset containing at most k items closest to the key with a column named
         `distCol` appended.
```
The same applies to the other docstring comments.

########## flink-ml-python/pyflink/ml/lib/feature/lsh.py: ########## @@ -0,0 +1,191 @@ +################################################################################ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +import typing +from abc import ABC +from pyflink.java_gateway import get_gateway +from pyflink.table import Table +from pyflink.util.java_utils import to_jarray + +from pyflink.ml.core.linalg import Vector, DenseVector, SparseVector +from pyflink.ml.core.param import Param, IntParam, ParamValidators +from pyflink.ml.core.wrapper import JavaWithParams +from pyflink.ml.lib.feature.common import JavaFeatureEstimator, JavaFeatureModel +from pyflink.ml.lib.param import HasInputCol, HasOutputCol, HasSeed + + +class _LSHModelParams(JavaWithParams, + HasInputCol, + HasOutputCol): + """ + Params for :class:`LSHModel` + """ + + def __init__(self, java_params): + super(_LSHModelParams, self).__init__(java_params) + + +class _LSHParams(_LSHModelParams): + """ + Params for :class:`LSH` + """ + + NUM_HASH_TABLES: Param[int] = IntParam( + "num_hash_tables", "Number of hash tables.", 1, ParamValidators.gt_eq(1) + ) + + NUM_HASH_FUNCTIONS_PER_TABLE: Param[int] = IntParam( + "num_hash_functions_per_table", + "Number of hash functions per table.", + 1, + ParamValidators.gt_eq(1.)) + + def __init__(self, java_params): + super(_LSHParams, self).__init__(java_params) + + def set_num_hash_tables(self, value: int): + return typing.cast(_LSHParams, self.set(self.NUM_HASH_TABLES, value)) + + def get_num_hash_tables(self): + return self.get(self.NUM_HASH_TABLES) + + @property + def num_hash_tables(self): + return self.get_num_hash_tables() + + def set_num_hash_functions_per_table(self, value: int): + return typing.cast(_LSHParams, self.set(self.NUM_HASH_FUNCTIONS_PER_TABLE, value)) + + def get_num_hash_functions_per_table(self): + return self.get(self.NUM_HASH_FUNCTIONS_PER_TABLE) + + @property + def num_hash_functions_per_table(self): + return self.get_num_hash_functions_per_table() + + +class _LSH(JavaFeatureEstimator, ABC): + """ + Base class for estimators which implement LSH (Locality-sensitive hashing) algorithms.
+ """ + + def __init__(self): + super(_LSH, self).__init__() + + @classmethod + def _java_estimator_package_name(cls) -> str: + return "lsh" + + +class _LSHModel(JavaFeatureModel, ABC): + """ + Base class for LSH model. + """ + + def __init__(self, java_model): + super(_LSHModel, self).__init__(java_model) + + @classmethod + def _java_model_package_name(cls) -> str: + return "lsh" + + def approx_nearest_neighbors(self, dataset: Table, key: Vector, k: int, + dist_col: str = 'distCol'): + """ + Given a dataset and an item, approximately find at most k items which have the closest + distance to the item. If the `outputCol` is missing in the given dataset, this method + transforms the dataset with the model at first. + + :param dataset: The dataset in which to to search for nearest neighbors. + :param key: The item to search for. + :param k: The maximum number of nearest neighbors. + :param dist_col: The output column storing the distance between each neighbor and the + key. + :return: A dataset containing at most k items closest to the key with a column named + `distCol` appended. + """ + j_vectors = get_gateway().jvm.org.apache.flink.ml.linalg.Vectors + if isinstance(key, (DenseVector,)): + j_key = j_vectors.dense(to_jarray(get_gateway().jvm.double, key.values.tolist())) + elif isinstance(key, (SparseVector,)): + # noinspection PyProtectedMember + j_key = j_vectors.sparse( + key.size(), + to_jarray(get_gateway().jvm.int, key._indices.tolist()), + to_jarray(get_gateway().jvm.double, key._values.tolist()) + ) + else: + raise TypeError(f'Key {key} must be an instance of Vector.') + + # noinspection PyProtectedMember + return Table(self._java_obj.approxNearestNeighbors( + dataset._j_table, j_key, k, dist_col), self._t_env) + + def approx_similarity_join(self, dataset_a: Table, dataset_b: Table, threshold: float, + id_col: str, dist_col: str = 'distCol'): + """ + Join two datasets to approximately find all pairs of rows whose distance are smaller

Review Comment:
```suggestion
        Joins two datasets to approximately find all pairs of rows whose distances are smaller
```
Btw, let's go through all the comments to make sure they are grammatically correct.

########## flink-ml-python/pyflink/ml/lib/feature/lsh.py: ########## @@ -0,0 +1,191 @@ +################################################################################ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+################################################################################ + +import typing +from abc import ABC +from pyflink.java_gateway import get_gateway +from pyflink.table import Table +from pyflink.util.java_utils import to_jarray + +from pyflink.ml.core.linalg import Vector, DenseVector, SparseVector +from pyflink.ml.core.param import Param, IntParam, ParamValidators +from pyflink.ml.core.wrapper import JavaWithParams +from pyflink.ml.lib.feature.common import JavaFeatureEstimator, JavaFeatureModel +from pyflink.ml.lib.param import HasInputCol, HasOutputCol, HasSeed + + +class _LSHModelParams(JavaWithParams, + HasInputCol, + HasOutputCol): + """ + Params for :class:`LSHModel` + """ + + def __init__(self, java_params): + super(_LSHModelParams, self).__init__(java_params) + + +class _LSHParams(_LSHModelParams): + """ + Params for :class:`LSH` + """ + + NUM_HASH_TABLES: Param[int] = IntParam( + "num_hash_tables", "Number of hash tables.", 1, ParamValidators.gt_eq(1) + ) + + NUM_HASH_FUNCTIONS_PER_TABLE: Param[int] = IntParam( + "num_hash_functions_per_table", + "Number of hash functions per table.", + 1, + ParamValidators.gt_eq(1.)) + + def __init__(self, java_params): + super(_LSHParams, self).__init__(java_params) + + def set_num_hash_tables(self, value: int): + return typing.cast(_LSHParams, self.set(self.NUM_HASH_TABLES, value)) + + def get_num_hash_tables(self): + return self.get(self.NUM_HASH_TABLES) + + @property + def num_hash_tables(self): + return self.get_num_hash_tables() + + def set_num_hash_functions_per_table(self, value: int): + return typing.cast(_LSHParams, self.set(self.NUM_HASH_FUNCTIONS_PER_TABLE, value)) + + def get_num_hash_functions_per_table(self): + return self.get(self.NUM_HASH_FUNCTIONS_PER_TABLE) + + @property + def num_hash_functions_per_table(self): + return self.get_num_hash_functions_per_table() + + +class _LSH(JavaFeatureEstimator, ABC): + """ + Base class for estimators which implement LSH (Locality-sensitive hashing) algorithms. + """ + + def __init__(self): + super(_LSH, self).__init__() + + @classmethod + def _java_estimator_package_name(cls) -> str: + return "lsh" + + +class _LSHModel(JavaFeatureModel, ABC): + """ + Base class for LSH model. + """ + + def __init__(self, java_model): + super(_LSHModel, self).__init__(java_model) + + @classmethod + def _java_model_package_name(cls) -> str: + return "lsh" + + def approx_nearest_neighbors(self, dataset: Table, key: Vector, k: int, + dist_col: str = 'distCol'): + """ + Given a dataset and an item, approximately find at most k items which have the closest Review Comment: ```suggestion Given a dataset and an item, approximately finds at most k items that have the closest ``` ########## flink-ml-lib/src/test/java/org/apache/flink/ml/feature/MinHashLSHTest.java: ########## @@ -0,0 +1,452 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.ml.feature; + +import org.apache.flink.api.common.restartstrategy.RestartStrategies; +import org.apache.flink.api.java.tuple.Tuple3; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.ml.feature.lsh.MinHashLSH; +import org.apache.flink.ml.feature.lsh.MinHashLSHModel; +import org.apache.flink.ml.feature.lsh.MinHashLSHModelData; +import org.apache.flink.ml.linalg.DenseVector; +import org.apache.flink.ml.linalg.SparseVector; +import org.apache.flink.ml.linalg.Vector; +import org.apache.flink.ml.linalg.Vectors; +import org.apache.flink.ml.util.ReadWriteUtils; +import org.apache.flink.ml.util.TestUtils; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.api.Schema; +import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; +import org.apache.flink.table.api.internal.TableImpl; +import org.apache.flink.test.util.AbstractTestBase; +import org.apache.flink.types.Row; + +import org.apache.commons.collections.IteratorUtils; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.stream.Collectors; + +import static org.apache.flink.table.api.Expressions.$; + +/** Tests {@link MinHashLSH} and {@link MinHashLSHModel}. */ +public class MinHashLSHTest extends AbstractTestBase { + @Rule public final TemporaryFolder tempFolder = new TemporaryFolder(); + private StreamExecutionEnvironment env; + private StreamTableEnvironment tEnv; + + /** + * Default case for most tests. + * + * @return a tuple including the estimator, input data table, and output rows. + */ + private Tuple3<MinHashLSH, Table, List<Row>> getDefaultCase() {

Review Comment: I also think declaring them as variables is enough; it avoids redundant code like
```
Tuple3<MinHashLSH, Table, List<Row>> defaultCase = getDefaultCase();
MinHashLSH lsh = defaultCase.f0;
Table data = defaultCase.f1;
List<Row> expected = defaultCase.f2;
```
and keeps this class aligned with the other test classes.
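A minimal sketch of the inlined-fixture style suggested above, for illustration only (this is not code from the PR; it reuses the estimator settings and table construction already shown in MinHashLSHExample and assumes the test's existing `env`/`tEnv` fields):

```java
// Sketch: declare the shared fixtures as plain variables instead of unpacking a Tuple3
// returned by getDefaultCase(). Variable names follow the reviewer's snippet above.
MinHashLSH lsh =
        new MinHashLSH()
                .setInputCol("vec")
                .setOutputCol("hashes")
                .setSeed(2022)
                .setNumHashTables(5);

// Input table with the same ("id", "vec") schema used in MinHashLSHExample;
// only one row is shown here to keep the sketch short.
Table data =
        tEnv.fromDataStream(
                env.fromCollection(
                        Arrays.asList(
                                Row.of(
                                        0,
                                        Vectors.sparse(
                                                6,
                                                new int[] {0, 1, 2},
                                                new double[] {1., 1., 1.}))),
                        Types.ROW_NAMED(
                                new String[] {"id", "vec"},
                                Types.INT,
                                TypeInformation.of(SparseVector.class))));

// The expected output rows would be declared the same way, e.g.
// List<Row> expected = Arrays.asList(...), and reused directly in each test method.
```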
-- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected]