lindong28 commented on a change in pull request #36:
URL: https://github.com/apache/flink-ml/pull/36#discussion_r752877279
##########
File path: flink-ml-python/apache_flink_ml/ml/api/core.py
##########
@@ -0,0 +1,221 @@
+################################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+################################################################################
+
+from abc import ABC, abstractmethod
+from typing import TypeVar, Generic, List
+
+from pyflink.datastream import StreamExecutionEnvironment
+from pyflink.table import Table
+
+from apache_flink_ml.ml.param.param import WithParams
+
+T = TypeVar('T')
+E = TypeVar('E')
+M = TypeVar('M')
+
+
+class Stage(WithParams[T], ABC):
+    """
+    Base class for a node in a Pipeline or Graph. The interface is only a concept and does not
+    have any actual functionality. Its subclasses could be Estimator, Model, Transformer or
+    AlgoOperator. No other classes should inherit this interface directly.
+
+    Each stage carries parameters and requires a public empty constructor for restoration.
+    """
+
+    @abstractmethod
+    def save(self, path: str) -> None:
+        """
+        Saves this stage to the given path.
+        """
+        pass
+
+    @classmethod
+    @abstractmethod
+    def load(cls, env: StreamExecutionEnvironment, path: str) -> T:
+        """
+        Instantiates a new stage instance based on the data read from the given path.
+        """
+        pass
+
+
+class AlgoOperator(Stage[T], ABC):
+    """
+    An AlgoOperator takes a list of tables as inputs and produces a list of tables as results.
+    It can be used to encode generic multi-input multi-output computation logic.
+    """
+
+    @abstractmethod
+    def transform(self, *inputs: Table) -> List[Table]:
+        """
+        Applies the AlgoOperator on the given input tables and returns the result tables.
+
+        :param inputs: A list of tables.
+        :return: A list of tables.
+        """
+        pass
+
+
+class Transformer(AlgoOperator[T], ABC):
+    """
+    A Transformer is an AlgoOperator with the semantic difference that it encodes transformation
+    logic, such that a record in the output typically corresponds to one record in the input. In
+    contrast, an AlgoOperator is a better fit for aggregation logic, where a record in the output
+    could be computed from an arbitrary number of records in the input.
+    """
+    pass
+
+
+class Model(Transformer[T], ABC):
+    """
+    A Model is typically generated by invoking :func:`~Estimator.fit`. A Model is a Transformer
+    with extra APIs to set and get model data.
+    """
+
+    def set_model_data(self, *inputs: Table) -> None:
+        raise Exception("This operation is not supported.")
+
+    def get_model_data(self) -> None:
+        """
+        Gets a list of tables representing the model data. Each table could be an unbounded
+        stream of model data changes.
+
+        :return: A list of tables.
+        """
+        raise Exception("This operation is not supported.")
+
+
+class Estimator(Generic[E, M], Stage[E], ABC):
+    """
+    Estimators are responsible for training and generating Models.
+    """
+
+    def fit(self, *inputs: Table) -> Model[M]:
+        """
+        Trains on the given inputs and produces a Model.
+
+        :param inputs: A list of tables.
+        :return: A Model.
+        """
+        pass
+
+
+class PipelineModel(Model):
+    """
+    A PipelineModel acts as a Model. It consists of an ordered list of stages, each of which
+    could be a Model, Transformer or AlgoOperator.
+    """
+
+    def __init__(self, stages: List[Stage]):
+        self._stages = stages
+
+    def transform(self, *inputs: Table) -> List[Table]:
+        """
+        Applies all stages in this PipelineModel on the input tables in order. The output of one
+        stage is used as the input of the next stage (if any). The output of the last stage is
+        returned as the result of this method.
+
+        :param inputs: A list of tables.
+        :return: A list of tables.
+        """
+        for stage in self._stages:
+            if isinstance(stage, AlgoOperator):
+                inputs = stage.transform(*inputs)
+            else:
+                raise TypeError(f"The stage {stage} must be an AlgoOperator.")
+        return list(inputs)
+
+    def save(self, path: str) -> None:
+        from apache_flink_ml.ml.util import read_write_utils
+        read_write_utils.save_pipeline(self, self._stages, path)
+
+    @classmethod
+    def load(cls, env: StreamExecutionEnvironment, path: str) -> 'PipelineModel':
+        from apache_flink_ml.ml.util import read_write_utils
+        return PipelineModel(read_write_utils.load_pipeline(env, path))
+
+    def get_param_map(self):
+        return {}
+
+
+class Pipeline(Estimator[E, PipelineModel]):
+    """
+    A Pipeline acts as an Estimator. It consists of an ordered list of stages, each of which
+    could be an Estimator, Model, Transformer or AlgoOperator.
+    """
+
+    def __init__(self, stages: List[Stage]):
+        self._stages = stages
+
+    def fit(self, *inputs: Table) -> PipelineModel:
+        """
+        Trains the pipeline to fit on the given tables.
+
+        This method goes through all stages of this pipeline in order and does the following on
+        each stage until the last Estimator (inclusive).
+
+        <ul>
+            <li> If a stage is an Estimator, invoke :func:`~Estimator.fit` with the input
+            tables to generate a Model. If there is an Estimator after this stage, transform
+            the input tables using the generated Model to get result tables, then pass the
+            result tables to the next stage as inputs.
+            <li> If a stage is an AlgoOperator AND there is an Estimator after this stage,
+            transform the input tables using this stage to get result tables, then pass the
+            result tables to the next stage as inputs.
+        </ul>
+
+        After all the Estimators are trained to fit their input tables, a new PipelineModel will
+        be created with the same stages in this pipeline, except that all the Estimators in the
+        PipelineModel are replaced with the Models generated in the above process.
+
+        :param inputs: A list of tables.
+        :return: A PipelineModel.
+        """
+        last_estimator_idx = -1
+        for i, stage in enumerate(self._stages):
+            if isinstance(stage, Estimator):
+                last_estimator_idx = i
+
+        model_stages = []
+        last_inputs = inputs
+        for i, stage in enumerate(self._stages):
+            if not isinstance(stage, AlgoOperator):

Review comment:
   Hmm.. I think we allow `Estimator` as a member stage of a pipeline, and `Estimator` does not inherit `AlgoOperator`, so this check needs to be removed?
##########
File path: flink-ml-python/apache_flink_ml/ml/tests/test_stage.py
##########
@@ -0,0 +1,211 @@
+################################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+################################################################################
+import os
+import shutil
+import tempfile
+import unittest
+from typing import Dict, Any
+
+from pyflink.datastream import StreamExecutionEnvironment
+from pyflink.table import StreamTableEnvironment
+
+from apache_flink_ml.ml.api.core import T, Stage
+from apache_flink_ml.ml.param.param import ParamValidators, Param, BooleanParam, IntParam, \
+    FloatParam, StringParam, IntArrayParam, FloatArrayParam, StringArrayParam
+
+BOOLEAN_PARAM = BooleanParam("boolean_param", "Description", False)
+INT_PARAM = IntParam("int_param", "Description", 1, ParamValidators.lt(100))
+FLOAT_PARAM = FloatParam("float_param", "Description", 3.0, ParamValidators.lt(100))
+STRING_PARAM = StringParam('string_param', "Description", "5")
+INT_ARRAY_PARAM = IntArrayParam("int_array_param", "Description", [6, 7])
+FLOAT_ARRAY_PARAM = FloatArrayParam("float_array_param", "Description", [10.0, 11.0])
+STRING_ARRAY_PARAM = StringArrayParam("string_array_param", "Description", ["14", "15"])
+EXTRA_INT_PARAM = IntParam("extra_int_param",
+                           "Description",
+                           20,
+                           ParamValidators.always_true())
+PARAM_WITH_NONE_DEFAULT = IntParam("param_with_none_default",
+                                   "Must be explicitly set with a non-none value",
+                                   None,
+                                   ParamValidators.not_null())
+
+
+class StageTest(unittest.TestCase):
+    def setUp(self):
+        self.env = StreamExecutionEnvironment.get_execution_environment()
+        self.t_env = StreamTableEnvironment.create(self.env)
+        self.t_env.get_config().get_configuration().set_string("parallelism.default", "2")
+        self.temp_dir = tempfile.mkdtemp()
+
+    def tearDown(self) -> None:
+        shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+    def test_param_set_value_with_name(self):
+        stage = MyStage()
+        stage.set(INT_PARAM, 2)
+        self.assertEqual(2, stage.get(INT_PARAM))
+
+        param = stage.get_param("int_param")
+        stage.set(param, 3)
+        self.assertEqual(3, stage.get(param))
+
+        param = stage.get_param('extra_int_param')
+        stage.set(param, 50)
+        self.assertEqual(50, stage.get(param))
+
+    def test_param_with_null_default(self):
+        stage = MyStage()
+        import pytest
+        with pytest.raises(ValueError, match='value should not be None'):
+            stage.get(PARAM_WITH_NONE_DEFAULT)
+
+        stage.set(PARAM_WITH_NONE_DEFAULT, 3)
+        self.assertEqual(3, stage.get(PARAM_WITH_NONE_DEFAULT))
+
+    def test_param_set_invalid_value(self):
+        stage = MyStage()
+        import pytest
+
+        with pytest.raises(ValueError, match='Parameter int_param is given an invalid value 100.'):
+            stage.set(INT_PARAM, 100)
+
+        with pytest.raises(ValueError,
+                           match='Parameter float_param is given an invalid value 100.0.'):
+            stage.set(FLOAT_PARAM, 100.0)
+
+        with pytest.raises(TypeError,
+                           match="Parameter int_param's type <class 'int'> is incompatible with "
+                                 "the type of <class 'str'>"):
+            stage.set(INT_PARAM, "100")
+
+        with pytest.raises(TypeError,
+                           match="Parameter string_param's type <class 'str'> is incompatible with"
+                                 " the type of <class 'int'>"):
+            stage.set(STRING_PARAM, 100)
+
+    def test_param_set_valid_value(self):
+        stage = MyStage()
+
+        stage.set(BOOLEAN_PARAM, True)
+        self.assertTrue(stage.get(BOOLEAN_PARAM))
+
+        stage.set(INT_PARAM, 50)
+        self.assertEqual(50, stage.get(INT_PARAM))
+
+        stage.set(FLOAT_PARAM, 50.0)
+        self.assertEqual(50.0, stage.get(FLOAT_PARAM))
+
+        stage.set(STRING_PARAM, "50")
+        self.assertEqual("50", stage.get(STRING_PARAM))
+
+        stage.set(INT_ARRAY_PARAM, [50, 51])
+        self.assertEqual([50, 51], stage.get(INT_ARRAY_PARAM))
+
+        stage.set(FLOAT_ARRAY_PARAM, [50.0, 51.0])
+        self.assertEqual([50.0, 51.0], stage.get(FLOAT_ARRAY_PARAM))
+
+        stage.set(STRING_ARRAY_PARAM, ["50", "51"])
+        self.assertEqual(["50", "51"], stage.get(STRING_ARRAY_PARAM))
+
+    def test_stage_save_load(self):
+        stage = MyStage()
+        stage.set(PARAM_WITH_NONE_DEFAULT, 1)
+        path = os.path.join(self.temp_dir, "test_stage_save_load")
+        stage.save(path)
+        loaded_stage = MyStage.load(self.env, path)
+        self.assertEqual(stage.get_param_map(), loaded_stage.get_param_map())
+        self.assertEqual(1, loaded_stage.get(INT_PARAM))
+
+    def test_validators(self):
+        gt = ParamValidators.gt(10)
+        self.assertFalse(gt.validate(None))
+        self.assertFalse(gt.validate(5))
+        self.assertFalse(gt.validate(10))
+        self.assertTrue(gt.validate(15))
+
+        gt_eq = ParamValidators.gt_eq(10)
+        self.assertFalse(gt_eq.validate(None))
+        self.assertFalse(gt_eq.validate(5))
+        self.assertTrue(gt_eq.validate(10))
+        self.assertTrue(gt_eq.validate(15))
+
+        lt = ParamValidators.lt(10)
+        self.assertFalse(lt.validate(None))
+        self.assertTrue(lt.validate(5))
+        self.assertFalse(lt.validate(10))
+        self.assertFalse(lt.validate(15))
+
+        lt_eq = ParamValidators.lt_eq(10)
+        self.assertFalse(lt_eq.validate(None))
+        self.assertTrue(lt_eq.validate(5))
+        self.assertTrue(lt_eq.validate(10))
+        self.assertFalse(lt_eq.validate(15))
+
+        in_range_inclusive = ParamValidators.in_range(5, 15)
+        self.assertFalse(in_range_inclusive.validate(None))
+        self.assertFalse(in_range_inclusive.validate(0))
+        self.assertTrue(in_range_inclusive.validate(5))
+        self.assertTrue(in_range_inclusive.validate(10))
+        self.assertTrue(in_range_inclusive.validate(15))
+        self.assertFalse(in_range_inclusive.validate(20))
+
+        in_range_exclusive = ParamValidators.in_range(5, 15, False, False)
+        self.assertFalse(in_range_exclusive.validate(None))
+        self.assertFalse(in_range_exclusive.validate(0))
+        self.assertFalse(in_range_exclusive.validate(5))
+        self.assertTrue(in_range_exclusive.validate(10))
+        self.assertFalse(in_range_exclusive.validate(15))
+        self.assertFalse(in_range_exclusive.validate(20))
+
+        in_array = ParamValidators.in_array([1, 2, 3])
+        self.assertFalse(in_array.validate(None))
+        self.assertTrue(in_array.validate(1))
+        self.assertFalse(in_array.validate(0))
+
+        not_null = ParamValidators.not_null()
+        self.assertTrue(not_null.validate(5))
+        self.assertFalse(not_null.validate(None))
+
+
+class MyStage(Stage):
+    def __init__(self):
+        self._param_map = {}  # type: Dict[Param, Any]
+        self._init_param()
+
+    def save(self, path: str) -> None:
+        from apache_flink_ml.ml.util import read_write_utils
+        read_write_utils.save_metadata(self, path)
+
+    @classmethod
+    def load(cls, env: StreamExecutionEnvironment, path: str) -> T:
+        from apache_flink_ml.ml.util import read_write_utils
+        return read_write_utils.load_stage_param(path)
+
+    def get_param_map(self) -> Dict['Param[Any]', Any]:
+        return self._param_map
+
+    def _init_param(self):
+        self._param_map[BOOLEAN_PARAM] = BOOLEAN_PARAM.default_value

Review comment:
   Sounds good. I don't know as much about Python idiom. Please feel free to keep this pattern if it fits Python idiom better.
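   For reference, a more table-driven variant of the same initialization that some Python codebases prefer; a hypothetical sketch, assuming the module-level `Param` constants declared at the top of this test file:

```python
# Hypothetical alternative: seed the param map from one list of the
# module-level Param constants instead of one assignment per param.
_ALL_PARAMS = [BOOLEAN_PARAM, INT_PARAM, FLOAT_PARAM, STRING_PARAM,
               INT_ARRAY_PARAM, FLOAT_ARRAY_PARAM, STRING_ARRAY_PARAM,
               EXTRA_INT_PARAM, PARAM_WITH_NONE_DEFAULT]

def _init_param(self):
    # Every declared parameter starts out at its default value.
    self._param_map = {param: param.default_value for param in _ALL_PARAMS}
```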
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]