HuangXingBo commented on a change in pull request #36:
URL: https://github.com/apache/flink-ml/pull/36#discussion_r752820431
##########
File path: flink-ml-python/apache_flink_ml/ml/api/core.py
##########
@@ -0,0 +1,221 @@
+################################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+################################################################################
+
+from abc import ABC, abstractmethod
+from typing import TypeVar, Generic, List
+
+from pyflink.datastream import StreamExecutionEnvironment
+from pyflink.table import Table
+
+from apache_flink_ml.ml.param.param import WithParams
+
+T = TypeVar('T')
+E = TypeVar('E')
+M = TypeVar('M')
+
+
+class Stage(WithParams[T], ABC):
+    """
+    Base class for a node in a Pipeline or Graph. The interface is only a concept, and does not have
+    any actual functionality. Its subclasses could be Estimator, Model, Transformer or AlgoOperator.
+    No other classes should inherit this interface directly.
+
+    Each stage is with parameters, and requires a public empty constructor for restoration.
+    """
+
+    @abstractmethod
+    def save(self, path: str) -> None:
+        """
+        Saves this stage to the given path.
+        """
+        pass
+
+    @classmethod
+    @abstractmethod
+    def load(cls, env: StreamExecutionEnvironment, path: str) -> T:
+        """
+        Instantiates a new stage instance based on the data read from the given path.
+        """
+        pass
+
+
+class AlgoOperator(Stage[T], ABC):
+    """
+    An AlgoOperator takes a list of tables as inputs and produces a list of tables as results. It
+    can be used to encode generic multi-input multi-output computation logic.
+    """
+
+    @abstractmethod
+    def transform(self, *inputs: Table) -> List[Table]:
+        """
+        Applies the AlgoOperator on the given input tables and returns the result tables.
+
+        :param inputs: A list of tables.
+        :return: A list of tables.
+        """
+        pass
+
+
+class Transformer(AlgoOperator[T], ABC):
+    """
+    A Transformer is an AlgoOperator with the semantic difference that it encodes the Transformation
+    logic, such that a record in the output typically corresponds to one record in the input. In
+    contrast, an AlgoOperator is a better fit to express aggregation logic where a record in the
+    output could be computed from an arbitrary number of records in the input.
+    """
+    pass
+
+
+class Model(Transformer[T], ABC):
+    """
+    A Model is typically generated by invoking :func:`~Estimator.fit`. A Model is a Transformer with
+    the extra APIs to set and get model data.
+    """
+
+    def set_model_data(self, *inputs: Table) -> None:
+        raise Exception("This operation is not supported.")
+
+    def get_model_data(self) -> None:
+        """
+        Gets a list of tables representing the model data. Each table could be an unbounded stream
+        of model data changes.
+
+        :return: A list of tables.
+ """ + raise Exception("This operation is not supported.") + + +class Estimator(Generic[E, M], Stage[E], ABC): + """ + Estimators are responsible for training and generating Models. + """ + + def fit(self, *inputs: Table) -> Model[M]: + """ + Trains on the given inputs and produces a Model. + + :param inputs: A list of tables. + :return: A Model. + """ + pass + + +class PipelineModel(Model): + """ + A PipelineModel acts as a Model. It consists of an ordered list of stages, each of which could + be a Model, Transformer or AlgoOperator. + """ + + def __init__(self, stages: List[Stage]): + self._stages = stages + + def transform(self, *inputs: Table) -> List[Table]: + """ + Applies all stages in this PipelineModel on the input tables in order. The output of one + stage is used as the input of the next stage (if any). The output of the last stage is + returned as the result of this method. + + :param inputs: A list of tables. + :return: A list of tables. + """ + for stage in self._stages: + if isinstance(stage, AlgoOperator): + inputs = stage.transform(*inputs) + else: + raise TypeError(f"The stage {stage} must be an AlgoOperator.") + return list(inputs) + + def save(self, path: str) -> None: + from apache_flink_ml.ml.util import read_write_utils + read_write_utils.save_pipeline(self, self._stages, path) + + @classmethod + def load(cls, env: StreamExecutionEnvironment, path: str) -> 'PipelineModel': + from apache_flink_ml.ml.util import read_write_utils + return PipelineModel(read_write_utils.load_pipeline(env, path)) + + def get_param_map(self): + return {} + + +class Pipeline(Estimator[E, PipelineModel]): + """ + A Pipeline acts as an Estimator. It consists of an ordered list of stages, each of which could + be an Estimator, Model, Transformer or AlgoOperator. + """ + + def __init__(self, stages: List[Stage]): + self._stages = stages + + def fit(self, *inputs: Table) -> PipelineModel: + """ + Trains the pipeline to fit on the given tables. + + This method goes through all stages of this pipeline in order and does the following on + each stage until the last Estimator (inclusive). + + <ul> + <li> If a stage is an Estimator, invoke :func:`~Estimator.fit` with the input + tables to generate a Model. And if there is Estimator after this stage, transform + the input tables using the generated Model to get result tables, then pass the + result tables to the next stage as inputs. + <li> If a stage is an AlgoOperator AND there is Estimator after this stage, transform + the input tables using this stage to get result tables, then pass the result tables + to the next stage as inputs. + </ul> + + After all the Estimators are trained to fit their input tables, a new PipelineModel will + be created with the same stages in this pipeline, except that all the Estimators in the + PipelineModel are replaced with the models generated in the above process. + + :param inputs: A list of tables. + :return: A PipelineModel. + """ + last_estimator_idx = -1 + for i, stage in enumerate(self._stages): + if isinstance(stage, Estimator): + last_estimator_idx = i + + model_stages = [] + last_inputs = inputs + for i, stage in enumerate(self._stages): + if not isinstance(stage, AlgoOperator): Review comment: If there are no other non-`AlgoOperator` inherited from `Stage` class, we can remove this check. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
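As a usage illustration of the `AlgoOperator` / `PipelineModel` contract documented in the diff above (again a sketch, not part of the patch; it assumes `get_param_map` is the only abstract member inherited from `WithParams`, as the `PipelineModel` implementation in this diff suggests):

```python
# Hypothetical minimal AlgoOperator: returns its input tables unchanged.
from typing import List

from pyflink.datastream import StreamExecutionEnvironment
from pyflink.table import Table

from apache_flink_ml.ml.api.core import AlgoOperator, PipelineModel


class IdentityOperator(AlgoOperator):
    """Hypothetical stage that passes its input tables straight through."""

    def transform(self, *inputs: Table) -> List[Table]:
        # The transform() contract: a list of tables in, a list of tables out.
        return list(inputs)

    def save(self, path: str) -> None:
        # A real stage would persist its parameters here, e.g. via read_write_utils.
        pass

    @classmethod
    def load(cls, env: StreamExecutionEnvironment, path: str) -> 'IdentityOperator':
        return IdentityOperator()

    def get_param_map(self):
        return {}


# PipelineModel.transform feeds the output tables of each stage into the next:
#   model = PipelineModel([IdentityOperator(), IdentityOperator()])
#   outputs = model.transform(some_table)   # -> [some_table], unchanged
```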
