zhipeng93 commented on a change in pull request #32:
URL: https://github.com/apache/flink-ml/pull/32#discussion_r762744664
##########
File path: flink-ml-lib/src/main/java/org/apache/flink/ml/classification/naivebayes/NaiveBayesModelData.java
##########

@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.naivebayes;
+
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.common.serialization.Encoder;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.connector.file.src.reader.SimpleStreamFormat;
+import org.apache.flink.core.fs.FSDataInputStream;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+
+import com.esotericsoftware.kryo.io.Input;
+import com.esotericsoftware.kryo.io.Output;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * The model data of {@link NaiveBayesModel}.
+ *
+ * <p>This class also provides methods to convert model data between Table and Datastream, and
+ * classes to save/load model data.
+ */
+public class NaiveBayesModelData implements Serializable {
+    public final Map<Double, Double>[][] theta;
+    public final double[] piArray;
+    public final int[] labels;
+
+    public NaiveBayesModelData(Map<Double, Double>[][] theta, double[] piArray, int[] labels) {
+        this.theta = theta;
+        this.piArray = piArray;
+        this.labels = labels;
+    }
+
+    /** Converts the provided modelData Datastream into corresponding Table. */
+    public static Table getModelDataTable(DataStream<NaiveBayesModelData> stream) {
+        StreamTableEnvironment tEnv =
+                StreamTableEnvironment.create(stream.getExecutionEnvironment());
+        return tEnv.fromDataStream(stream);
+    }
+
+    /** Converts the provided modelData Table into corresponding DataStream. */
+    public static DataStream<NaiveBayesModelData> getModelDataStream(Table table) {
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) table).getTableEnvironment();
+        return tEnv.toDataStream(table)
+                .map(
+                        (MapFunction<Row, NaiveBayesModelData>)
+                                row -> (NaiveBayesModelData) row.getField("f0"));
+    }
+
+    /** Encoder for the {@link NaiveBayesModelData}. */
+    public static class ModelDataEncoder implements Encoder<NaiveBayesModelData> {
+        @Override
+        public void encode(NaiveBayesModelData modelData, OutputStream outputStream) {
+            Output output = new Output(outputStream);
+
+            output.writeInt(modelData.labels.length);

Review comment:
       nits: can you use Flink's serializers here, similar to `LogisticRegressionModelData#ModelDataEncoder`?
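For illustration only, a minimal sketch of what a Kryo-free encoder could look like. The particular serializer choices (`MapSerializer`, `DoubleSerializer`, and the primitive-array serializers from `org.apache.flink.api.common.typeutils.base`) are assumptions made for this sketch, not code from the PR or from `LogisticRegressionModelData`; a matching decoder would read the fields back in the same order inside a `SimpleStreamFormat`.

```java
// Additional imports this sketch assumes in NaiveBayesModelData.java:
import org.apache.flink.api.common.typeutils.base.DoubleSerializer;
import org.apache.flink.api.common.typeutils.base.MapSerializer;
import org.apache.flink.api.common.typeutils.base.array.DoublePrimitiveArraySerializer;
import org.apache.flink.api.common.typeutils.base.array.IntPrimitiveArraySerializer;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

/** Sketch: encodes NaiveBayesModelData with Flink serializers instead of Kryo. */
public static class ModelDataEncoder implements Encoder<NaiveBayesModelData> {
    @Override
    public void encode(NaiveBayesModelData modelData, OutputStream outputStream)
            throws IOException {
        DataOutputViewStreamWrapper output = new DataOutputViewStreamWrapper(outputStream);
        MapSerializer<Double, Double> mapSerializer =
                new MapSerializer<>(DoubleSerializer.INSTANCE, DoubleSerializer.INSTANCE);

        // theta is a label-by-feature matrix of (featureValue -> probability) maps.
        output.writeInt(modelData.theta.length);
        for (Map<Double, Double>[] featureMaps : modelData.theta) {
            output.writeInt(featureMaps.length);
            for (Map<Double, Double> featureMap : featureMaps) {
                mapSerializer.serialize(featureMap, output);
            }
        }

        DoublePrimitiveArraySerializer.INSTANCE.serialize(modelData.piArray, output);
        IntPrimitiveArraySerializer.INSTANCE.serialize(modelData.labels, output);
    }
}
```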
##########
File path: flink-ml-lib/src/main/java/org/apache/flink/ml/classification/naivebayes/NaiveBayesModel.java
##########

@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.classification.naivebayes;
+
+import org.apache.flink.api.common.eventtime.WatermarkStrategy;
+import org.apache.flink.api.common.functions.AbstractRichFunction;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.connector.source.Source;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.connector.file.sink.FileSink;
+import org.apache.flink.connector.file.src.FileSource;
+import org.apache.flink.core.fs.Path;
+import org.apache.flink.ml.api.Model;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.TableUtils;
+import org.apache.flink.ml.linalg.BLAS;
+import org.apache.flink.ml.linalg.DenseVector;
+import org.apache.flink.ml.linalg.Vector;
+import org.apache.flink.ml.linalg.Vectors;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.BasePathBucketAssigner;
+import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.OnCheckpointRollingPolicy;
+import org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator;
+import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
+import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.lang3.ArrayUtils;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+
+/** A Model which classifies data using the model data computed by {@link NaiveBayes}. */
+public class NaiveBayesModel
+        implements Model<NaiveBayesModel>, NaiveBayesModelParams<NaiveBayesModel> {
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+    private Table modelDataTable;
+
+    public NaiveBayesModel() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public Table[] transform(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+
+        final String predictionCol = getPredictionCol();
+        final String featuresCol = getFeaturesCol();
+        final String broadcastModelKey = "NaiveBayesModelStream";
+
+        RowTypeInfo inputTypeInfo = TableUtils.getRowTypeInfo(inputs[0].getResolvedSchema());
+        RowTypeInfo outputTypeInfo =
+                new RowTypeInfo(
+                        ArrayUtils.addAll(
+                                inputTypeInfo.getFieldTypes(), TypeInformation.of(Integer.class)),
+                        ArrayUtils.addAll(inputTypeInfo.getFieldNames(), predictionCol));
+
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) modelDataTable).getTableEnvironment();
+        DataStream<NaiveBayesModelData> modelDataStream =
+                NaiveBayesModelData.getModelDataStream(modelDataTable);
+        DataStream<Row> input = tEnv.toDataStream(inputs[0]);
+
+        Map<String, DataStream<?>> broadcastMap = new HashMap<>();
+        broadcastMap.put(broadcastModelKey, modelDataStream);
+
+        Function<List<DataStream<?>>, DataStream<Row>> function =
+                dataStreams -> {
+                    DataStream stream = dataStreams.get(0);
+                    return stream.transform(
+                            this.getClass().getSimpleName(),
+                            outputTypeInfo,
+                            new PredictLabelOperator(featuresCol, broadcastModelKey));
+                };
+        DataStream<Row> output =
+                BroadcastUtils.withBroadcastStream(
+                        Collections.singletonList(input), broadcastMap, function);
+
+        Table outputTable = tEnv.fromDataStream(output);
+
+        return new Table[] {outputTable};
+    }
+
+    @Override
+    public void save(String path) throws IOException {
+        String dataPath = ReadWriteUtils.getDataPath(path);
+        FileSink<NaiveBayesModelData> sink =

Review comment:
       nits: can you check out `LogisticRegressionModelData#save/load`? I am not sure which one is better, but I think we should follow the same programming pattern.
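For reference, a rough sketch of the pattern being pointed at: pushing the `FileSink`/`FileSource` wiring behind shared helpers rather than building sinks and sources inline. The helper names `ReadWriteUtils.saveModelData` and `ReadWriteUtils.loadModelData` and their exact signatures are assumed here from the `LogisticRegressionModel` analogue, not taken from this PR, and would need to be checked against that implementation.

```java
// Hypothetical shape only, mirroring LogisticRegressionModel#save/load;
// saveModelData/loadModelData helper signatures are assumptions, not verified APIs.
@Override
public void save(String path) throws IOException {
    ReadWriteUtils.saveMetadata(this, path);
    ReadWriteUtils.saveModelData(
            NaiveBayesModelData.getModelDataStream(modelDataTable),
            path,
            new NaiveBayesModelData.ModelDataEncoder());
}

public static NaiveBayesModel load(StreamExecutionEnvironment env, String path)
        throws IOException {
    NaiveBayesModel model = ReadWriteUtils.loadStageParam(path);
    DataStream<NaiveBayesModelData> modelData =
            ReadWriteUtils.loadModelData(
                    env, path, new NaiveBayesModelData.ModelDataStreamFormat());
    return model.setModelData(NaiveBayesModelData.getModelDataTable(modelData));
}
```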
##########
File path: flink-ml-lib/src/main/java/org/apache/flink/ml/classification/naivebayes/NaiveBayesModel.java
##########

@@ -0,0 +1,213 @@ (excerpt from the same hunk quoted above, inside transform())
+        DataStream<Row> input = tEnv.toDataStream(inputs[0]);
+
+        Map<String, DataStream<?>> broadcastMap = new HashMap<>();
+        broadcastMap.put(broadcastModelKey, modelDataStream);
+
+        Function<List<DataStream<?>>, DataStream<Row>> function =
+                dataStreams -> {
+                    DataStream stream = dataStreams.get(0);
+                    return stream.transform(
+                            this.getClass().getSimpleName(),
+                            outputTypeInfo,
+                            new PredictLabelOperator(featuresCol, broadcastModelKey));
+                };
+        DataStream<Row> output =
+                BroadcastUtils.withBroadcastStream(
+                        Collections.singletonList(input), broadcastMap, function);

Review comment:
       nits: can we use `Collections.singletonMap(broadcastModelKey, modelDataStream)` instead?
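Concretely, the two-line `HashMap` setup could be dropped and the single-entry map passed inline. This is just an illustration of the suggestion against the code quoted above:

```java
// Replace the explicit HashMap with an immutable single-entry map.
DataStream<Row> output =
        BroadcastUtils.withBroadcastStream(
                Collections.singletonList(input),
                Collections.singletonMap(broadcastModelKey, modelDataStream),
                function);
```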
##########
File path: flink-ml-lib/src/main/java/org/apache/flink/ml/classification/naivebayes/NaiveBayesModel.java
##########

@@ -0,0 +1,213 @@ (excerpt from the same hunk quoted above, starting at save())
+    @Override
+    public void save(String path) throws IOException {
+        String dataPath = ReadWriteUtils.getDataPath(path);
+        FileSink<NaiveBayesModelData> sink =
+                FileSink.forRowFormat(
+                                new Path(dataPath), new NaiveBayesModelData.ModelDataEncoder())
+                        .withRollingPolicy(OnCheckpointRollingPolicy.build())
+                        .withBucketAssigner(new BasePathBucketAssigner<>())
+                        .build();
+        NaiveBayesModelData.getModelDataStream(modelDataTable).sinkTo(sink);
+
+        ReadWriteUtils.saveMetadata(this, path);
+    }
+
+    public static NaiveBayesModel load(StreamExecutionEnvironment env, String path)
+            throws IOException {
+        Source<NaiveBayesModelData, ?, ?> source =
+                FileSource.forRecordStreamFormat(
+                                new NaiveBayesModelData.ModelDataStreamFormat(),
+                                ReadWriteUtils.getDataPaths(path))
+                        .build();
+        NaiveBayesModel model = ReadWriteUtils.loadStageParam(path);
+        DataStream<NaiveBayesModelData> modelData =
+                env.fromSource(source, WatermarkStrategy.noWatermarks(), "modelData");
+        model.setModelData(NaiveBayesModelData.getModelDataTable(modelData));
+
+        return model;
+    }
+
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return paramMap;
+    }
+
+    @Override
+    public NaiveBayesModel setModelData(Table... inputs) {
+        Preconditions.checkArgument(inputs.length == 1);
+        modelDataTable = inputs[0];
+        return this;
+    }
+
+    @Override
+    public Table[] getModelData() {
+        return new Table[] {modelDataTable};
+    }
+
+    private static class PredictLabelOperator
+            extends AbstractUdfStreamOperator<Row, AbstractRichFunction>
+            implements OneInputStreamOperator<Row, Row> {
+        private final String featuresCol;
+        private final String broadcastModelKey;
+
+        public PredictLabelOperator(String featuresCol, String broadcastModelKey) {
+            super(new AbstractRichFunction() {});
+            this.featuresCol = featuresCol;
+            this.broadcastModelKey = broadcastModelKey;
+        }
+
+        @Override
+        public void processElement(StreamRecord<Row> streamRecord) {
+            Row inputRow = streamRecord.getValue();
+            NaiveBayesModelData modelData =

Review comment:
       nits: can you cache the `modelData` instead of accessing it repeatedly from the broadcast context?
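A sketch of the suggested caching inside `PredictLabelOperator`. The quoted diff is cut off before the actual broadcast access, so the `getBroadcastVariable` lookup below is an assumption; the point is only to resolve the model data once and reuse it for subsequent records.

```java
// Assumed fragment of PredictLabelOperator; the broadcast lookup call is hypothetical.
private transient NaiveBayesModelData modelData;

@Override
public void processElement(StreamRecord<Row> streamRecord) {
    if (modelData == null) {
        // Resolve the broadcast model data once and cache it for later records.
        modelData =
                (NaiveBayesModelData)
                        userFunction
                                .getRuntimeContext()
                                .getBroadcastVariable(broadcastModelKey)
                                .get(0);
    }
    Row inputRow = streamRecord.getValue();
    // ... compute the prediction from the cached modelData and emit the output row ...
}
```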
--
This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.
