Repository: spark Updated Branches: refs/heads/branch-2.4 80567fad4 -> 904192ad1
[SPARK-25345][ML] Deprecate public APIs from ImageSchema ## What changes were proposed in this pull request? Deprecate public APIs from ImageSchema. ## How was this patch tested? N/A Closes #22349 from WeichenXu123/image_api_deprecate. Authored-by: WeichenXu <weichen...@databricks.com> Signed-off-by: Xiangrui Meng <m...@databricks.com> (cherry picked from commit 08c02e637ac601df2fe890b8b5a7a049bdb4541b) Signed-off-by: Xiangrui Meng <m...@databricks.com> Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/904192ad Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/904192ad Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/904192ad Branch: refs/heads/branch-2.4 Commit: 904192ad18ff09cc5874e09b03447dd5f7754963 Parents: 80567fa Author: WeichenXu <weichen...@databricks.com> Authored: Sat Sep 8 09:09:14 2018 -0700 Committer: Xiangrui Meng <m...@databricks.com> Committed: Sat Sep 8 09:09:33 2018 -0700 ---------------------------------------------------------------------- .../main/scala/org/apache/spark/ml/image/ImageSchema.scala | 4 ++++ python/pyspark/ml/image.py | 8 +++++++- 2 files changed, 11 insertions(+), 1 deletion(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/spark/blob/904192ad/mllib/src/main/scala/org/apache/spark/ml/image/ImageSchema.scala ---------------------------------------------------------------------- diff --git a/mllib/src/main/scala/org/apache/spark/ml/image/ImageSchema.scala b/mllib/src/main/scala/org/apache/spark/ml/image/ImageSchema.scala index dcc40b6..0b13eef 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/image/ImageSchema.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/image/ImageSchema.scala @@ -198,6 +198,8 @@ object ImageSchema { * @return DataFrame with a single column "image" of images; * see ImageSchema for the details */ + @deprecated("use `spark.read.format(\"image\").load(path)` and this `readImages` will be " + + "removed in 3.0.0.", "2.4.0") def readImages(path: String): DataFrame = readImages(path, null, false, -1, false, 1.0, 0) /** @@ -218,6 +220,8 @@ object ImageSchema { * @return DataFrame with a single column "image" of images; * see ImageSchema for the details */ + @deprecated("use `spark.read.format(\"image\").load(path)` and this `readImages` will be " + + "removed in 3.0.0.", "2.4.0") def readImages( path: String, sparkSession: SparkSession, http://git-wip-us.apache.org/repos/asf/spark/blob/904192ad/python/pyspark/ml/image.py ---------------------------------------------------------------------- diff --git a/python/pyspark/ml/image.py b/python/pyspark/ml/image.py index ef6785b..edb90a3 100644 --- a/python/pyspark/ml/image.py +++ b/python/pyspark/ml/image.py @@ -25,8 +25,10 @@ """ import sys +import warnings import numpy as np + from pyspark import SparkContext from pyspark.sql.types import Row, _create_row, _parse_datatype_json_string from pyspark.sql import DataFrame, SparkSession @@ -207,6 +209,9 @@ class _ImageSchema(object): .. note:: If sample ratio is less than 1, sampling uses a PathFilter that is efficient but potentially non-deterministic. + .. note:: Deprecated in 2.4.0. Use `spark.read.format("image").load(path)` instead and + this `readImages` will be removed in 3.0.0. + :param str path: Path to the image directory. :param bool recursive: Recursive search flag. :param int numPartitions: Number of DataFrame partitions. @@ -222,7 +227,8 @@ class _ImageSchema(object): .. versionadded:: 2.3.0 """ - + warnings.warn("`ImageSchema.readImage` is deprecated. " + + "Use `spark.read.format(\"image\").load(path)` instead.", DeprecationWarning) spark = SparkSession.builder.getOrCreate() image_schema = spark._jvm.org.apache.spark.ml.image.ImageSchema jsession = spark._jsparkSession --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org