[SPARK-26026][BUILD] Published Scaladoc jars missing from Maven Central

## What changes were proposed in this pull request?

This restores scaladoc artifact generation, which got dropped with the Scala 
2.12 update. The change looks large, but is almost all due to needing to make 
the InterfaceStability annotations top-level classes (i.e. 
`InterfaceStability.Stable` -> `Stable`), unfortunately. A few inner class 
references had to be qualified too.

Lots of scaladoc warnings now reappear. We can choose to disable generation by 
default and enable for releases, later.

## How was this patch tested?

N/A; build runs scaladoc now.

Closes #23069 from srowen/SPARK-26026.

Authored-by: Sean Owen <[email protected]>
Signed-off-by: Sean Owen <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/630e25e3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/630e25e3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/630e25e3

Branch: refs/heads/master
Commit: 630e25e35506c02a0b1e202ef82b1b0f69e50966
Parents: bbbdaa8
Author: Sean Owen <[email protected]>
Authored: Mon Nov 19 08:06:33 2018 -0600
Committer: Sean Owen <[email protected]>
Committed: Mon Nov 19 08:06:33 2018 -0600

----------------------------------------------------------------------
 .../network/protocol/ChunkFetchFailure.java     |  2 +-
 .../network/protocol/ChunkFetchRequest.java     |  2 +-
 .../network/protocol/ChunkFetchSuccess.java     |  2 +-
 .../spark/network/protocol/OneWayMessage.java   |  2 +-
 .../spark/network/protocol/RpcFailure.java      |  2 +-
 .../spark/network/protocol/RpcRequest.java      |  2 +-
 .../spark/network/protocol/RpcResponse.java     |  2 +-
 .../spark/network/protocol/StreamFailure.java   |  2 +-
 .../spark/network/protocol/StreamRequest.java   |  2 +-
 .../spark/network/protocol/StreamResponse.java  |  2 +-
 .../spark/network/protocol/UploadStream.java    |  2 +-
 .../apache/spark/network/sasl/SaslMessage.java  |  3 +-
 .../network/shuffle/RetryingBlockFetcher.java   |  2 +-
 .../org/apache/spark/annotation/Evolving.java   | 30 ++++++++++
 .../spark/annotation/InterfaceStability.java    | 58 ------------------
 .../org/apache/spark/annotation/Stable.java     | 31 ++++++++++
 .../org/apache/spark/annotation/Unstable.java   | 30 ++++++++++
 .../streaming/kinesis/KinesisInputDStream.scala |  6 +-
 .../streaming/kinesis/SparkAWSCredentials.scala |  9 ++-
 .../spark/launcher/AbstractAppHandle.java       | 12 ++--
 .../org/apache/spark/ml/util/ReadWrite.scala    | 10 ++--
 pom.xml                                         |  8 ++-
 .../java/org/apache/spark/sql/RowFactory.java   |  4 +-
 .../sql/execution/UnsafeExternalRowSorter.java  | 10 ++--
 .../spark/sql/streaming/GroupStateTimeout.java  |  4 +-
 .../apache/spark/sql/streaming/OutputMode.java  |  4 +-
 .../org/apache/spark/sql/types/DataTypes.java   |  4 +-
 .../spark/sql/types/SQLUserDefinedType.java     |  4 +-
 .../apache/spark/sql/AnalysisException.scala    |  5 +-
 .../scala/org/apache/spark/sql/Encoder.scala    |  5 +-
 .../scala/org/apache/spark/sql/Encoders.scala   |  4 +-
 .../main/scala/org/apache/spark/sql/Row.scala   |  6 +-
 .../spark/sql/types/AbstractDataType.scala      |  4 +-
 .../org/apache/spark/sql/types/ArrayType.scala  |  6 +-
 .../org/apache/spark/sql/types/BinaryType.scala |  7 +--
 .../apache/spark/sql/types/BooleanType.scala    |  7 +--
 .../org/apache/spark/sql/types/ByteType.scala   |  6 +-
 .../spark/sql/types/CalendarIntervalType.scala  |  6 +-
 .../org/apache/spark/sql/types/DataType.scala   |  6 +-
 .../org/apache/spark/sql/types/DateType.scala   |  6 +-
 .../org/apache/spark/sql/types/Decimal.scala    |  6 +-
 .../apache/spark/sql/types/DecimalType.scala    |  7 +--
 .../org/apache/spark/sql/types/DoubleType.scala |  6 +-
 .../org/apache/spark/sql/types/FloatType.scala  |  6 +-
 .../apache/spark/sql/types/IntegerType.scala    |  6 +-
 .../org/apache/spark/sql/types/LongType.scala   |  6 +-
 .../org/apache/spark/sql/types/MapType.scala    |  6 +-
 .../org/apache/spark/sql/types/Metadata.scala   |  8 +--
 .../org/apache/spark/sql/types/NullType.scala   |  7 +--
 .../org/apache/spark/sql/types/ObjectType.scala |  6 +-
 .../org/apache/spark/sql/types/ShortType.scala  |  6 +-
 .../org/apache/spark/sql/types/StringType.scala |  6 +-
 .../apache/spark/sql/types/StructField.scala    |  4 +-
 .../org/apache/spark/sql/types/StructType.scala |  8 +--
 .../apache/spark/sql/types/TimestampType.scala  |  6 +-
 .../FlatMapGroupsWithStateFunction.java         |  4 +-
 .../function/MapGroupsWithStateFunction.java    |  4 +-
 .../java/org/apache/spark/sql/SaveMode.java     |  4 +-
 .../org/apache/spark/sql/api/java/UDF0.java     |  4 +-
 .../org/apache/spark/sql/api/java/UDF1.java     |  4 +-
 .../org/apache/spark/sql/api/java/UDF10.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF11.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF12.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF13.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF14.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF15.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF16.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF17.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF18.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF19.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF2.java     |  4 +-
 .../org/apache/spark/sql/api/java/UDF20.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF21.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF22.java    |  4 +-
 .../org/apache/spark/sql/api/java/UDF3.java     |  4 +-
 .../org/apache/spark/sql/api/java/UDF4.java     |  4 +-
 .../org/apache/spark/sql/api/java/UDF5.java     |  4 +-
 .../org/apache/spark/sql/api/java/UDF6.java     |  4 +-
 .../org/apache/spark/sql/api/java/UDF7.java     |  4 +-
 .../org/apache/spark/sql/api/java/UDF8.java     |  4 +-
 .../org/apache/spark/sql/api/java/UDF9.java     |  4 +-
 ...chemaColumnConvertNotSupportedException.java |  4 +-
 .../spark/sql/expressions/javalang/typed.java   |  4 +-
 .../sources/v2/BatchReadSupportProvider.java    |  4 +-
 .../sources/v2/BatchWriteSupportProvider.java   |  4 +-
 .../v2/ContinuousReadSupportProvider.java       |  4 +-
 .../spark/sql/sources/v2/DataSourceOptions.java |  4 +-
 .../spark/sql/sources/v2/DataSourceV2.java      |  4 +-
 .../v2/MicroBatchReadSupportProvider.java       |  4 +-
 .../sql/sources/v2/SessionConfigSupport.java    |  4 +-
 .../v2/StreamingWriteSupportProvider.java       |  4 +-
 .../sql/sources/v2/reader/BatchReadSupport.java |  4 +-
 .../sql/sources/v2/reader/InputPartition.java   |  4 +-
 .../sql/sources/v2/reader/PartitionReader.java  |  4 +-
 .../v2/reader/PartitionReaderFactory.java       |  4 +-
 .../sql/sources/v2/reader/ReadSupport.java      |  4 +-
 .../spark/sql/sources/v2/reader/ScanConfig.java |  4 +-
 .../sources/v2/reader/ScanConfigBuilder.java    |  4 +-
 .../spark/sql/sources/v2/reader/Statistics.java |  4 +-
 .../v2/reader/SupportsPushDownFilters.java      |  4 +-
 .../reader/SupportsPushDownRequiredColumns.java |  4 +-
 .../v2/reader/SupportsReportPartitioning.java   |  4 +-
 .../v2/reader/SupportsReportStatistics.java     |  4 +-
 .../partitioning/ClusteredDistribution.java     |  4 +-
 .../v2/reader/partitioning/Distribution.java    |  4 +-
 .../v2/reader/partitioning/Partitioning.java    |  4 +-
 .../streaming/ContinuousPartitionReader.java    |  4 +-
 .../ContinuousPartitionReaderFactory.java       |  4 +-
 .../reader/streaming/ContinuousReadSupport.java |  4 +-
 .../reader/streaming/MicroBatchReadSupport.java |  4 +-
 .../sql/sources/v2/reader/streaming/Offset.java |  4 +-
 .../v2/reader/streaming/PartitionOffset.java    |  4 +-
 .../sources/v2/writer/BatchWriteSupport.java    |  4 +-
 .../spark/sql/sources/v2/writer/DataWriter.java |  4 +-
 .../sources/v2/writer/DataWriterFactory.java    |  4 +-
 .../sources/v2/writer/WriterCommitMessage.java  |  4 +-
 .../streaming/StreamingDataWriterFactory.java   |  4 +-
 .../writer/streaming/StreamingWriteSupport.java |  4 +-
 .../org/apache/spark/sql/streaming/Trigger.java |  4 +-
 .../spark/sql/vectorized/ArrowColumnVector.java |  4 +-
 .../spark/sql/vectorized/ColumnVector.java      |  4 +-
 .../spark/sql/vectorized/ColumnarArray.java     |  4 +-
 .../spark/sql/vectorized/ColumnarBatch.java     |  4 +-
 .../spark/sql/vectorized/ColumnarRow.java       |  4 +-
 .../scala/org/apache/spark/sql/Column.scala     |  8 +--
 .../apache/spark/sql/DataFrameNaFunctions.scala |  5 +-
 .../org/apache/spark/sql/DataFrameReader.scala  |  4 +-
 .../spark/sql/DataFrameStatFunctions.scala      |  4 +-
 .../org/apache/spark/sql/DataFrameWriter.scala  |  4 +-
 .../scala/org/apache/spark/sql/Dataset.scala    | 62 ++++++++++----------
 .../org/apache/spark/sql/DatasetHolder.scala    |  4 +-
 .../apache/spark/sql/ExperimentalMethods.scala  |  4 +-
 .../org/apache/spark/sql/ForeachWriter.scala    |  4 +-
 .../spark/sql/KeyValueGroupedDataset.scala      | 16 ++---
 .../spark/sql/RelationalGroupedDataset.scala    |  4 +-
 .../org/apache/spark/sql/RuntimeConfig.scala    |  5 +-
 .../scala/org/apache/spark/sql/SQLContext.scala | 34 +++++------
 .../org/apache/spark/sql/SQLImplicits.scala     |  4 +-
 .../org/apache/spark/sql/SparkSession.scala     | 48 +++++++--------
 .../spark/sql/SparkSessionExtensions.scala      |  4 +-
 .../org/apache/spark/sql/UDFRegistration.scala  |  4 +-
 .../org/apache/spark/sql/catalog/Catalog.scala  | 16 ++---
 .../apache/spark/sql/catalog/interface.scala    | 10 ++--
 .../sql/execution/streaming/Triggers.scala      |  4 +-
 .../continuous/ContinuousTrigger.scala          |  6 +-
 .../spark/sql/expressions/Aggregator.scala      |  6 +-
 .../sql/expressions/UserDefinedFunction.scala   |  4 +-
 .../apache/spark/sql/expressions/Window.scala   |  6 +-
 .../spark/sql/expressions/WindowSpec.scala      |  4 +-
 .../spark/sql/expressions/scalalang/typed.scala |  4 +-
 .../org/apache/spark/sql/expressions/udaf.scala |  6 +-
 .../scala/org/apache/spark/sql/functions.scala  |  4 +-
 .../sql/internal/BaseSessionStateBuilder.scala  |  4 +-
 .../spark/sql/internal/SessionState.scala       |  6 +-
 .../apache/spark/sql/jdbc/JdbcDialects.scala    |  9 ++-
 .../scala/org/apache/spark/sql/package.scala    |  4 +-
 .../org/apache/spark/sql/sources/filters.scala  | 34 +++++------
 .../apache/spark/sql/sources/interfaces.scala   | 26 ++++----
 .../spark/sql/streaming/DataStreamReader.scala  |  4 +-
 .../spark/sql/streaming/DataStreamWriter.scala  |  8 +--
 .../apache/spark/sql/streaming/GroupState.scala |  5 +-
 .../spark/sql/streaming/ProcessingTime.scala    |  6 +-
 .../spark/sql/streaming/StreamingQuery.scala    |  4 +-
 .../sql/streaming/StreamingQueryException.scala |  4 +-
 .../sql/streaming/StreamingQueryListener.scala  | 14 ++---
 .../sql/streaming/StreamingQueryManager.scala   |  4 +-
 .../sql/streaming/StreamingQueryStatus.scala    |  4 +-
 .../apache/spark/sql/streaming/progress.scala   | 10 ++--
 .../spark/sql/util/QueryExecutionListener.scala |  6 +-
 .../apache/hive/service/cli/thrift/TColumn.java |  2 +-
 .../hive/service/cli/thrift/TColumnValue.java   |  2 +-
 .../hive/service/cli/thrift/TGetInfoValue.java  |  2 +-
 .../hive/service/cli/thrift/TTypeEntry.java     |  2 +-
 .../service/cli/thrift/TTypeQualifierValue.java |  2 +-
 .../apache/hive/service/AbstractService.java    |  8 +--
 .../org/apache/hive/service/FilterService.java  |  2 +-
 .../sql/hive/HiveSessionStateBuilder.scala      |  4 +-
 177 files changed, 590 insertions(+), 563 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchFailure.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchFailure.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchFailure.java
index 7b28a9a..a7afbfa 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchFailure.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchFailure.java
@@ -33,7 +33,7 @@ public final class ChunkFetchFailure extends AbstractMessage 
implements Response
   }
 
   @Override
-  public Type type() { return Type.ChunkFetchFailure; }
+  public Message.Type type() { return Type.ChunkFetchFailure; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchRequest.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchRequest.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchRequest.java
index 26d063f..fe54fcc 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchRequest.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchRequest.java
@@ -32,7 +32,7 @@ public final class ChunkFetchRequest extends AbstractMessage 
implements RequestM
   }
 
   @Override
-  public Type type() { return Type.ChunkFetchRequest; }
+  public Message.Type type() { return Type.ChunkFetchRequest; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchSuccess.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchSuccess.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchSuccess.java
index 94c2ac9..d5c9a9b 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchSuccess.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchSuccess.java
@@ -39,7 +39,7 @@ public final class ChunkFetchSuccess extends 
AbstractResponseMessage {
   }
 
   @Override
-  public Type type() { return Type.ChunkFetchSuccess; }
+  public Message.Type type() { return Type.ChunkFetchSuccess; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
index f7ffb1b..1632fb9 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
@@ -34,7 +34,7 @@ public final class OneWayMessage extends AbstractMessage 
implements RequestMessa
   }
 
   @Override
-  public Type type() { return Type.OneWayMessage; }
+  public Message.Type type() { return Type.OneWayMessage; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcFailure.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcFailure.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcFailure.java
index a76624e..6106190 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcFailure.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcFailure.java
@@ -31,7 +31,7 @@ public final class RpcFailure extends AbstractMessage 
implements ResponseMessage
   }
 
   @Override
-  public Type type() { return Type.RpcFailure; }
+  public Message.Type type() { return Type.RpcFailure; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
index 2b30920..cc1bb95 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
@@ -38,7 +38,7 @@ public final class RpcRequest extends AbstractMessage 
implements RequestMessage
   }
 
   @Override
-  public Type type() { return Type.RpcRequest; }
+  public Message.Type type() { return Type.RpcRequest; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
index d73014e..c03291e 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
@@ -33,7 +33,7 @@ public final class RpcResponse extends 
AbstractResponseMessage {
   }
 
   @Override
-  public Type type() { return Type.RpcResponse; }
+  public Message.Type type() { return Type.RpcResponse; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
index 258ef81..68fcfa7 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
@@ -33,7 +33,7 @@ public final class StreamFailure extends AbstractMessage 
implements ResponseMess
   }
 
   @Override
-  public Type type() { return Type.StreamFailure; }
+  public Message.Type type() { return Type.StreamFailure; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
index dc183c0..1b135af 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
@@ -34,7 +34,7 @@ public final class StreamRequest extends AbstractMessage 
implements RequestMessa
    }
 
   @Override
-  public Type type() { return Type.StreamRequest; }
+  public Message.Type type() { return Type.StreamRequest; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
index 50b8116..568108c 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
@@ -40,7 +40,7 @@ public final class StreamResponse extends 
AbstractResponseMessage {
   }
 
   @Override
-  public Type type() { return Type.StreamResponse; }
+  public Message.Type type() { return Type.StreamResponse; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/protocol/UploadStream.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/UploadStream.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/UploadStream.java
index fa1d26e..7d21151 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/UploadStream.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/UploadStream.java
@@ -52,7 +52,7 @@ public final class UploadStream extends AbstractMessage 
implements RequestMessag
   }
 
   @Override
-  public Type type() { return Type.UploadStream; }
+  public Message.Type type() { return Type.UploadStream; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
----------------------------------------------------------------------
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
index 7331c2b..1b03300 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
@@ -23,6 +23,7 @@ import io.netty.buffer.Unpooled;
 import org.apache.spark.network.buffer.NettyManagedBuffer;
 import org.apache.spark.network.protocol.Encoders;
 import org.apache.spark.network.protocol.AbstractMessage;
+import org.apache.spark.network.protocol.Message;
 
 /**
  * Encodes a Sasl-related message which is attempting to authenticate using 
some credentials tagged
@@ -46,7 +47,7 @@ class SaslMessage extends AbstractMessage {
   }
 
   @Override
-  public Type type() { return Type.User; }
+  public Message.Type type() { return Type.User; }
 
   @Override
   public int encodedLength() {

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockFetcher.java
----------------------------------------------------------------------
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockFetcher.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockFetcher.java
index f309dda..6bf3da9 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockFetcher.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RetryingBlockFetcher.java
@@ -101,7 +101,7 @@ public class RetryingBlockFetcher {
 
   public RetryingBlockFetcher(
       TransportConf conf,
-      BlockFetchStarter fetchStarter,
+      RetryingBlockFetcher.BlockFetchStarter fetchStarter,
       String[] blockIds,
       BlockFetchingListener listener) {
     this.fetchStarter = fetchStarter;

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/tags/src/main/java/org/apache/spark/annotation/Evolving.java
----------------------------------------------------------------------
diff --git 
a/common/tags/src/main/java/org/apache/spark/annotation/Evolving.java 
b/common/tags/src/main/java/org/apache/spark/annotation/Evolving.java
new file mode 100644
index 0000000..87e8948
--- /dev/null
+++ b/common/tags/src/main/java/org/apache/spark/annotation/Evolving.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.annotation;
+
+import java.lang.annotation.*;
+
+/**
+ * APIs that are meant to evolve towards becoming stable APIs, but are not 
stable APIs yet.
+ * Evolving interfaces can change from one feature release to another release 
(i.e. 2.1 to 2.2).
+ */
+@Documented
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, 
ElementType.PARAMETER,
+  ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
+public @interface Evolving {}

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/tags/src/main/java/org/apache/spark/annotation/InterfaceStability.java
----------------------------------------------------------------------
diff --git 
a/common/tags/src/main/java/org/apache/spark/annotation/InterfaceStability.java 
b/common/tags/src/main/java/org/apache/spark/annotation/InterfaceStability.java
deleted file mode 100644
index 02bcec7..0000000
--- 
a/common/tags/src/main/java/org/apache/spark/annotation/InterfaceStability.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.annotation;
-
-import java.lang.annotation.*;
-
-/**
- * Annotation to inform users of how much to rely on a particular package,
- * class or method not changing over time.
- */
-public class InterfaceStability {
-
-  /**
-   * Stable APIs that retain source and binary compatibility within a major 
release.
-   * These interfaces can change from one major release to another major 
release
-   * (e.g. from 1.0 to 2.0).
-   */
-  @Documented
-  @Retention(RetentionPolicy.RUNTIME)
-  @Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, 
ElementType.PARAMETER,
-    ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
-  public @interface Stable {};
-
-  /**
-   * APIs that are meant to evolve towards becoming stable APIs, but are not 
stable APIs yet.
-   * Evolving interfaces can change from one feature release to another 
release (i.e. 2.1 to 2.2).
-   */
-  @Documented
-  @Retention(RetentionPolicy.RUNTIME)
-  @Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, 
ElementType.PARAMETER,
-    ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
-  public @interface Evolving {};
-
-  /**
-   * Unstable APIs, with no guarantee on stability.
-   * Classes that are unannotated are considered Unstable.
-   */
-  @Documented
-  @Retention(RetentionPolicy.RUNTIME)
-  @Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, 
ElementType.PARAMETER,
-    ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
-  public @interface Unstable {};
-}

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/tags/src/main/java/org/apache/spark/annotation/Stable.java
----------------------------------------------------------------------
diff --git a/common/tags/src/main/java/org/apache/spark/annotation/Stable.java 
b/common/tags/src/main/java/org/apache/spark/annotation/Stable.java
new file mode 100644
index 0000000..b198bfb
--- /dev/null
+++ b/common/tags/src/main/java/org/apache/spark/annotation/Stable.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.annotation;
+
+import java.lang.annotation.*;
+
+/**
+ * Stable APIs that retain source and binary compatibility within a major 
release.
+ * These interfaces can change from one major release to another major release
+ * (e.g. from 1.0 to 2.0).
+ */
+@Documented
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, 
ElementType.PARAMETER,
+  ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
+public @interface Stable {}

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/common/tags/src/main/java/org/apache/spark/annotation/Unstable.java
----------------------------------------------------------------------
diff --git 
a/common/tags/src/main/java/org/apache/spark/annotation/Unstable.java 
b/common/tags/src/main/java/org/apache/spark/annotation/Unstable.java
new file mode 100644
index 0000000..88ee721
--- /dev/null
+++ b/common/tags/src/main/java/org/apache/spark/annotation/Unstable.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.annotation;
+
+import java.lang.annotation.*;
+
+/**
+ * Unstable APIs, with no guarantee on stability.
+ * Classes that are unannotated are considered Unstable.
+ */
+@Documented
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER,
+  ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
+public @interface Unstable {}

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala
----------------------------------------------------------------------
diff --git 
a/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala
 
b/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala
index 1ffec01..d4a428f 100644
--- 
a/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala
+++ 
b/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala
@@ -22,7 +22,7 @@ import scala.reflect.ClassTag
 import 
com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream
 import com.amazonaws.services.kinesis.model.Record
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Evolving
 import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.{BlockId, StorageLevel}
 import org.apache.spark.streaming.{Duration, StreamingContext, Time}
@@ -84,14 +84,14 @@ private[kinesis] class KinesisInputDStream[T: ClassTag](
   }
 }
 
[email protected]
+@Evolving
 object KinesisInputDStream {
   /**
    * Builder for [[KinesisInputDStream]] instances.
    *
    * @since 2.2.0
    */
-  @InterfaceStability.Evolving
+  @Evolving
   class Builder {
     // Required params
     private var streamingContext: Option[StreamingContext] = None

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/SparkAWSCredentials.scala
----------------------------------------------------------------------
diff --git 
a/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/SparkAWSCredentials.scala
 
b/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/SparkAWSCredentials.scala
index 9facfe8..dcb60b2 100644
--- 
a/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/SparkAWSCredentials.scala
+++ 
b/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/SparkAWSCredentials.scala
@@ -14,13 +14,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.streaming.kinesis
 
-import scala.collection.JavaConverters._
+package org.apache.spark.streaming.kinesis
 
 import com.amazonaws.auth._
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Evolving
 import org.apache.spark.internal.Logging
 
 /**
@@ -84,14 +83,14 @@ private[kinesis] final case class STSCredentials(
   }
 }
 
[email protected]
+@Evolving
 object SparkAWSCredentials {
   /**
    * Builder for [[SparkAWSCredentials]] instances.
    *
    * @since 2.2.0
    */
-  @InterfaceStability.Evolving
+  @Evolving
   class Builder {
     private var basicCreds: Option[BasicCredentials] = None
     private var stsCreds: Option[STSCredentials] = None

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/launcher/src/main/java/org/apache/spark/launcher/AbstractAppHandle.java
----------------------------------------------------------------------
diff --git 
a/launcher/src/main/java/org/apache/spark/launcher/AbstractAppHandle.java 
b/launcher/src/main/java/org/apache/spark/launcher/AbstractAppHandle.java
index 9cbebda..0999cbd 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractAppHandle.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractAppHandle.java
@@ -31,8 +31,8 @@ abstract class AbstractAppHandle implements SparkAppHandle {
   private final LauncherServer server;
 
   private LauncherServer.ServerConnection connection;
-  private List<Listener> listeners;
-  private AtomicReference<State> state;
+  private List<SparkAppHandle.Listener> listeners;
+  private AtomicReference<SparkAppHandle.State> state;
   private volatile String appId;
   private volatile boolean disposed;
 
@@ -42,7 +42,7 @@ abstract class AbstractAppHandle implements SparkAppHandle {
   }
 
   @Override
-  public synchronized void addListener(Listener l) {
+  public synchronized void addListener(SparkAppHandle.Listener l) {
     if (listeners == null) {
       listeners = new CopyOnWriteArrayList<>();
     }
@@ -50,7 +50,7 @@ abstract class AbstractAppHandle implements SparkAppHandle {
   }
 
   @Override
-  public State getState() {
+  public SparkAppHandle.State getState() {
     return state.get();
   }
 
@@ -120,11 +120,11 @@ abstract class AbstractAppHandle implements SparkAppHandle {
     }
   }
 
-  void setState(State s) {
+  void setState(SparkAppHandle.State s) {
     setState(s, false);
   }
 
-  void setState(State s, boolean force) {
+  void setState(SparkAppHandle.State s, boolean force) {
     if (force) {
       state.set(s);
       fireEvent(false);

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala 
b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
index d985f8c..fbc7be2 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
@@ -31,7 +31,7 @@ import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
 
 import org.apache.spark.{SparkContext, SparkException}
-import org.apache.spark.annotation.{DeveloperApi, InterfaceStability, Since}
+import org.apache.spark.annotation.{DeveloperApi, Since, Unstable}
 import org.apache.spark.internal.Logging
 import org.apache.spark.ml._
 import org.apache.spark.ml.classification.{OneVsRest, OneVsRestModel}
@@ -84,7 +84,7 @@ private[util] sealed trait BaseReadWrite {
  *
  * @since 2.4.0
  */
[email protected]
+@Unstable
 @Since("2.4.0")
 trait MLWriterFormat {
   /**
@@ -108,7 +108,7 @@ trait MLWriterFormat {
  *
  * @since 2.4.0
  */
[email protected]
+@Unstable
 @Since("2.4.0")
 trait MLFormatRegister extends MLWriterFormat {
   /**
@@ -208,7 +208,7 @@ abstract class MLWriter extends BaseReadWrite with Logging {
 /**
  * A ML Writer which delegates based on the requested format.
  */
[email protected]
+@Unstable
 @Since("2.4.0")
 class GeneralMLWriter(stage: PipelineStage) extends MLWriter with Logging {
   private var source: String = "internal"
@@ -291,7 +291,7 @@ trait MLWritable {
  * Trait for classes that provide `GeneralMLWriter`.
  */
 @Since("2.4.0")
[email protected]
+@Unstable
 trait GeneralMLWritable extends MLWritable {
   /**
    * Returns an `MLWriter` instance for this ML instance.

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 59e3d0f..fcec295 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2016,7 +2016,6 @@
         <plugin>
           <groupId>net.alchim31.maven</groupId>
           <artifactId>scala-maven-plugin</artifactId>
-          <!-- 3.3.1 won't work with zinc; fails to find javac from java.home -->
           <version>3.4.4</version>
           <executions>
             <execution>
@@ -2037,6 +2036,13 @@
                 <goal>testCompile</goal>
               </goals>
             </execution>
+            <execution>
+              <id>attach-scaladocs</id>
+              <phase>verify</phase>
+              <goals>
+                <goal>doc-jar</goal>
+              </goals>
+            </execution>
           </executions>
           <configuration>
             <scalaVersion>${scala.version}</scalaVersion>

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java 
b/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java
index 2ce1fdc..0258e66 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql;
 
-import org.apache.spark.annotation.InterfaceStability;
+import org.apache.spark.annotation.Stable;
 import org.apache.spark.sql.catalyst.expressions.GenericRow;
 
 /**
@@ -25,7 +25,7 @@ import org.apache.spark.sql.catalyst.expressions.GenericRow;
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 public class RowFactory {
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java
index 1b2f5ee..5395e40 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java
@@ -50,7 +50,7 @@ public final class UnsafeExternalRowSorter {
   private long numRowsInserted = 0;
 
   private final StructType schema;
-  private final PrefixComputer prefixComputer;
+  private final UnsafeExternalRowSorter.PrefixComputer prefixComputer;
   private final UnsafeExternalSorter sorter;
 
   public abstract static class PrefixComputer {
@@ -74,7 +74,7 @@ public final class UnsafeExternalRowSorter {
       StructType schema,
       Supplier<RecordComparator> recordComparatorSupplier,
       PrefixComparator prefixComparator,
-      PrefixComputer prefixComputer,
+      UnsafeExternalRowSorter.PrefixComputer prefixComputer,
       long pageSizeBytes,
       boolean canUseRadixSort) throws IOException {
     return new UnsafeExternalRowSorter(schema, recordComparatorSupplier, 
prefixComparator,
@@ -85,7 +85,7 @@ public final class UnsafeExternalRowSorter {
       StructType schema,
       Ordering<InternalRow> ordering,
       PrefixComparator prefixComparator,
-      PrefixComputer prefixComputer,
+      UnsafeExternalRowSorter.PrefixComputer prefixComputer,
       long pageSizeBytes,
       boolean canUseRadixSort) throws IOException {
     Supplier<RecordComparator> recordComparatorSupplier =
@@ -98,9 +98,9 @@ public final class UnsafeExternalRowSorter {
       StructType schema,
       Supplier<RecordComparator> recordComparatorSupplier,
       PrefixComparator prefixComparator,
-      PrefixComputer prefixComputer,
+      UnsafeExternalRowSorter.PrefixComputer prefixComputer,
       long pageSizeBytes,
-      boolean canUseRadixSort) throws IOException {
+      boolean canUseRadixSort) {
     this.schema = schema;
     this.prefixComputer = prefixComputer;
     final SparkEnv sparkEnv = SparkEnv.get();

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/GroupStateTimeout.java
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/GroupStateTimeout.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/GroupStateTimeout.java
index 5f1032d..5f6a46f 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/GroupStateTimeout.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/GroupStateTimeout.java
@@ -17,8 +17,8 @@
 
 package org.apache.spark.sql.streaming;
 
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.annotation.Experimental;
-import org.apache.spark.annotation.InterfaceStability;
 import org.apache.spark.sql.catalyst.plans.logical.*;
 
 /**
@@ -29,7 +29,7 @@ import org.apache.spark.sql.catalyst.plans.logical.*;
  * @since 2.2.0
  */
 @Experimental
[email protected]
+@Evolving
 public class GroupStateTimeout {
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java 
b/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
index 470c128..a3d72a1 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.streaming;
 
-import org.apache.spark.annotation.InterfaceStability;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.catalyst.streaming.InternalOutputModes;
 
 /**
@@ -26,7 +26,7 @@ import 
org.apache.spark.sql.catalyst.streaming.InternalOutputModes;
  *
  * @since 2.0.0
  */
[email protected]
+@Evolving
 public class OutputMode {
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java 
b/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java
index 0f8570f..d786374 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java
@@ -19,7 +19,7 @@ package org.apache.spark.sql.types;
 
 import java.util.*;
 
-import org.apache.spark.annotation.InterfaceStability;
+import org.apache.spark.annotation.Stable;
 
 /**
  * To get/create specific data type, users should use singleton objects and 
factory methods
@@ -27,7 +27,7 @@ import org.apache.spark.annotation.InterfaceStability;
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 public class DataTypes {
   /**
    * Gets the StringType object.

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java 
b/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
index 1290614..a543983 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
@@ -20,7 +20,7 @@ package org.apache.spark.sql.types;
 import java.lang.annotation.*;
 
 import org.apache.spark.annotation.DeveloperApi;
-import org.apache.spark.annotation.InterfaceStability;
+import org.apache.spark.annotation.Evolving;
 
 /**
  * ::DeveloperApi::
@@ -31,7 +31,7 @@ import org.apache.spark.annotation.InterfaceStability;
 @DeveloperApi
 @Retention(RetentionPolicy.RUNTIME)
 @Target(ElementType.TYPE)
[email protected]
+@Evolving
 public @interface SQLUserDefinedType {
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
index 50ee6cd..f5c8767 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
@@ -17,16 +17,15 @@
 
 package org.apache.spark.sql
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 
-
 /**
  * Thrown when a query fails to analyze, usually because the query itself is invalid.
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class AnalysisException protected[sql] (
     val message: String,
     val line: Option[Int] = None,

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
index 7b02317..9853a4f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
@@ -20,10 +20,9 @@ package org.apache.spark.sql
 import scala.annotation.implicitNotFound
 import scala.reflect.ClassTag
 
-import org.apache.spark.annotation.{Experimental, InterfaceStability}
+import org.apache.spark.annotation.{Evolving, Experimental}
 import org.apache.spark.sql.types._
 
-
 /**
  * :: Experimental ::
  * Used to convert a JVM object of type `T` to and from the internal Spark SQL 
representation.
@@ -67,7 +66,7 @@ import org.apache.spark.sql.types._
  * @since 1.6.0
  */
 @Experimental
[email protected]
+@Evolving
 @implicitNotFound("Unable to find encoder for type ${T}. An implicit 
Encoder[${T}] is needed to " +
   "store ${T} instances in a Dataset. Primitive types (Int, String, etc) and 
Product types (case " +
   "classes) are supported by importing spark.implicits._  Support for 
serializing other types " +

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
index 8a30c81..42b865c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
@@ -22,7 +22,7 @@ import java.lang.reflect.Modifier
 import scala.reflect.{classTag, ClassTag}
 import scala.reflect.runtime.universe.TypeTag
 
-import org.apache.spark.annotation.{Experimental, InterfaceStability}
+import org.apache.spark.annotation.{Evolving, Experimental}
 import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal
 import org.apache.spark.sql.catalyst.encoders.{encoderFor, ExpressionEncoder}
 import org.apache.spark.sql.catalyst.expressions.{BoundReference, Cast}
@@ -36,7 +36,7 @@ import org.apache.spark.sql.types._
  * @since 1.6.0
  */
 @Experimental
[email protected]
+@Evolving
 object Encoders {
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
index 180c2d1..e12bf96 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
@@ -20,14 +20,14 @@ package org.apache.spark.sql
 import scala.collection.JavaConverters._
 import scala.util.hashing.MurmurHash3
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.sql.catalyst.expressions.GenericRow
 import org.apache.spark.sql.types.StructType
 
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 object Row {
   /**
    * This method can be used to extract fields from a [[Row]] object in a 
pattern match. Example:
@@ -124,7 +124,7 @@ object Row {
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 trait Row extends Serializable {
   /** Number of elements in the Row. */
   def size: Int = length

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index c43cc74..5367ce2 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.types
 
 import scala.reflect.runtime.universe.TypeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.sql.catalyst.expressions.Expression
 
 /**
@@ -134,7 +134,7 @@ object AtomicType {
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 abstract class NumericType extends AtomicType {
   // Unfortunately we can't get this implicitly as that breaks Spark 
Serialization. In order for
   // implicitly[Numeric[JvmType]] to be valid, we have to change JvmType from 
a type variable to a

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
index 58c75b5..7465569 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
@@ -21,7 +21,7 @@ import scala.math.Ordering
 
 import org.json4s.JsonDSL._
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.sql.catalyst.util.ArrayData
 
 /**
@@ -29,7 +29,7 @@ import org.apache.spark.sql.catalyst.util.ArrayData
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 object ArrayType extends AbstractDataType {
   /**
    * Construct a [[ArrayType]] object with the given element type. The 
`containsNull` is true.
@@ -60,7 +60,7 @@ object ArrayType extends AbstractDataType {
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 case class ArrayType(elementType: DataType, containsNull: Boolean) extends 
DataType {
 
   /** No-arg constructor for kryo. */

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
index 032d6b5..cc8b3e6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
@@ -20,15 +20,14 @@ package org.apache.spark.sql.types
 import scala.math.Ordering
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.sql.catalyst.util.TypeUtils
 
-
 /**
  * The data type representing `Array[Byte]` values.
  * Please use the singleton `DataTypes.BinaryType`.
  */
[email protected]
+@Stable
 class BinaryType private() extends AtomicType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type "BinaryType$" 
in byte code.
@@ -55,5 +54,5 @@ class BinaryType private() extends AtomicType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object BinaryType extends BinaryType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
index 63f354d..5e3de71 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
@@ -20,15 +20,14 @@ package org.apache.spark.sql.types
 import scala.math.Ordering
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
-
+import org.apache.spark.annotation.Stable
 
 /**
  * The data type representing `Boolean` values. Please use the singleton 
`DataTypes.BooleanType`.
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class BooleanType private() extends AtomicType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type 
"BooleanType$" in byte code.
@@ -48,5 +47,5 @@ class BooleanType private() extends AtomicType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object BooleanType extends BooleanType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
index 5854c3f..9d400ee 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
@@ -20,14 +20,14 @@ package org.apache.spark.sql.types
 import scala.math.{Integral, Numeric, Ordering}
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 
 /**
  * The data type representing `Byte` values. Please use the singleton 
`DataTypes.ByteType`.
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class ByteType private() extends IntegralType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type "ByteType$" 
in byte code.
@@ -52,5 +52,5 @@ class ByteType private() extends IntegralType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object ByteType extends ByteType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
index 2342036..8e29787 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.types
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 
 /**
  * The data type representing calendar time intervals. The calendar time 
interval is stored
@@ -29,7 +29,7 @@ import org.apache.spark.annotation.InterfaceStability
  *
  * @since 1.5.0
  */
[email protected]
+@Stable
 class CalendarIntervalType private() extends DataType {
 
   override def defaultSize: Int = 16
@@ -40,5 +40,5 @@ class CalendarIntervalType private() extends DataType {
 /**
  * @since 1.5.0
  */
[email protected]
+@Stable
 case object CalendarIntervalType extends CalendarIntervalType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
index 33fc4b9..c58f7a2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
@@ -26,7 +26,7 @@ import org.json4s.JsonAST.JValue
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.sql.catalyst.analysis.Resolver
 import org.apache.spark.sql.catalyst.expressions.{Cast, Expression}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
@@ -38,7 +38,7 @@ import org.apache.spark.util.Utils
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 abstract class DataType extends AbstractDataType {
   /**
    * Enables matching against DataType for expressions:
@@ -111,7 +111,7 @@ abstract class DataType extends AbstractDataType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 object DataType {
 
   private val FIXED_DECIMAL = """decimal\(\s*(\d+)\s*,\s*(\-?\d+)\s*\)""".r

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
index 9e70dd4..7491014 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.types
 import scala.math.Ordering
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 
 /**
  * A date type, supporting "0001-01-01" through "9999-12-31".
@@ -31,7 +31,7 @@ import org.apache.spark.annotation.InterfaceStability
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class DateType private() extends AtomicType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type "DateType$" 
in byte code.
@@ -53,5 +53,5 @@ class DateType private() extends AtomicType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object DateType extends DateType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 9eed2eb..a3a8446 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.types
 import java.lang.{Long => JLong}
 import java.math.{BigInteger, MathContext, RoundingMode}
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Unstable
 import org.apache.spark.sql.AnalysisException
 
 /**
@@ -31,7 +31,7 @@ import org.apache.spark.sql.AnalysisException
  * - If decimalVal is set, it represents the whole decimal value
  * - Otherwise, the decimal value is longVal / (10 ** _scale)
  */
[email protected]
+@Unstable
 final class Decimal extends Ordered[Decimal] with Serializable {
   import org.apache.spark.sql.types.Decimal._
 
@@ -407,7 +407,7 @@ final class Decimal extends Ordered[Decimal] with 
Serializable {
   }
 }
 
[email protected]
+@Unstable
 object Decimal {
   val ROUND_HALF_UP = BigDecimal.RoundingMode.HALF_UP
   val ROUND_HALF_EVEN = BigDecimal.RoundingMode.HALF_EVEN

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
index 15004e4..25eddaf 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
@@ -21,11 +21,10 @@ import java.util.Locale
 
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}
 
-
 /**
  * The data type representing `java.math.BigDecimal` values.
  * A Decimal that must have fixed precision (the maximum number of digits) and 
scale (the number
@@ -39,7 +38,7 @@ import org.apache.spark.sql.catalyst.expressions.{Expression, 
Literal}
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 case class DecimalType(precision: Int, scale: Int) extends FractionalType {
 
   if (scale > precision) {
@@ -110,7 +109,7 @@ case class DecimalType(precision: Int, scale: Int) extends 
FractionalType {
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 object DecimalType extends AbstractDataType {
   import scala.math.min
 

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
index a5c79ff..afd3353 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
@@ -21,7 +21,7 @@ import scala.math.{Fractional, Numeric, Ordering}
 import scala.math.Numeric.DoubleAsIfIntegral
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.util.Utils
 
 /**
@@ -29,7 +29,7 @@ import org.apache.spark.util.Utils
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class DoubleType private() extends FractionalType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type "DoubleType$" 
in byte code.
@@ -54,5 +54,5 @@ class DoubleType private() extends FractionalType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object DoubleType extends DoubleType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
index 352147e..6d98987 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
@@ -21,7 +21,7 @@ import scala.math.{Fractional, Numeric, Ordering}
 import scala.math.Numeric.FloatAsIfIntegral
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.util.Utils
 
 /**
@@ -29,7 +29,7 @@ import org.apache.spark.util.Utils
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class FloatType private() extends FractionalType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type "FloatType$" 
in byte code.
@@ -55,5 +55,5 @@ class FloatType private() extends FractionalType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object FloatType extends FloatType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
index a85e372..0755202 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
@@ -20,14 +20,14 @@ package org.apache.spark.sql.types
 import scala.math.{Integral, Numeric, Ordering}
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 
 /**
  * The data type representing `Int` values. Please use the singleton 
`DataTypes.IntegerType`.
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class IntegerType private() extends IntegralType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type 
"IntegerType$" in byte code.
@@ -51,5 +51,5 @@ class IntegerType private() extends IntegralType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object IntegerType extends IntegerType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
index 0997028..3c49c72 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
@@ -20,14 +20,14 @@ package org.apache.spark.sql.types
 import scala.math.{Integral, Numeric, Ordering}
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 
 /**
  * The data type representing `Long` values. Please use the singleton 
`DataTypes.LongType`.
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class LongType private() extends IntegralType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type "LongType$" 
in byte code.
@@ -51,5 +51,5 @@ class LongType private() extends IntegralType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object LongType extends LongType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
index 594e155..29b9ffc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.types
 import org.json4s.JsonAST.JValue
 import org.json4s.JsonDSL._
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 
 /**
  * The data type for Maps. Keys in a map are not allowed to have `null` values.
@@ -31,7 +31,7 @@ import org.apache.spark.annotation.InterfaceStability
  * @param valueType The data type of map values.
  * @param valueContainsNull Indicates if map values have `null` values.
  */
[email protected]
+@Stable
 case class MapType(
   keyType: DataType,
   valueType: DataType,
@@ -78,7 +78,7 @@ case class MapType(
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 object MapType extends AbstractDataType {
 
   override private[sql] def defaultConcreteType: DataType = apply(NullType, 
NullType)

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
index 7c15dc0..4979ace 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
@@ -22,7 +22,7 @@ import scala.collection.mutable
 import org.json4s._
 import org.json4s.jackson.JsonMethods._
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 
 
 /**
@@ -37,7 +37,7 @@ import org.apache.spark.annotation.InterfaceStability
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 sealed class Metadata private[types] (private[types] val map: Map[String, Any])
   extends Serializable {
 
@@ -117,7 +117,7 @@ sealed class Metadata private[types] (private[types] val 
map: Map[String, Any])
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 object Metadata {
 
   private[this] val _empty = new Metadata(Map.empty)
@@ -228,7 +228,7 @@ object Metadata {
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class MetadataBuilder {
 
   private val map: mutable.Map[String, Any] = mutable.Map.empty

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
index 494225b..14097a5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
@@ -17,15 +17,14 @@
 
 package org.apache.spark.sql.types
 
-import org.apache.spark.annotation.InterfaceStability
-
+import org.apache.spark.annotation.Stable
 
 /**
  * The data type representing `NULL` values. Please use the singleton 
`DataTypes.NullType`.
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class NullType private() extends DataType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type "NullType$" 
in byte code.
@@ -38,5 +37,5 @@ class NullType private() extends DataType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object NullType extends NullType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala
index 203e85e..6756b20 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala
@@ -19,9 +19,9 @@ package org.apache.spark.sql.types
 
 import scala.language.existentials
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Evolving
 
[email protected]
+@Evolving
 object ObjectType extends AbstractDataType {
   override private[sql] def defaultConcreteType: DataType =
     throw new UnsupportedOperationException(
@@ -38,7 +38,7 @@ object ObjectType extends AbstractDataType {
 /**
  * Represents a JVM object that is passing through Spark SQL expression 
evaluation.
  */
[email protected]
+@Evolving
 case class ObjectType(cls: Class[_]) extends DataType {
   override def defaultSize: Int = 4096
 

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
index ee655c3..9b5ddfe 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
@@ -20,14 +20,14 @@ package org.apache.spark.sql.types
 import scala.math.{Integral, Numeric, Ordering}
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 
 /**
  * The data type representing `Short` values. Please use the singleton 
`DataTypes.ShortType`.
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class ShortType private() extends IntegralType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type "ShortType$" 
in byte code.
@@ -51,5 +51,5 @@ class ShortType private() extends IntegralType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object ShortType extends ShortType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
index 59b124c..8ce1cd0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.types
 import scala.math.Ordering
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.unsafe.types.UTF8String
 
 /**
@@ -28,7 +28,7 @@ import org.apache.spark.unsafe.types.UTF8String
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class StringType private() extends AtomicType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type "StringType$" 
in byte code.
@@ -48,6 +48,6 @@ class StringType private() extends AtomicType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object StringType extends StringType
 

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala
index 35f9970..6f6b561 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.types
 import org.json4s.JsonAST.JValue
 import org.json4s.JsonDSL._
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.sql.catalyst.util.{escapeSingleQuotedString, 
quoteIdentifier}
 
 /**
@@ -33,7 +33,7 @@ import 
org.apache.spark.sql.catalyst.util.{escapeSingleQuotedString, quoteIdenti
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 case class StructField(
     name: String,
     dataType: DataType,

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
index 06289b1..3bef75d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
@@ -24,10 +24,10 @@ import scala.util.control.NonFatal
 import org.json4s.JsonDSL._
 
 import org.apache.spark.SparkException
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 import org.apache.spark.sql.catalyst.expressions.{Attribute, 
AttributeReference, InterpretedOrdering}
 import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, 
LegacyTypeStringParser}
-import org.apache.spark.sql.catalyst.util.{escapeSingleQuotedString, 
quoteIdentifier}
+import org.apache.spark.sql.catalyst.util.quoteIdentifier
 import org.apache.spark.util.Utils
 
 /**
@@ -95,7 +95,7 @@ import org.apache.spark.util.Utils
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 case class StructType(fields: Array[StructField]) extends DataType with 
Seq[StructField] {
 
   /** No-arg constructor for kryo. */
@@ -422,7 +422,7 @@ case class StructType(fields: Array[StructField]) extends 
DataType with Seq[Stru
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 object StructType extends AbstractDataType {
 
   override private[sql] def defaultConcreteType: DataType = new StructType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
index fdb91e0..a20f155 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.types
 import scala.math.Ordering
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Stable
 
 /**
  * The data type representing `java.sql.Timestamp` values.
@@ -28,7 +28,7 @@ import org.apache.spark.annotation.InterfaceStability
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 class TimestampType private() extends AtomicType {
   // The companion object and this class is separated so the companion object 
also subclasses
   // this type. Otherwise, the companion object would be of type 
"TimestampType$" in byte code.
@@ -50,5 +50,5 @@ class TimestampType private() extends AtomicType {
 /**
  * @since 1.3.0
  */
[email protected]
+@Stable
 case object TimestampType extends TimestampType

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/core/src/main/java/org/apache/spark/api/java/function/FlatMapGroupsWithStateFunction.java
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/java/org/apache/spark/api/java/function/FlatMapGroupsWithStateFunction.java
 
b/sql/core/src/main/java/org/apache/spark/api/java/function/FlatMapGroupsWithStateFunction.java
index 802949c..d4e1d89 100644
--- 
a/sql/core/src/main/java/org/apache/spark/api/java/function/FlatMapGroupsWithStateFunction.java
+++ 
b/sql/core/src/main/java/org/apache/spark/api/java/function/FlatMapGroupsWithStateFunction.java
@@ -20,8 +20,8 @@ package org.apache.spark.api.java.function;
 import java.io.Serializable;
 import java.util.Iterator;
 
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.annotation.Experimental;
-import org.apache.spark.annotation.InterfaceStability;
 import org.apache.spark.sql.streaming.GroupState;
 
 /**
@@ -33,7 +33,7 @@ import org.apache.spark.sql.streaming.GroupState;
  * @since 2.1.1
  */
 @Experimental
[email protected]
+@Evolving
 public interface FlatMapGroupsWithStateFunction<K, V, S, R> extends 
Serializable {
   Iterator<R> call(K key, Iterator<V> values, GroupState<S> state) throws 
Exception;
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/core/src/main/java/org/apache/spark/api/java/function/MapGroupsWithStateFunction.java
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/java/org/apache/spark/api/java/function/MapGroupsWithStateFunction.java
 
b/sql/core/src/main/java/org/apache/spark/api/java/function/MapGroupsWithStateFunction.java
index 353e988..f0abfde 100644
--- 
a/sql/core/src/main/java/org/apache/spark/api/java/function/MapGroupsWithStateFunction.java
+++ 
b/sql/core/src/main/java/org/apache/spark/api/java/function/MapGroupsWithStateFunction.java
@@ -20,8 +20,8 @@ package org.apache.spark.api.java.function;
 import java.io.Serializable;
 import java.util.Iterator;
 
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.annotation.Experimental;
-import org.apache.spark.annotation.InterfaceStability;
 import org.apache.spark.sql.streaming.GroupState;
 
 /**
@@ -32,7 +32,7 @@ import org.apache.spark.sql.streaming.GroupState;
  * @since 2.1.1
  */
 @Experimental
[email protected]
+@Evolving
 public interface MapGroupsWithStateFunction<K, V, S, R> extends Serializable {
   R call(K key, Iterator<V> values, GroupState<S> state) throws Exception;
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java
----------------------------------------------------------------------
diff --git a/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java 
b/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java
index 1c3c979..9cc073f 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java
@@ -16,14 +16,14 @@
  */
 package org.apache.spark.sql;
 
-import org.apache.spark.annotation.InterfaceStability;
+import org.apache.spark.annotation.Stable;
 
 /**
  * SaveMode is used to specify the expected behavior of saving a DataFrame to 
a data source.
  *
  * @since 1.3.0
  */
[email protected]
+@Stable
 public enum SaveMode {
   /**
    * Append mode means that when saving a DataFrame to a data source, if 
data/table already exists,

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF0.java
----------------------------------------------------------------------
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF0.java 
b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF0.java
index 4eeb7be..631d6eb 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF0.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF0.java
@@ -19,12 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-import org.apache.spark.annotation.InterfaceStability;
+import org.apache.spark.annotation.Stable;
 
 /**
  * A Spark SQL UDF that has 0 arguments.
  */
[email protected]
+@Stable
 public interface UDF0<R> extends Serializable {
     R call() throws Exception;
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/630e25e3/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java
----------------------------------------------------------------------
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java 
b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java
index 1460daf..a5d0140 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java
@@ -19,12 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-import org.apache.spark.annotation.InterfaceStability;
+import org.apache.spark.annotation.Stable;
 
 /**
  * A Spark SQL UDF that has 1 arguments.
  */
[email protected]
+@Stable
 public interface UDF1<T1, R> extends Serializable {
   R call(T1 t1) throws Exception;
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to