This is an automated email from the ASF dual-hosted git repository.
yangjie01 pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.3 by this push:
new 1c02ed046bc [SPARK-44034][TESTS][3.3] Add a new test group for sql
module
1c02ed046bc is described below
commit 1c02ed046bc1295c26677046e63f1028a79246f4
Author: yangjie01 <[email protected]>
AuthorDate: Fri Sep 29 14:49:58 2023 +0800
[SPARK-44034][TESTS][3.3] Add a new test group for sql module
### What changes were proposed in this pull request?
The purpose of this pr is to add a new test tag `SlowSQLTest` to the sql
module, identify some Suites whose test cases take more than 3 seconds, and
apply it to the GA testing task to reduce the testing pressure of the `sql
others` group.
### Why are the changes needed?
For a long time, the sql module UTs have had only two groups: `slow` and
`others`. The test cases in group `slow` are fixed, while the number of test
cases in group `others` continues to increase, which has had a certain impact
on the testing duration and stability of group `others`.
So this PR proposes to add a new testing group to share the testing
pressure of the `sql others` group, which makes the testing time of the three
groups more even, and hopefully improves the stability of the GA task.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Should monitor GA
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #43160 from LuciferYang/SPARK-44034-33.
Authored-by: yangjie01 <[email protected]>
Signed-off-by: yangjie01 <[email protected]>
---
.github/workflows/build_and_test.yml | 8 +++++++-
.../java/org/apache/spark/tags/SlowSQLTest.java | 21 ++++++++++-----------
.../spark/sql/ApproximatePercentileQuerySuite.scala | 2 ++
.../org/apache/spark/sql/CachedTableSuite.scala | 2 ++
.../apache/spark/sql/DataFrameAsOfJoinSuite.scala | 2 ++
.../scala/org/apache/spark/sql/DataFrameSuite.scala | 2 ++
.../spark/sql/DataFrameWindowFunctionsSuite.scala | 2 ++
.../org/apache/spark/sql/DatasetCacheSuite.scala | 3 ++-
.../test/scala/org/apache/spark/sql/JoinSuite.scala | 2 ++
.../WriteDistributionAndOrderingSuite.scala | 2 ++
.../execution/OptimizeMetadataOnlyQuerySuite.scala | 2 ++
.../spark/sql/execution/QueryExecutionSuite.scala | 3 ++-
.../execution/adaptive/AdaptiveQueryExecSuite.scala | 2 ++
.../FileSourceAggregatePushDownSuite.scala | 5 +++++
.../parquet/ParquetRebaseDatetimeSuite.scala | 3 +++
.../execution/streaming/state/RocksDBSuite.scala | 3 ++-
.../sql/execution/ui/AllExecutionsPageSuite.scala | 2 ++
.../spark/sql/expressions/ExpressionInfoSuite.scala | 2 ++
.../spark/sql/sources/BucketedReadSuite.scala | 2 ++
.../spark/sql/sources/BucketedWriteSuite.scala | 2 ++
.../DisableUnnecessaryBucketedScanSuite.scala | 3 +++
.../streaming/AcceptsLatestSeenOffsetSuite.scala | 2 ++
.../DeprecatedStreamingAggregationSuite.scala | 2 ++
.../sql/streaming/EventTimeWatermarkSuite.scala | 2 ++
.../spark/sql/streaming/FileStreamSinkSuite.scala | 3 +++
.../spark/sql/streaming/FileStreamSourceSuite.scala | 5 ++++-
.../spark/sql/streaming/FileStreamStressSuite.scala | 2 ++
.../FlatMapGroupsWithStateDistributionSuite.scala | 2 ++
.../sql/streaming/FlatMapGroupsWithStateSuite.scala | 2 ++
.../sql/streaming/MemorySourceStressSuite.scala | 2 ++
.../apache/spark/sql/streaming/StreamSuite.scala | 2 ++
.../sql/streaming/StreamingAggregationSuite.scala | 2 ++
.../sql/streaming/StreamingDeduplicationSuite.scala | 2 ++
.../spark/sql/streaming/StreamingJoinSuite.scala | 5 +++++
.../sql/streaming/StreamingQueryListenerSuite.scala | 2 ++
.../sql/streaming/StreamingQueryManagerSuite.scala | 2 ++
.../spark/sql/streaming/StreamingQuerySuite.scala | 2 ++
.../StreamingSessionWindowDistributionSuite.scala | 2 ++
.../sql/streaming/StreamingSessionWindowSuite.scala | 2 ++
...treamingStateStoreFormatCompatibilitySuite.scala | 2 ++
.../sql/streaming/TriggerAvailableNowSuite.scala | 2 ++
.../ContinuousQueryStatusAndProgressSuite.scala | 2 ++
.../sql/streaming/continuous/ContinuousSuite.scala | 4 ++++
.../sources/StreamingDataSourceV2Suite.scala | 3 +++
.../test/DataStreamReaderWriterSuite.scala | 2 ++
.../streaming/test/DataStreamTableAPISuite.scala | 2 ++
.../sql/streaming/ui/StreamingQueryPageSuite.scala | 2 ++
.../spark/sql/streaming/ui/UISeleniumSuite.scala | 2 ++
48 files changed, 123 insertions(+), 16 deletions(-)
diff --git a/.github/workflows/build_and_test.yml
b/.github/workflows/build_and_test.yml
index f641dc90c22..0dc23a3788a 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -179,12 +179,18 @@ jobs:
hadoop: ${{ needs.configure-jobs.outputs.hadoop }}
hive: hive2.3
included-tags: org.apache.spark.tags.ExtendedSQLTest
+ comment: "- extended tests"
+ - modules: sql
+ java: ${{ needs.configure-jobs.outputs.java }}
+ hadoop: ${{ needs.configure-jobs.outputs.hadoop }}
+ hive: hive2.3
+ included-tags: org.apache.spark.tags.SlowSQLTest
comment: "- slow tests"
- modules: sql
java: ${{ needs.configure-jobs.outputs.java }}
hadoop: ${{ needs.configure-jobs.outputs.hadoop }}
hive: hive2.3
- excluded-tags: org.apache.spark.tags.ExtendedSQLTest
+ excluded-tags:
org.apache.spark.tags.ExtendedSQLTest,org.apache.spark.tags.SlowSQLTest
comment: "- other tests"
env:
MODULES_TO_TEST: ${{ matrix.modules }}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala
b/common/tags/src/test/java/org/apache/spark/tags/SlowSQLTest.java
similarity index 69%
copy from
sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala
copy to common/tags/src/test/java/org/apache/spark/tags/SlowSQLTest.java
index 7f2972edea7..63211aa23b3 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala
+++ b/common/tags/src/test/java/org/apache/spark/tags/SlowSQLTest.java
@@ -15,17 +15,16 @@
* limitations under the License.
*/
-package org.apache.spark.sql.streaming
+package org.apache.spark.tags;
-import org.apache.spark.sql.execution.streaming._
+import org.scalatest.TagAnnotation;
-class MemorySourceStressSuite extends StreamTest {
- import testImplicits._
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
- test("memory stress test") {
- val input = MemoryStream[Int]
- val mapped = input.toDS().map(_ + 1)
-
- runStressTest(mapped, AddData(input, _: _*))
- }
-}
+@TagAnnotation
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD, ElementType.TYPE})
+public @interface SlowSQLTest { }
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala
index 3fd1592a107..3f0b7283b5b 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala
@@ -25,10 +25,12 @@ import
org.apache.spark.sql.catalyst.expressions.aggregate.ApproximatePercentile
import
org.apache.spark.sql.catalyst.expressions.aggregate.ApproximatePercentile.PercentileDigest
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.tags.SlowSQLTest
/**
* End-to-end tests for approximate percentile aggregate function.
*/
+@SlowSQLTest
class ApproximatePercentileQuerySuite extends QueryTest with
SharedSparkSession {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index 4de409f56d0..d27821b59be 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -45,11 +45,13 @@ import org.apache.spark.sql.test.{SharedSparkSession,
SQLTestUtils}
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.storage.{RDDBlockId, StorageLevel}
import org.apache.spark.storage.StorageLevel.{MEMORY_AND_DISK_2, MEMORY_ONLY}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.unsafe.types.CalendarInterval
import org.apache.spark.util.{AccumulatorContext, Utils}
private case class BigData(s: String)
+@SlowSQLTest
class CachedTableSuite extends QueryTest with SQLTestUtils
with SharedSparkSession
with AdaptiveSparkPlanHelper {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala
index 749efe95c5d..e0dfd54e7b3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala
@@ -23,7 +23,9 @@ import
org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
import org.apache.spark.sql.functions._
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class DataFrameAsOfJoinSuite extends QueryTest
with SharedSparkSession
with AdaptiveSparkPlanHelper {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index a696c3fd499..d9044f9dc27 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -48,10 +48,12 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.{ExamplePoint, ExamplePointUDT,
SharedSparkSession}
import org.apache.spark.sql.test.SQLTestData.{ArrayStringWrapper,
ContainerStringWrapper, DecimalData, StringWrapper, TestData2}
import org.apache.spark.sql.types._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.unsafe.types.CalendarInterval
import org.apache.spark.util.Utils
import org.apache.spark.util.random.XORShiftRandom
+@SlowSQLTest
class DataFrameSuite extends QueryTest
with SharedSparkSession
with AdaptiveSparkPlanHelper {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
index e57650ff629..097188186e7 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
@@ -31,10 +31,12 @@ import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
+import org.apache.spark.tags.SlowSQLTest
/**
* Window function testing for DataFrame API.
*/
+@SlowSQLTest
class DataFrameWindowFunctionsSuite extends QueryTest
with SharedSparkSession
with AdaptiveSparkPlanHelper{
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
index 48b20cc9094..a657c6212aa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
@@ -25,8 +25,9 @@ import
org.apache.spark.sql.execution.columnar.{InMemoryRelation, InMemoryTableS
import org.apache.spark.sql.functions._
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.storage.StorageLevel
+import org.apache.spark.tags.SlowSQLTest
-
+@SlowSQLTest
class DatasetCacheSuite extends QueryTest
with SharedSparkSession
with TimeLimits
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
index fc46bdea6a2..93850a99677 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
@@ -37,7 +37,9 @@ import
org.apache.spark.sql.execution.python.BatchEvalPythonExec
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.StructType
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class JoinSuite extends QueryTest with SharedSparkSession with
AdaptiveSparkPlanHelper {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala
index 36efe5ec1d2..c00d19d8fa2 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala
@@ -35,7 +35,9 @@ import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.streaming.{StreamingQueryException, Trigger}
import org.apache.spark.sql.types.{IntegerType, StringType, StructType}
import org.apache.spark.sql.util.QueryExecutionListener
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class WriteDistributionAndOrderingSuite extends
DistributionAndOrderingSuiteBase {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala
index 68691e2f7fd..cbc565974cd 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala
@@ -24,7 +24,9 @@ import
org.apache.spark.sql.catalyst.plans.logical.LocalRelation
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.OPTIMIZER_METADATA_ONLY
import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class OptimizeMetadataOnlyQuerySuite extends QueryTest with SharedSparkSession
{
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
index 41a1cd9b294..8db7a2af83f 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
@@ -219,7 +219,8 @@ class QueryExecutionSuite extends SharedSparkSession {
assertNoTag(tag5, df.queryExecution.sparkPlan)
}
- test("Logging plan changes for execution") {
+ // TODO(SPARK-44074): re-enable this test after SPARK-44074 resolved
+ ignore("Logging plan changes for execution") {
val testAppender = new LogAppender("plan changes")
withLogAppender(testAppender) {
withSQLConf(SQLConf.PLAN_CHANGE_LOG_LEVEL.key -> "INFO") {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
index ca481b6f944..b38c3f41270 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
@@ -45,8 +45,10 @@ import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.test.SQLTestData.TestData
import org.apache.spark.sql.types.{IntegerType, StructType}
import org.apache.spark.sql.util.QueryExecutionListener
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class AdaptiveQueryExecSuite
extends QueryTest
with SharedSparkSession
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala
index 26dfe1a5097..9fe04698a5d 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala
@@ -28,6 +28,7 @@ import org.apache.spark.sql.functions.min
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{BinaryType, BooleanType, ByteType,
DateType, Decimal, DecimalType, DoubleType, FloatType, IntegerType, LongType,
ShortType, StringType, StructField, StructType, TimestampType}
+import org.apache.spark.tags.SlowSQLTest
/**
* A test suite that tests aggregate push down for Parquet and ORC.
@@ -652,12 +653,14 @@ abstract class ParquetAggregatePushDownSuite
SQLConf.PARQUET_AGGREGATE_PUSHDOWN_ENABLED.key
}
+@SlowSQLTest
class ParquetV1AggregatePushDownSuite extends ParquetAggregatePushDownSuite {
override protected def sparkConf: SparkConf =
super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST, "parquet")
}
+@SlowSQLTest
class ParquetV2AggregatePushDownSuite extends ParquetAggregatePushDownSuite {
override protected def sparkConf: SparkConf =
@@ -671,12 +674,14 @@ abstract class OrcAggregatePushDownSuite extends OrcTest
with FileSourceAggregat
SQLConf.ORC_AGGREGATE_PUSHDOWN_ENABLED.key
}
+@SlowSQLTest
class OrcV1AggregatePushDownSuite extends OrcAggregatePushDownSuite {
override protected def sparkConf: SparkConf =
super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST, "orc")
}
+@SlowSQLTest
class OrcV2AggregatePushDownSuite extends OrcAggregatePushDownSuite {
override protected def sparkConf: SparkConf =
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala
index dbf7f54f6ff..e01682a5808 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala
@@ -28,6 +28,7 @@ import
org.apache.spark.sql.internal.SQLConf.{LegacyBehaviorPolicy, ParquetOutpu
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy.{CORRECTED,
EXCEPTION, LEGACY}
import
org.apache.spark.sql.internal.SQLConf.ParquetOutputTimestampType.{INT96,
TIMESTAMP_MICROS, TIMESTAMP_MILLIS}
import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.tags.SlowSQLTest
abstract class ParquetRebaseDatetimeSuite
extends QueryTest
@@ -461,6 +462,7 @@ abstract class ParquetRebaseDatetimeSuite
}
}
+@SlowSQLTest
class ParquetRebaseDatetimeV1Suite extends ParquetRebaseDatetimeSuite {
override protected def sparkConf: SparkConf =
super
@@ -468,6 +470,7 @@ class ParquetRebaseDatetimeV1Suite extends
ParquetRebaseDatetimeSuite {
.set(SQLConf.USE_V1_SOURCE_LIST, "parquet")
}
+@SlowSQLTest
class ParquetRebaseDatetimeV2Suite extends ParquetRebaseDatetimeSuite {
override protected def sparkConf: SparkConf =
super
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala
index 9a0a1a55b88..21106aa72a6 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala
@@ -29,9 +29,10 @@ import org.apache.spark._
import org.apache.spark.sql.catalyst.util.quietly
import org.apache.spark.sql.execution.streaming.CreateAtomicTestManager
import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.tags.ExtendedRocksDBTest
+import org.apache.spark.tags.{ExtendedRocksDBTest, SlowSQLTest}
import org.apache.spark.util.{ThreadUtils, Utils}
+@SlowSQLTest
@ExtendedRocksDBTest
class RocksDBSuite extends SparkFunSuite {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala
index 1f5cbb0e19e..2b880a0a678 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala
@@ -31,8 +31,10 @@ import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.execution.{SparkPlanInfo, SQLExecution}
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.status.ElementTrackingStore
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.kvstore.InMemoryStore
+@SlowSQLTest
class AllExecutionsPageSuite extends SharedSparkSession with BeforeAndAfter {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala
index 7f25831dfe2..e019c42bc87 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala
@@ -25,8 +25,10 @@ import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.execution.HiveResult.hiveResultString
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class ExpressionInfoSuite extends SparkFunSuite with SharedSparkSession {
test("Replace _FUNC_ in ExpressionInfo") {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala
index bdd642a1f90..e103df29bc0 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala
@@ -33,8 +33,10 @@ import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.collection.BitSet
+@SlowSQLTest
class BucketedReadWithoutHiveSupportSuite
extends BucketedReadSuite with SharedSparkSession {
protected override def beforeAll(): Unit = {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
index ae35f29c764..cc0d151def0 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
@@ -29,7 +29,9 @@ import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class BucketedWriteWithoutHiveSupportSuite extends BucketedWriteSuite with
SharedSparkSession {
protected override def beforeAll(): Unit = {
super.beforeAll()
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala
index 737cffc42f9..1f55742cd67 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala
@@ -26,7 +26,9 @@ import
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class DisableUnnecessaryBucketedScanWithoutHiveSupportSuite
extends DisableUnnecessaryBucketedScanSuite
with SharedSparkSession
@@ -38,6 +40,7 @@ class DisableUnnecessaryBucketedScanWithoutHiveSupportSuite
}
}
+@SlowSQLTest
class DisableUnnecessaryBucketedScanWithoutHiveSupportSuiteAE
extends DisableUnnecessaryBucketedScanSuite
with SharedSparkSession
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala
index d3e9a08509b..b0c585ffda5 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala
@@ -26,7 +26,9 @@ import
org.apache.spark.sql.connector.read.streaming.{AcceptsLatestSeenOffset, S
import org.apache.spark.sql.execution.streaming._
import
org.apache.spark.sql.execution.streaming.sources.{ContinuousMemoryStream,
ContinuousMemoryStreamOffset}
import org.apache.spark.sql.types.{LongType, StructType}
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class AcceptsLatestSeenOffsetSuite extends StreamTest with BeforeAndAfter {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala
index 99f7e32d4df..7777887ec62 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala
@@ -24,7 +24,9 @@ import
org.apache.spark.sql.execution.streaming.state.StreamingAggregationStateM
import org.apache.spark.sql.expressions.scalalang.typed
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.OutputMode._
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
@deprecated("This test suite will be removed.", "3.0.0")
class DeprecatedStreamingAggregationSuite extends StateStoreMetricsTest with
Assertions {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala
index 3d315be6367..de0a79834dd 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala
@@ -38,8 +38,10 @@ import
org.apache.spark.sql.execution.streaming.sources.MemorySink
import org.apache.spark.sql.functions.{count, timestamp_seconds, window}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.OutputMode._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class EventTimeWatermarkSuite extends StreamTest with BeforeAndAfter with
Matchers with Logging {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala
index 407d7835a6a..689e016a84b 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala
@@ -40,6 +40,7 @@ import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
abstract class FileStreamSinkSuite extends StreamTest {
@@ -670,6 +671,7 @@ class
PendingCommitFilesTrackingManifestFileCommitProtocol(jobId: String, path:
}
}
+@SlowSQLTest
class FileStreamSinkV1Suite extends FileStreamSinkSuite {
override protected def sparkConf: SparkConf =
super
@@ -720,6 +722,7 @@ class FileStreamSinkV1Suite extends FileStreamSinkSuite {
}
}
+@SlowSQLTest
class FileStreamSinkV2Suite extends FileStreamSinkSuite {
override protected def sparkConf: SparkConf =
super
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
index 1297adb5135..297687e3c24 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
@@ -42,7 +42,8 @@ import
org.apache.spark.sql.execution.streaming.sources.MemorySink
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.StreamManualClock
import org.apache.spark.sql.test.SharedSparkSession
-import org.apache.spark.sql.types.{StructType, _}
+import org.apache.spark.sql.types._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
abstract class FileStreamSourceTest
@@ -224,6 +225,7 @@ abstract class FileStreamSourceTest
val valueSchema = new StructType().add("value", StringType)
}
+@SlowSQLTest
class FileStreamSourceSuite extends FileStreamSourceTest {
import testImplicits._
@@ -2343,6 +2345,7 @@ class FileStreamSourceSuite extends FileStreamSourceTest {
}
}
+@SlowSQLTest
class FileStreamSourceStressTestSuite extends FileStreamSourceTest {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala
index 28412ea07a7..a27a04283da 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala
@@ -24,6 +24,7 @@ import scala.util.Random
import scala.util.control.NonFatal
import org.apache.spark.sql.catalyst.util._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
/**
@@ -36,6 +37,7 @@ import org.apache.spark.util.Utils
*
* At the end, the resulting files are loaded and the answer is checked.
*/
+@SlowSQLTest
class FileStreamStressSuite extends StreamTest {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala
index f1578ae5df9..b597a244710 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala
@@ -26,8 +26,10 @@ import
org.apache.spark.sql.execution.streaming.{FlatMapGroupsWithStateExec, Mem
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.GroupStateTimeout.ProcessingTimeTimeout
import
org.apache.spark.sql.streaming.util.{StatefulOpClusteredDistributionTestHelper,
StreamManualClock}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class FlatMapGroupsWithStateDistributionSuite extends StreamTest
with StatefulOpClusteredDistributionTestHelper {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala
index 7012dec91ec..1e6328089b1 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala
@@ -39,6 +39,7 @@ import org.apache.spark.sql.functions.timestamp_seconds
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.StreamManualClock
import org.apache.spark.sql.types.{DataType, IntegerType}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
/** Class to check custom state types */
@@ -46,6 +47,7 @@ case class RunningCount(count: Long)
case class Result(key: Long, count: Int)
+@SlowSQLTest
class FlatMapGroupsWithStateSuite extends StateStoreMetricsTest {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala
index 7f2972edea7..65ac6712ab4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala
@@ -18,7 +18,9 @@
package org.apache.spark.sql.streaming
import org.apache.spark.sql.execution.streaming._
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class MemorySourceStressSuite extends StreamTest {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala
index f2031b94231..91c7913aecb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala
@@ -47,8 +47,10 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.StreamSourceProvider
import org.apache.spark.sql.streaming.util.{BlockOnStopSourceProvider,
StreamManualClock}
import org.apache.spark.sql.types.{IntegerType, LongType, StructField,
StructType}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class StreamSuite extends StreamTest {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala
index 64dffe7f571..32f61c3e714 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala
@@ -43,12 +43,14 @@ import org.apache.spark.sql.streaming.OutputMode._
import org.apache.spark.sql.streaming.util.{MockSourceProvider,
StreamManualClock}
import org.apache.spark.sql.types.{StructType, TimestampType}
import org.apache.spark.storage.{BlockId, StorageLevel, TestBlockId}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
object FailureSingleton {
var firstTime = true
}
+@SlowSQLTest
class StreamingAggregationSuite extends StateStoreMetricsTest with Assertions {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala
index 0315e03d187..190a67a25d6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala
@@ -26,8 +26,10 @@ import
org.apache.spark.sql.catalyst.streaming.InternalOutputModes._
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class StreamingDeduplicationSuite extends StateStoreMetricsTest {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala
index 5b899453283..ef40ba294cd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala
@@ -36,6 +36,7 @@ import
org.apache.spark.sql.execution.streaming.{MemoryStream, StatefulOperatorS
import
org.apache.spark.sql.execution.streaming.state.{RocksDBStateStoreProvider,
StateStore, StateStoreProviderId}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
abstract class StreamingJoinSuite
@@ -223,6 +224,7 @@ abstract class StreamingJoinSuite
}
}
+@SlowSQLTest
class StreamingInnerJoinSuite extends StreamingJoinSuite {
import testImplicits._
@@ -776,6 +778,7 @@ class StreamingInnerJoinSuite extends StreamingJoinSuite {
}
+@SlowSQLTest
class StreamingOuterJoinSuite extends StreamingJoinSuite {
import testImplicits._
@@ -1417,6 +1420,7 @@ class StreamingOuterJoinSuite extends StreamingJoinSuite {
}
}
+@SlowSQLTest
class StreamingFullOuterJoinSuite extends StreamingJoinSuite {
test("windowed full outer join") {
@@ -1620,6 +1624,7 @@ class StreamingFullOuterJoinSuite extends
StreamingJoinSuite {
}
}
+@SlowSQLTest
class StreamingLeftSemiJoinSuite extends StreamingJoinSuite {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala
index e729fe32eba..1432c3f0a6c 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala
@@ -35,8 +35,10 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.StreamingQueryListener._
import org.apache.spark.sql.streaming.ui.StreamingQueryStatusListener
import org.apache.spark.sql.streaming.util.StreamManualClock
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.JsonProtocol
+@SlowSQLTest
class StreamingQueryListenerSuite extends StreamTest with BeforeAndAfter {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala
index cc66ce85673..7deb0c69d12 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala
@@ -34,8 +34,10 @@ import
org.apache.spark.sql.execution.datasources.v2.StreamingDataSourceV2Relati
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.BlockingSource
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class StreamingQueryManagerSuite extends StreamTest {
import AwaitTerminationTester._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala
index 84060733e86..a40e34e4e2f 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala
@@ -50,7 +50,9 @@ import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.{BlockingSource,
MockSourceProvider, StreamManualClock}
import org.apache.spark.sql.types.StructType
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class StreamingQuerySuite extends StreamTest with BeforeAndAfter with Logging
with MockitoSugar {
import AwaitTerminationTester._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala
index bb7b9804105..51e1b1a23da 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala
@@ -28,8 +28,10 @@ import
org.apache.spark.sql.execution.streaming.{MemoryStream, SessionWindowStat
import org.apache.spark.sql.functions.{count, session_window}
import org.apache.spark.sql.internal.SQLConf
import
org.apache.spark.sql.streaming.util.StatefulOpClusteredDistributionTestHelper
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class StreamingSessionWindowDistributionSuite extends StreamTest
with StatefulOpClusteredDistributionTestHelper with Logging {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala
index 3ed23bad6a3..647395fcc4b 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala
@@ -28,8 +28,10 @@ import org.apache.spark.sql.execution.streaming.MemoryStream
import
org.apache.spark.sql.execution.streaming.state.{HDFSBackedStateStoreProvider,
RocksDBStateStoreProvider}
import org.apache.spark.sql.functions.{count, session_window, sum}
import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class StreamingSessionWindowSuite extends StreamTest
with BeforeAndAfter with Matchers with Logging {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala
index 1032d6c5b6f..4827d06d64d 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala
@@ -29,6 +29,7 @@ import
org.apache.spark.sql.catalyst.streaming.InternalOutputModes.Complete
import org.apache.spark.sql.execution.streaming.MemoryStream
import
org.apache.spark.sql.execution.streaming.state.{InvalidUnsafeRowException,
StateSchemaNotCompatible}
import org.apache.spark.sql.functions._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
/**
@@ -39,6 +40,7 @@ import org.apache.spark.util.Utils
* a new test for the issue, just like the test suite "SPARK-28067 changed the
sum decimal unsafe
* row format".
*/
+@SlowSQLTest
class StreamingStateStoreFormatCompatibilitySuite extends StreamTest {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala
index cb4410d9da9..65deca22207 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala
@@ -26,7 +26,9 @@ import org.apache.spark.sql.connector.read.streaming
import org.apache.spark.sql.connector.read.streaming.{ReadLimit,
SupportsAdmissionControl}
import org.apache.spark.sql.execution.streaming.{LongOffset, MemoryStream,
Offset, SerializedOffset, Source, StreamingExecutionRelation}
import org.apache.spark.sql.types.{LongType, StructType}
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class TriggerAvailableNowSuite extends FileStreamSourceTest {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala
index 59d6ac0af52..d7aa99c30aa 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala
@@ -20,7 +20,9 @@ package org.apache.spark.sql.streaming.continuous
import org.apache.spark.sql.execution.streaming.StreamExecution
import org.apache.spark.sql.execution.streaming.sources.ContinuousMemoryStream
import org.apache.spark.sql.streaming.Trigger
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class ContinuousQueryStatusAndProgressSuite extends ContinuousSuiteBase {
test("StreamingQueryStatus - ContinuousExecution isDataAvailable and
isTriggerActive " +
"should be false") {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala
index 5893c3da098..c2668a0e750 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala
@@ -29,6 +29,7 @@ import org.apache.spark.sql.functions._
import
org.apache.spark.sql.internal.SQLConf.{CONTINUOUS_STREAMING_EPOCH_BACKLOG_QUEUE_SIZE,
MIN_BATCHES_TO_RETAIN}
import org.apache.spark.sql.streaming.{StreamTest, Trigger}
import org.apache.spark.sql.test.TestSparkSession
+import org.apache.spark.tags.SlowSQLTest
class ContinuousSuiteBase extends StreamTest {
// We need more than the default local[2] to be able to schedule all
partitions simultaneously.
@@ -89,6 +90,7 @@ class ContinuousSuiteBase extends StreamTest {
override protected val defaultTrigger = Trigger.Continuous(100)
}
+@SlowSQLTest
class ContinuousSuite extends ContinuousSuiteBase {
import IntegratedUDFTestUtils._
import testImplicits._
@@ -297,6 +299,7 @@ class ContinuousSuite extends ContinuousSuiteBase {
}
}
+@SlowSQLTest
class ContinuousStressSuite extends ContinuousSuiteBase {
import testImplicits._
@@ -372,6 +375,7 @@ class ContinuousStressSuite extends ContinuousSuiteBase {
}
}
+@SlowSQLTest
class ContinuousMetaSuite extends ContinuousSuiteBase {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala
index 251a02d922e..9c0e62ad6a7 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala
@@ -35,8 +35,10 @@ import org.apache.spark.sql.sources.{DataSourceRegister,
StreamSinkProvider}
import org.apache.spark.sql.streaming.{OutputMode, StreamingQuery, StreamTest,
Trigger}
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.util.CaseInsensitiveStringMap
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class FakeDataStream extends MicroBatchStream with ContinuousStream {
override def deserializeOffset(json: String): Offset =
RateStreamOffset(Map())
override def commit(end: Offset): Unit = {}
@@ -269,6 +271,7 @@ object LastWriteOptions {
}
}
+@SlowSQLTest
class StreamingDataSourceV2Suite extends StreamTest {
override def beforeAll(): Unit = {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
index c40ba02fd0d..9fa28684ba7 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
@@ -36,6 +36,7 @@ import org.apache.spark.sql.sources.{StreamSinkProvider,
StreamSourceProvider}
import org.apache.spark.sql.streaming.{OutputMode, StreamingQuery,
StreamingQueryException, StreamTest}
import org.apache.spark.sql.streaming.Trigger._
import org.apache.spark.sql.types._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
object LastOptions {
@@ -108,6 +109,7 @@ class DefaultSource extends StreamSourceProvider with
StreamSinkProvider {
}
}
+@SlowSQLTest
class DataStreamReaderWriterSuite extends StreamTest with BeforeAndAfter {
import testImplicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala
index 61b3ec26a4d..a4432ec9a17 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala
@@ -37,8 +37,10 @@ import org.apache.spark.sql.streaming.StreamTest
import org.apache.spark.sql.streaming.sources.FakeScanBuilder
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.util.CaseInsensitiveStringMap
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class DataStreamTableAPISuite extends StreamTest with BeforeAndAfter {
import testImplicits._
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala
index 78ade6a1eef..0f390dde263 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala
@@ -29,8 +29,10 @@ import org.apache.spark.SparkConf
import org.apache.spark.sql.execution.ui.StreamingQueryStatusStore
import org.apache.spark.sql.streaming.StreamingQueryProgress
import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.ui.SparkUI
+@SlowSQLTest
class StreamingQueryPageSuite extends SharedSparkSession with BeforeAndAfter {
test("correctly display streaming query page") {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala
index db3d6529c99..95f0921922e 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala
@@ -35,8 +35,10 @@ import org.apache.spark.sql.functions.{window => windowFn, _}
import org.apache.spark.sql.internal.SQLConf.SHUFFLE_PARTITIONS
import
org.apache.spark.sql.internal.StaticSQLConf.ENABLED_STREAMING_UI_CUSTOM_METRIC_LIST
import org.apache.spark.sql.streaming.{StreamingQueryException, Trigger}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.ui.SparkUICssErrorHandler
+@SlowSQLTest
class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with
BeforeAndAfterAll {
implicit var webDriver: WebDriver = _
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]