This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.1 by this push:
     new fea2a8d  [SPARK-33810][TESTS] Reenable test cases disabled in SPARK-31732
fea2a8d is described below

commit fea2a8d4b617e852d639e50238d98b8c0432e3b9
Author: HyukjinKwon <[email protected]>
AuthorDate: Wed Dec 16 08:34:22 2020 -0800

    [SPARK-33810][TESTS] Reenable test cases disabled in SPARK-31732
    
    ### What changes were proposed in this pull request?
    
    The test failures were due to the Jenkins machines being slow. We switched to Ubuntu 20, if I am not mistaken.
    All machines now appear to function properly, unlike in the past, and these tests pass without a problem.
    
    This PR proposes to re-enable them.
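    
    For context, here is a minimal ScalaTest sketch (not part of this patch; the suite and test names are illustrative) of the mechanism the diff below relies on: `ignore` registers a test but skips it at run time, and renaming it back to `test` re-enables execution.
    
    ```scala
    import org.scalatest.funsuite.AnyFunSuite
    
    class ExampleSuite extends AnyFunSuite {
      // `ignore` registers the test but skips it; the runner reports it
      // as "ignored" rather than passed or failed.
      ignore("a check disabled while the machines were slow") {
        assert(1 + 1 == 2)
      }
    
      // Renaming `ignore` back to `test` re-enables execution, which is
      // all this commit does across the four suites below.
      test("the same check, re-enabled") {
        assert(1 + 1 == 2)
      }
    }
    ```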
    
    ### Why are the changes needed?
    
    To restore test coverage.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, dev-only.
    
    ### How was this patch tested?
    
    Jenkins jobs in this PR show whether the re-enabled tests are still flaky.
    
    Closes #30798 from HyukjinKwon/do-not-merge-test.
    
    Authored-by: HyukjinKwon <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
    (cherry picked from commit 3d0323401f7a3e4369a3d3f4ff98f15d19e8a643)
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala  | 3 +--
 .../scala/org/apache/spark/sql/kafka010/KafkaRelationSuite.scala    | 3 +--
 .../apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala    | 6 ++----
 .../scala/org/apache/spark/streaming/StreamingContextSuite.scala    | 3 +--
 4 files changed, 5 insertions(+), 10 deletions(-)

diff --git a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala
index f2be847..62ba459 100644
--- a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala
+++ b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala
@@ -349,8 +349,7 @@ abstract class KafkaMicroBatchSourceSuiteBase extends KafkaSourceSuiteBase {
     )
   }
 
-  // TODO (SPARK-31731): re-enable it
-  ignore("subscribing topic by pattern with topic deletions") {
+  test("subscribing topic by pattern with topic deletions") {
     val topicPrefix = newTopic()
     val topic = topicPrefix + "-seems"
     val topic2 = topicPrefix + "-bad"
diff --git a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaRelationSuite.scala b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaRelationSuite.scala
index 16fa24a..940795f 100644
--- a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaRelationSuite.scala
+++ b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaRelationSuite.scala
@@ -177,8 +177,7 @@ abstract class KafkaRelationSuiteBase extends QueryTest with SharedSparkSession
       ("3", Seq(("e", "f".getBytes(UTF_8)), ("e", "g".getBytes(UTF_8))))).toDF)
   }
 
-  // TODO (SPARK-31729): re-enable it
-  ignore("timestamp provided for starting and ending") {
+  test("timestamp provided for starting and ending") {
     val (topic, timestamps) = prepareTimestampRelatedUnitTest
 
     // timestamp both presented: starting "first" ending "finalized"
diff --git a/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala b/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala
index 72cf3e8..2b7fef1 100644
--- a/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala
+++ b/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala
@@ -332,8 +332,7 @@ class DirectKafkaStreamSuite
   }
 
   // Test to verify the offset ranges can be recovered from the checkpoints
-  // TODO (SPARK-31722): re-enable it
-  ignore("offset recovery") {
+  test("offset recovery") {
     val topic = "recovery"
     kafkaTestUtils.createTopic(topic)
     testDir = Utils.createTempDir()
@@ -420,8 +419,7 @@ class DirectKafkaStreamSuite
   }
 
   // Test to verify the offsets can be recovered from Kafka
-  // TODO (SPARK-31722): re-enable it
-  ignore("offset recovery from kafka") {
+  test("offset recovery from kafka") {
     val topic = "recoveryfromkafka"
     kafkaTestUtils.createTopic(topic)
 
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala
index 4eff464..1d66378 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala
@@ -293,8 +293,7 @@ class StreamingContextSuite
     }
   }
 
-  // TODO (SPARK-31728): re-enable it
-  ignore("stop gracefully") {
+  test("stop gracefully") {
     val conf = new SparkConf().setMaster(master).setAppName(appName)
     conf.set("spark.dummyTimeConfig", "3600s")
     val sc = new SparkContext(conf)


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
