This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch release-1.10
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 966209aea4520c88bc25279e1954ab2e7cc37065
Author: Timo Walther <[email protected]>
AuthorDate: Tue Dec 17 11:47:57 2019 +0100

    [hotfix][table] Use testing infrastructure in tests
---
 .../org/apache/flink/table/catalog/CatalogTableITCase.scala   |  4 +++-
 .../table/runtime/batch/sql/PartitionableSinkITCase.scala     | 11 ++++++-----
 2 files changed, 9 insertions(+), 6 deletions(-)

diff --git 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/catalog/CatalogTableITCase.scala
 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/catalog/CatalogTableITCase.scala
index cece871..e406afd 100644
--- 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/catalog/CatalogTableITCase.scala
+++ 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/catalog/CatalogTableITCase.scala
@@ -30,11 +30,13 @@ import org.junit.runners.Parameterized
 import org.junit.{Before, Ignore, Test}
 import java.util
 
+import org.apache.flink.test.util.AbstractTestBase
+
 import scala.collection.JavaConversions._
 
 /** Test cases for catalog table. */
 @RunWith(classOf[Parameterized])
-class CatalogTableITCase(isStreaming: Boolean) {
+class CatalogTableITCase(isStreaming: Boolean) extends AbstractTestBase {
 
   private val batchExec: ExecutionEnvironment = 
ExecutionEnvironment.getExecutionEnvironment
   private var batchEnv: BatchTableEnvironment = _
diff --git 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/PartitionableSinkITCase.scala
 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/PartitionableSinkITCase.scala
index c7fe1be..219b2db 100644
--- 
a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/PartitionableSinkITCase.scala
+++ 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/PartitionableSinkITCase.scala
@@ -18,11 +18,14 @@
 
 package org.apache.flink.table.runtime.batch.sql
 
+import java.util.{LinkedList => JLinkedList, Map => JMap}
+
 import org.apache.flink.api.common.ExecutionConfig
 import org.apache.flink.api.common.functions.MapFunction
 import org.apache.flink.api.common.io.RichOutputFormat
 import org.apache.flink.api.common.typeinfo.BasicTypeInfo.{INT_TYPE_INFO, 
LONG_TYPE_INFO, STRING_TYPE_INFO}
 import org.apache.flink.api.common.typeinfo.TypeInformation
+import org.apache.flink.api.java
 import org.apache.flink.api.java.DataSet
 import org.apache.flink.api.java.typeutils.RowTypeInfo
 import org.apache.flink.api.scala.ExecutionEnvironment
@@ -34,19 +37,17 @@ import 
org.apache.flink.table.runtime.batch.sql.PartitionableSinkITCase._
 import org.apache.flink.table.sinks.{BatchTableSink, PartitionableTableSink, 
TableSink}
 import org.apache.flink.table.sources.BatchTableSource
 import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType}
+import org.apache.flink.test.util.AbstractTestBase
 import org.apache.flink.types.Row
-
 import org.junit.Assert.assertEquals
 import org.junit.rules.ExpectedException
 import org.junit.{Before, Rule, Test}
 
-import java.util.{LinkedList => JLinkedList, List => JList, Map => JMap}
-import org.apache.flink.api.java
-
 import scala.collection.JavaConversions._
 import scala.collection.Seq
 
-class PartitionableSinkITCase {
+class PartitionableSinkITCase extends AbstractTestBase {
+
   private val batchExec: ExecutionEnvironment = 
ExecutionEnvironment.getExecutionEnvironment
   private var tEnv: BatchTableEnvironment = _
   private val type3 = new RowTypeInfo(INT_TYPE_INFO, LONG_TYPE_INFO, 
STRING_TYPE_INFO)

Reply via email to