This is an automated email from the ASF dual-hosted git repository.

bli pushed a commit to branch release-1.9
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.9 by this push:
     new d230aac  [FLINK-13312][hive] move tests for data type mappings between Flink and Hive into its own test class
d230aac is described below

commit d230aac5a479891b3d5421105c1e862de94a9a89
Author: bowen.li <bowenl...@gmail.com>
AuthorDate: Wed Jul 17 14:36:09 2019 -0700

    [FLINK-13312][hive] move tests for data type mappings between Flink and Hive into its own test class
    
    This PR moves the unit tests for data type mappings between Flink and Hive into their own test class.
    
    This closes #9151.
---
 ...adataTest.java => HiveCatalogDataTypeTest.java} | 194 +++++++--------------
 .../hive/HiveCatalogGenericMetadataTest.java       | 129 --------------
 .../apache/flink/table/catalog/CatalogTest.java    |   4 +-
 3 files changed, 68 insertions(+), 259 deletions(-)

diff --git a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/catalog/hive/HiveCatalogGenericMetadataTest.java b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/catalog/hive/HiveCatalogDataTypeTest.java
similarity index 62%
copy from flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/catalog/hive/HiveCatalogGenericMetadataTest.java
copy to flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/catalog/hive/HiveCatalogDataTypeTest.java
index 83e0132..e9d40fe 100644
--- a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/catalog/hive/HiveCatalogGenericMetadataTest.java
+++ b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/catalog/hive/HiveCatalogDataTypeTest.java
@@ -20,10 +20,12 @@ package org.apache.flink.table.catalog.hive;
 
 import org.apache.flink.table.api.DataTypes;
 import org.apache.flink.table.api.TableSchema;
-import org.apache.flink.table.catalog.CatalogPartition;
+import org.apache.flink.table.catalog.CatalogDatabase;
+import org.apache.flink.table.catalog.CatalogDatabaseImpl;
 import org.apache.flink.table.catalog.CatalogTable;
 import org.apache.flink.table.catalog.CatalogTableImpl;
-import org.apache.flink.table.catalog.CatalogTestBase;
+import org.apache.flink.table.catalog.ObjectPath;
+import org.apache.flink.table.catalog.config.CatalogConfig;
 import org.apache.flink.table.catalog.exceptions.CatalogException;
 import org.apache.flink.table.types.DataType;
 import org.apache.flink.table.types.logical.BinaryType;
@@ -31,17 +33,36 @@ import org.apache.flink.table.types.logical.VarBinaryType;
 
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 
 import java.util.Arrays;
+import java.util.HashMap;
 
 import static org.junit.Assert.assertEquals;
 
 /**
- * Test for HiveCatalog on generic metadata.
+ * Test for data type mappings in HiveCatalog.
  */
-public class HiveCatalogGenericMetadataTest extends CatalogTestBase {
+public class HiveCatalogDataTypeTest {
+
+       private static HiveCatalog catalog;
+
+       protected final String db1 = "db1";
+       protected final String db2 = "db2";
+
+       protected final String t1 = "t1";
+       protected final String t2 = "t2";
+       protected final ObjectPath path1 = new ObjectPath(db1, t1);
+       protected final ObjectPath path2 = new ObjectPath(db2, t2);
+       protected final ObjectPath path3 = new ObjectPath(db1, t2);
+
+       @Rule
+       public ExpectedException exception = ExpectedException.none();
 
        @BeforeClass
        public static void init() {
@@ -49,12 +70,37 @@ public class HiveCatalogGenericMetadataTest extends CatalogTestBase {
                catalog.open();
        }
 
-       // ------ TODO: Move data types tests to its own test class as it's shared between generic metadata and hive metadata
-       // ------ data types ------
+       @After
+       public void cleanup() throws Exception {
+               if (catalog.tableExists(path1)) {
+                       catalog.dropTable(path1, true);
+               }
+               if (catalog.tableExists(path2)) {
+                       catalog.dropTable(path2, true);
+               }
+               if (catalog.tableExists(path3)) {
+                       catalog.dropTable(path3, true);
+               }
+               if (catalog.functionExists(path1)) {
+                       catalog.dropFunction(path1, true);
+               }
+               if (catalog.databaseExists(db1)) {
+                       catalog.dropDatabase(db1, true);
+               }
+               if (catalog.databaseExists(db2)) {
+                       catalog.dropDatabase(db2, true);
+               }
+       }
+
+       @AfterClass
+       public static void closeup() {
+               if (catalog != null) {
+                       catalog.close();
+               }
+       }
 
        @Test
        public void testDataTypes() throws Exception {
-               // TODO: the following Hive types are not supported in Flink yet, including MAP, STRUCT
                DataType[] types = new DataType[] {
                        DataTypes.TINYINT(),
                        DataTypes.SMALLINT(),
@@ -150,8 +196,11 @@ public class HiveCatalogGenericMetadataTest extends CatalogTestBase {
 
                return new CatalogTableImpl(
                        schema,
-                       getBatchTableProperties(),
-                       TEST_COMMENT
+                       new HashMap<String, String>() {{
+                               put("is_streaming", "false");
+                               put(CatalogConfig.IS_GENERIC, String.valueOf(false));
+                       }},
+                       ""
                );
        }
 
@@ -164,125 +213,12 @@ public class HiveCatalogGenericMetadataTest extends CatalogTestBase {
                assertEquals(table.getSchema(), catalog.getTable(path1).getSchema());
        }
 
-       // ------ partitions ------
-
-       @Test
-       public void testCreatePartition() throws Exception {
-       }
-
-       @Test
-       public void testCreatePartition_TableNotExistException() throws Exception {
-       }
-
-       @Test
-       public void testCreatePartition_TableNotPartitionedException() throws Exception {
-       }
-
-       @Test
-       public void testCreatePartition_PartitionSpecInvalidException() throws Exception {
-       }
-
-       @Test
-       public void testCreatePartition_PartitionAlreadyExistsException() throws Exception {
-       }
-
-       @Test
-       public void testCreatePartition_PartitionAlreadyExists_ignored() throws Exception {
-       }
-
-       @Test
-       public void testDropPartition() throws Exception {
-       }
-
-       @Test
-       public void testDropPartition_TableNotExist() throws Exception {
-       }
-
-       @Test
-       public void testDropPartition_TableNotPartitioned() throws Exception {
-       }
-
-       @Test
-       public void testDropPartition_PartitionSpecInvalid() throws Exception {
-       }
-
-       @Test
-       public void testDropPartition_PartitionNotExist() throws Exception {
-       }
-
-       @Test
-       public void testDropPartition_PartitionNotExist_ignored() throws Exception {
-       }
-
-       @Test
-       public void testAlterPartition() throws Exception {
-       }
-
-       @Test
-       public void testAlterPartition_TableNotExist() throws Exception {
-       }
-
-       @Test
-       public void testAlterPartition_TableNotPartitioned() throws Exception {
-       }
-
-       @Test
-       public void testAlterPartition_PartitionSpecInvalid() throws Exception {
-       }
-
-       @Test
-       public void testAlterPartition_PartitionNotExist() throws Exception {
-       }
-
-       @Test
-       public void testAlterPartition_PartitionNotExist_ignored() throws Exception {
-       }
-
-       @Test
-       public void testGetPartition_TableNotExist() throws Exception {
-       }
-
-       @Test
-       public void testGetPartition_TableNotPartitioned() throws Exception {
-       }
-
-       @Test
-       public void testGetPartition_PartitionSpecInvalid_invalidPartitionSpec() throws Exception {
-       }
-
-       @Test
-       public void testGetPartition_PartitionSpecInvalid_sizeNotEqual() throws Exception {
-       }
-
-       @Test
-       public void testGetPartition_PartitionNotExist() throws Exception {
-       }
-
-       @Test
-       public void testPartitionExists() throws Exception {
-       }
-
-       @Test
-       public void testListPartitionPartialSpec() throws Exception {
-       }
-
-       @Override
-       public void testGetPartitionStats() throws Exception {
-       }
-
-       @Override
-       public void testAlterPartitionTableStats() throws Exception {
-       }
-
-       // ------ test utils ------
-
-       @Override
-       protected boolean isGeneric() {
-               return true;
-       }
-
-       @Override
-       public CatalogPartition createPartition() {
-               throw new UnsupportedOperationException();
+       private static CatalogDatabase createDb() {
+               return new CatalogDatabaseImpl(
+                       new HashMap<String, String>() {{
+                               put("k1", "v1");
+                       }},
+                       ""
+               );
        }
 }
diff --git a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/catalog/hive/HiveCatalogGenericMetadataTest.java b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/catalog/hive/HiveCatalogGenericMetadataTest.java
index 83e0132..2bd0310 100644
--- a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/catalog/hive/HiveCatalogGenericMetadataTest.java
+++ b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/catalog/hive/HiveCatalogGenericMetadataTest.java
@@ -18,26 +18,12 @@
 
 package org.apache.flink.table.catalog.hive;
 
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.api.TableSchema;
 import org.apache.flink.table.catalog.CatalogPartition;
-import org.apache.flink.table.catalog.CatalogTable;
-import org.apache.flink.table.catalog.CatalogTableImpl;
 import org.apache.flink.table.catalog.CatalogTestBase;
-import org.apache.flink.table.catalog.exceptions.CatalogException;
-import org.apache.flink.table.types.DataType;
-import org.apache.flink.table.types.logical.BinaryType;
-import org.apache.flink.table.types.logical.VarBinaryType;
 
-import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.util.Arrays;
-
-import static org.junit.Assert.assertEquals;
-
 /**
  * Test for HiveCatalog on generic metadata.
  */
@@ -49,121 +35,6 @@ public class HiveCatalogGenericMetadataTest extends CatalogTestBase {
                catalog.open();
        }
 
-       // ------ TODO: Move data types tests to its own test class as it's shared between generic metadata and hive metadata
-       // ------ data types ------
-
-       @Test
-       public void testDataTypes() throws Exception {
-               // TODO: the following Hive types are not supported in Flink yet, including MAP, STRUCT
-               DataType[] types = new DataType[] {
-                       DataTypes.TINYINT(),
-                       DataTypes.SMALLINT(),
-                       DataTypes.INT(),
-                       DataTypes.BIGINT(),
-                       DataTypes.FLOAT(),
-                       DataTypes.DOUBLE(),
-                       DataTypes.BOOLEAN(),
-                       DataTypes.STRING(),
-                       DataTypes.BYTES(),
-                       DataTypes.DATE(),
-                       DataTypes.TIMESTAMP(),
-                       DataTypes.CHAR(HiveChar.MAX_CHAR_LENGTH),
-                       DataTypes.VARCHAR(HiveVarchar.MAX_VARCHAR_LENGTH),
-                       DataTypes.DECIMAL(5, 3)
-               };
-
-               verifyDataTypes(types);
-       }
-
-       @Test
-       public void testNonExactlyMatchedDataTypes() throws Exception {
-               DataType[] types = new DataType[] {
-                       DataTypes.BINARY(BinaryType.MAX_LENGTH),
-                       DataTypes.VARBINARY(VarBinaryType.MAX_LENGTH)
-               };
-
-               CatalogTable table = createCatalogTable(types);
-
-               catalog.createDatabase(db1, createDb(), false);
-               catalog.createTable(path1, table, false);
-
-               Arrays.equals(
-                       new DataType[] {DataTypes.BYTES(), DataTypes.BYTES()},
-                       catalog.getTable(path1).getSchema().getFieldDataTypes());
-       }
-
-       @Test
-       public void testCharTypeLength() throws Exception {
-               DataType[] types = new DataType[] {
-                       DataTypes.CHAR(HiveChar.MAX_CHAR_LENGTH + 1)
-               };
-
-               exception.expect(CatalogException.class);
-               exception.expectMessage("HiveCatalog doesn't support char type with length of '256'. The maximum length is 255");
-               verifyDataTypes(types);
-       }
-
-       @Test
-       public void testVarCharTypeLength() throws Exception {
-               DataType[] types = new DataType[] {
-                       DataTypes.VARCHAR(HiveVarchar.MAX_VARCHAR_LENGTH + 1)
-               };
-
-               exception.expect(CatalogException.class);
-               exception.expectMessage("HiveCatalog doesn't support varchar type with length of '65536'. The maximum length is 65535");
-               verifyDataTypes(types);
-       }
-
-       @Test
-       public void testComplexDataTypes() throws Exception {
-               DataType[] types = new DataType[]{
-                       DataTypes.ARRAY(DataTypes.DOUBLE()),
-                       DataTypes.MAP(DataTypes.FLOAT(), DataTypes.BIGINT()),
-                       DataTypes.ROW(
-                               DataTypes.FIELD("0", DataTypes.BOOLEAN()),
-                               DataTypes.FIELD("1", DataTypes.BOOLEAN()),
-                               DataTypes.FIELD("2", DataTypes.DATE())),
-
-                       // nested complex types
-                       DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.INT())),
-                       DataTypes.MAP(DataTypes.STRING(), DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())),
-                       DataTypes.ROW(
-                               DataTypes.FIELD("3", DataTypes.ARRAY(DataTypes.DECIMAL(5, 3))),
-                               DataTypes.FIELD("4", DataTypes.MAP(DataTypes.TINYINT(), DataTypes.SMALLINT())),
-                               DataTypes.FIELD("5", DataTypes.ROW(DataTypes.FIELD("3", DataTypes.TIMESTAMP())))
-                       )
-               };
-
-               verifyDataTypes(types);
-       }
-
-       private CatalogTable createCatalogTable(DataType[] types) {
-               String[] colNames = new String[types.length];
-
-               for (int i = 0; i < types.length; i++) {
-                       colNames[i] = String.format("%s_%d", types[i].toString().toLowerCase(), i);
-               }
-
-               TableSchema schema = TableSchema.builder()
-                       .fields(colNames, types)
-                       .build();
-
-               return new CatalogTableImpl(
-                       schema,
-                       getBatchTableProperties(),
-                       TEST_COMMENT
-               );
-       }
-
-       private void verifyDataTypes(DataType[] types) throws Exception {
-               CatalogTable table = createCatalogTable(types);
-
-               catalog.createDatabase(db1, createDb(), false);
-               catalog.createTable(path1, table, false);
-
-               assertEquals(table.getSchema(), catalog.getTable(path1).getSchema());
-       }
-
        // ------ partitions ------
 
        @Test
diff --git a/flink-table/flink-table-common/src/test/java/org/apache/flink/table/catalog/CatalogTest.java b/flink-table/flink-table-common/src/test/java/org/apache/flink/table/catalog/CatalogTest.java
index 357eb23..0d9c2a2 100644
--- a/flink-table/flink-table-common/src/test/java/org/apache/flink/table/catalog/CatalogTest.java
+++ b/flink-table/flink-table-common/src/test/java/org/apache/flink/table/catalog/CatalogTest.java
@@ -110,7 +110,9 @@ public abstract class CatalogTest {
 
        @AfterClass
        public static void closeup() {
-               catalog.close();
+               if (catalog != null) {
+                       catalog.close();
+               }
        }
 
        // ------ databases ------
