[ https://issues.apache.org/jira/browse/HIVE-17810?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16242239#comment-16242239 ]

Stephen Patel commented on HIVE-17810:
--------------------------------------

I can't seem to attach a file, but here's a patch:

{noformat}
diff --git hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
index 99af291..070fb48 100644
--- hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
+++ hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
@@ -131,6 +131,7 @@ public HCatTable(String dbName, String tableName) {
     this.sd.getSerdeInfo().setSerializationLib(DEFAULT_SERDE_CLASS);
     this.sd.getSerdeInfo().setParameters(new HashMap<String, String>());
     this.sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1"); // Default serialization format.
+    this.sd.setCols(new ArrayList<FieldSchema>());
   }
 
   HCatTable(Table hiveTable) throws HCatException {
diff --git hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
index 78e767e..85697ba 100644
--- hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
+++ hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
@@ -30,7 +30,8 @@
 import java.util.Random;
 
 import com.google.common.base.Function;
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -43,11 +44,14 @@
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
+import org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat;
+import org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
 import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.avro.AvroSerDe;
 import org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe;
 import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -225,7 +229,7 @@ public void testBasicDDLCommands() throws Exception {
       table2.getSerdeParams().get(serdeConstants.COLLECTION_DELIM));
     assertEquals("checking " + serdeConstants.SERIALIZATION_NULL_FORMAT, Character.toString('\006'),
       table2.getSerdeParams().get(serdeConstants.SERIALIZATION_NULL_FORMAT));
-    
+
     assertTrue(table2.getLocation().toLowerCase().matches(".*" + ("/" + db + ".db/" + tableTwo).toLowerCase()));
 
     HCatCreateTableDesc tableDesc3 = HCatCreateTableDesc.create(db,
@@ -285,6 +289,38 @@ public void testEmptyTableInstantiation() throws Exception {
   }
 
   /**
+   * Verifies that a table can be created without explicitly specifying columns.
+   * @throws Exception
+   */
+  @Test
+  public void testNoColumnTableInstantiation() throws Exception {
+    HCatClient client = HCatClient.create(new Configuration(hcatConf));
+
+    String dbName = "default";
+    String tblName = "testNoColumnTableInstantiation";
+    ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
+    HCatTable table = new HCatTable(dbName, tblName)
+                                .cols(cols)
+                                .serdeLib(AvroSerDe.class.getName())
+                                .tblProps(ImmutableMap.of("avro.schema.literal", "{\"type\": \"record\"," +
+                                        "\"namespace\": \"com.example\"," +
+                                        "\"name\": \"FullName\"," +
+                                        "\"fields\": [{ \"name\": \"first\", \"type\": \"string\" }] }"))
+                                .inputFileFormat(AvroContainerInputFormat.class.getName())
+                                .outputFileFormat(AvroContainerOutputFormat.class.getName());
+
+    client.dropTable(dbName, tblName, true);
+    try {
+      // Create an Avro table with no columns.
+      client.createTable(HCatCreateTableDesc
+              .create(table, false)
+              .build());
+    } catch (Throwable e) {
+      fail("An error occurred creating a columnless table: " + e.getMessage());
+    }
+  }
+
+  /**
    * Verifies that an inner map is present inside an outer map, with
    * all values being equal.
    */
{noformat}
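
For background on why seeding an empty column list fixes the NPE: the Thrift-generated StorageDescriptor leaves unset fields null, and the metastore iterates the column list during validation (see the stack trace below). Here is a minimal before/after sketch, illustrative only and not the exact Hive source:

{noformat}
import java.util.ArrayList;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;

// Before the patch: HCatTable never sets a column list on its
// StorageDescriptor, so sd.getCols() is null and
// MetaStoreUtils.validateTblColumns NPEs when it iterates the columns.
StorageDescriptor sd = new StorageDescriptor();
assert sd.getCols() == null;

// After the patch: the HCatTable constructor seeds an empty list, so
// validation iterates zero columns and passes; the AvroSerDe later derives
// the real columns from the avro.schema.literal table property.
sd.setCols(new ArrayList<FieldSchema>());
assert sd.getCols().isEmpty();
{noformat}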

> Creating a table through HCatClient without specifying columns throws a 
> NullPointerException on the server
> ----------------------------------------------------------------------------------------------------------
>
>                 Key: HIVE-17810
>                 URL: https://issues.apache.org/jira/browse/HIVE-17810
>             Project: Hive
>          Issue Type: Bug
>          Components: HCatalog
>            Reporter: Stephen Patel
>            Priority: Minor
>
> I've attached a simple test case using the AvroSerde (which generates its 
> own columns) that, when run, will throw this error:
> {noformat}
> 2017-10-13T15:49:17,697 ERROR [pool-6-thread-2] metastore.RetryingHMSHandler: 
> MetaException(message:java.lang.NullPointerException)
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newMetaException(HiveMetaStore.java:6560)
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_with_environment_context(HiveMetaStore.java:1635)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:498)
>       at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
>       at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
>       at com.sun.proxy.$Proxy30.create_table_with_environment_context(Unknown 
> Source)
>       at 
> org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Processor$create_table_with_environment_context.getResult(ThriftHiveMetastore.java:11710)
>       at 
> org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Processor$create_table_with_environment_context.getResult(ThriftHiveMetastore.java:11694)
>       at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
>       at 
> org.apache.hadoop.hive.metastore.TUGIBasedProcessor$1.run(TUGIBasedProcessor.java:110)
>       at 
> org.apache.hadoop.hive.metastore.TUGIBasedProcessor$1.run(TUGIBasedProcessor.java:106)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:422)
>       at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1807)
>       at 
> org.apache.hadoop.hive.metastore.TUGIBasedProcessor.process(TUGIBasedProcessor.java:118)
>       at 
> org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:286)
>       at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>       at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>       at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.NullPointerException
>       at 
> org.apache.hadoop.hive.metastore.MetaStoreUtils.validateTblColumns(MetaStoreUtils.java:621)
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_core(HiveMetaStore.java:1433)
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_core(HiveMetaStore.java:1420)
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_with_environment_context(HiveMetaStore.java:1621)
>       ... 20 more
> {noformat}
> By default, the StorageDescriptor in the HCatTable class has a null column 
> list.  When HCatTable.cols(emptyList) is called, the HCatTable determines 
> that the empty list is equal to its current column list and does not set 
> it on the StorageDescriptor, which leads to the NullPointerException.
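> The equality guard in question looks roughly like this (a simplified 
> sketch, not the exact Hive source; toFieldSchemas is a stand-in for the 
> real conversion):
> {noformat}
> // Simplified sketch of HCatTable.cols(); toFieldSchemas is hypothetical.
> public HCatTable cols(List<HCatFieldSchema> cols) {
>   if (!this.cols.equals(cols)) {   // both lists are empty, so they compare equal
>     this.cols = cols;
>     this.sd.setCols(toFieldSchemas(cols));  // never reached; sd's columns stay null
>   }
>   return this;
> }
> {noformat}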
> A workaround is to call HCatTable.cols with a list that contains a fake 
> field, and then call HCatTable.cols with an empty list.  This will set the 
> column list on the StorageDescriptor to the empty list, and allow the table 
> to be created.
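> In code, the workaround looks something like this (illustrative only; the 
> placeholder field name is arbitrary):
> {noformat}
> // Force a real change first, then shrink back to the empty list.
> List<HCatFieldSchema> placeholder = new ArrayList<HCatFieldSchema>();
> placeholder.add(new HCatFieldSchema("placeholder", HCatFieldSchema.Type.STRING, null));
> table.cols(placeholder);                       // differs from the empty default, so sd.setCols runs
> table.cols(new ArrayList<HCatFieldSchema>());  // now the empty list actually replaces it
> {noformat}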



--
This message was sent by Atlassian JIRA
(v6.4.14#64029)
