kasakrisz commented on code in PR #3745:
URL: https://github.com/apache/hive/pull/3745#discussion_r1020148045


##########
iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java:
##########
@@ -804,31 +806,59 @@ public static Schema schema(Configuration config) {
   @VisibleForTesting
   static void overlayTableProperties(Configuration configuration, TableDesc tableDesc, Map<String, String> map) {
     Properties props = tableDesc.getProperties();
-    Table table = IcebergTableUtil.getTable(configuration, props);
-    String schemaJson = SchemaParser.toJson(table.schema());
 
     Maps.fromProperties(props).entrySet().stream()
         .filter(entry -> !map.containsKey(entry.getKey())) // map overrides tableDesc properties
         .forEach(entry -> map.put(entry.getKey(), entry.getValue()));
 
-    map.put(InputFormatConfig.TABLE_IDENTIFIER, props.getProperty(Catalogs.NAME));
-    map.put(InputFormatConfig.TABLE_LOCATION, table.location());
-    map.put(InputFormatConfig.TABLE_SCHEMA, schemaJson);
-    props.put(InputFormatConfig.PARTITION_SPEC, PartitionSpecParser.toJson(table.spec()));
-
-    // serialize table object into config
-    Table serializableTable = SerializableTable.copyOf(table);
-    checkAndSkipIoConfigSerialization(configuration, serializableTable);
-    map.put(InputFormatConfig.SERIALIZED_TABLE_PREFIX + tableDesc.getTableName(),
-        SerializationUtil.serializeToBase64(serializableTable));
+    try {
+      Table table = IcebergTableUtil.getTable(configuration, props);
+      String schemaJson = SchemaParser.toJson(table.schema());
+
+      map.put(InputFormatConfig.TABLE_IDENTIFIER, props.getProperty(Catalogs.NAME));
+      map.put(InputFormatConfig.TABLE_LOCATION, table.location());
+      map.put(InputFormatConfig.TABLE_SCHEMA, schemaJson);
+      props.put(InputFormatConfig.PARTITION_SPEC, PartitionSpecParser.toJson(table.spec()));
+
+      // serialize table object into config
+      Table serializableTable = SerializableTable.copyOf(table);
+      checkAndSkipIoConfigSerialization(configuration, serializableTable);
+      map.put(InputFormatConfig.SERIALIZED_TABLE_PREFIX + tableDesc.getTableName(),
+          SerializationUtil.serializeToBase64(serializableTable));
+
+      // We need to remove this otherwise the job.xml will be invalid as column comments are separated with '\0' and
+      // the serialization utils fail to serialize this character
+      map.remove("columns.comments");
+
+      // save schema into table props as well to avoid repeatedly hitting the HMS during serde initializations
+      // this is an exception to the interface documentation, but it's a safe operation to add this property
+      props.put(InputFormatConfig.TABLE_SCHEMA, schemaJson);
+    } catch (NoSuchTableException ex) {
+      if (!(StringUtils.isNotBlank(props.getProperty(hive_metastoreConstants.TABLE_IS_CTAS)) &&
+          Boolean.parseBoolean(props.getProperty(org.apache.hadoop.hive.conf.Constants.IS_EXPLAIN)))) {
+        throw ex;
+      }
 
-    // We need to remove this otherwise the job.xml will be invalid as column comments are separated with '\0' and
-    // the serialization utils fail to serialize this character
-    map.remove("columns.comments");
+      try {
+        map.put(InputFormatConfig.TABLE_IDENTIFIER, props.getProperty(Catalogs.NAME));
+        map.put(InputFormatConfig.SERIALIZED_TABLE_PREFIX + tableDesc.getTableName(),
+            SerializationUtil.serializeToBase64(null));
 
-    // save schema into table props as well to avoid repeatedly hitting the HMS during serde initializations
-    // this is an exception to the interface documentation, but it's a safe operation to add this property
-    props.put(InputFormatConfig.TABLE_SCHEMA, schemaJson);
+        String location = map.get(hive_metastoreConstants.META_TABLE_LOCATION);
+        if (StringUtils.isBlank(location)) {
+          location = props.getProperty(hive_metastoreConstants.TABLE_IS_CTAS);
+        }
+        map.put(InputFormatConfig.TABLE_LOCATION, location);
+
+        AbstractSerDe serDe = tableDesc.getDeserializer(configuration);
+        HiveIcebergSerDe icebergSerDe = (HiveIcebergSerDe) serDe;
+        String schemaJson = SchemaParser.toJson(icebergSerDe.getTableSchema());
+        map.put(InputFormatConfig.TABLE_SCHEMA, schemaJson);
+        props.put(InputFormatConfig.TABLE_SCHEMA, schemaJson);
+      } catch (Exception e) {
+        throw new RuntimeException(e);

Review Comment:
   `MetaException` is thrift-generated and does not have a constructor that takes a cause (inner) exception.
   Not the best solution, but `RuntimeException` is used elsewhere too when a checked exception (like `SemanticException`) has to be rethrown.
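   
   To make that trade-off concrete, here is a minimal sketch of the two options, assuming `MetaException` has the usual thrift-generated shape (message-only constructors, no `(String, Throwable)` variant); the class and method names are hypothetical:
   
   ```java
   import org.apache.hadoop.hive.metastore.api.MetaException;
   import org.apache.hadoop.hive.ql.parse.SemanticException;
   
   class RethrowSketch {
   
     // Pattern used in the patch: wrap the checked exception in an unchecked one,
     // keeping the original as the cause and the method signature unchanged.
     static void rethrowUnchecked(SemanticException e) {
       throw new RuntimeException(e);
     }
   
     // Possible alternative: the generated MetaException(String) constructor never
     // initializes a cause, so initCause() can still attach the original exception.
     static void rethrowAsMetaException(SemanticException e) throws MetaException {
       MetaException me = new MetaException(e.getMessage());
       me.initCause(e);
       throw me;
     }
   }
   ```
   
   Either way the original stack trace is preserved as the cause; the `RuntimeException` route just avoids widening the enclosing method's `throws` clause.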


