Github user dongjoon-hyun commented on a diff in the pull request:
https://github.com/apache/spark/pull/20866#discussion_r176307526
--- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala ---
@@ -908,11 +912,39 @@ private[hive] object HiveClientImpl {
Utils.classForName(name)
.asInstanceOf[Class[_ <: org.apache.hadoop.hive.ql.io.HiveOutputFormat[_, _]]]
+ private def toHiveMetaApiTable(table: CatalogTable): HiveMetaApiTable = {
+ val sd = new StorageDescriptor
+ sd.setSerdeInfo(new SerDeInfo)
+ sd.setNumBuckets(-1)
+ sd.setBucketCols(new JArrayList[String])
+ sd.setCols(new JArrayList[FieldSchema])
+ sd.setParameters(new JHashMap[String, String])
+ sd.setSortCols(new JArrayList[Order])
+ sd.getSerdeInfo.setParameters(new JHashMap[String, String])
+ sd.getSerdeInfo.getParameters.put(serdeConstants.SERIALIZATION_FORMAT, "1")
+ sd.setInputFormat(classOf[SequenceFileInputFormat[_, _]].getName)
+ sd.setOutputFormat(classOf[HiveSequenceFileOutputFormat[_, _]].getName)
+ val skewInfo: SkewedInfo = new SkewedInfo
+ skewInfo.setSkewedColNames(new JArrayList[String])
+ skewInfo.setSkewedColValues(new JArrayList[JList[String]])
+ skewInfo.setSkewedColValueLocationMaps(new JHashMap[JList[String], String])
+ sd.setSkewedInfo(skewInfo)
+
+ val apiTable = new HiveMetaApiTable()
+ apiTable.setSd(sd)
+ apiTable.setPartitionKeys(new JArrayList[FieldSchema])
+ apiTable.setParameters(new JHashMap[String, String])
+ apiTable.setTableType(HiveTableType.MANAGED_TABLE.toString)
+ apiTable.setDbName(table.database)
+ apiTable.setTableName(table.identifier.table)
--- End diff --
In addition to `setOwner`, `setCreateTime` is also omitted.
---
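For reference, a minimal sketch of how the two flagged setters could be filled in, assuming the `apiTable`/`table` names from the diff above, the thrift-generated metastore `Table` setters `setOwner(String)` and `setCreateTime(Int)`, and that `CatalogTable.createTime` is in milliseconds (not the actual patch):

```scala
// Sketch only: owner can be copied straight from the CatalogTable.
apiTable.setOwner(table.owner)
// CatalogTable.createTime is milliseconds since epoch; the metastore API
// stores seconds as an Int, so convert before setting.
apiTable.setCreateTime((table.createTime / 1000).toInt)
```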