Github user xwu0226 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/12406#discussion_r59909616
  
    --- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala ---
    @@ -625,11 +629,215 @@ private[hive] class HiveClientImpl(
           }
       }
     
     +  override def showCreateTable(db: String, tableName: String): String = withHiveState {
    +    Option(client.getTable(db, tableName, false)).map { hiveTable =>
    +      val tblProperties = hiveTable.getParameters.asScala.toMap
    +      if (tblProperties.get("spark.sql.sources.provider").isDefined) {
    +        generateDataSourceDDL(hiveTable)
    +      } else {
    +        generateHiveDDL(hiveTable)
    +      }
    +    }.get
    +  }
    +
     
       /* -------------------------------------------------------- *
        |  Helper methods for converting to and from Hive classes  |
        * -------------------------------------------------------- */
     
     +  private def generateCreateTableHeader(
     +              hiveTable: HiveTable,
     +              processedProps: scala.collection.mutable.ArrayBuffer[String]): String = {
     +    val sb = new StringBuilder("CREATE ")
     +    if (hiveTable.isTemporary) {
     +      sb.append("TEMPORARY ")
     +    }
     +    if (hiveTable.getTableType == HiveTableType.EXTERNAL_TABLE) {
     +      processedProps += "EXTERNAL"
     +      sb.append("EXTERNAL TABLE " +
     +        quoteIdentifier(hiveTable.getDbName) + "." + quoteIdentifier(hiveTable.getTableName))
     +    } else {
     +      sb.append("TABLE " +
     +        quoteIdentifier(hiveTable.getDbName) + "." + quoteIdentifier(hiveTable.getTableName))
     +    }
     +    sb.toString()
     +  }
    +
     +  private def generateColsDataSource(
     +              hiveTable: HiveTable,
     +              processedProps: scala.collection.mutable.ArrayBuffer[String]): String = {
     +    val schemaStringFromParts: Option[String] = {
     +      val props = hiveTable.getParameters.asScala
     +      props.get("spark.sql.sources.schema.numParts").map { numParts =>
     +        val parts = (0 until numParts.toInt).map { index =>
     +          val part = props.get(s"spark.sql.sources.schema.part.$index").orNull
     +          if (part == null) {
     +            throw new AnalysisException(
     +              "Could not read schema from the metastore because it is corrupted " +
     +                s"(missing part $index of the schema, $numParts parts are expected).")
     +          }
     +          part
     +        }
     +        // Stick all parts back together into a single schema string.
     +        parts.mkString
     +      }
     +    }
     +
     +    schemaStringFromParts.map { schemaString =>
     +      DataType.fromJson(schemaString).asInstanceOf[StructType]
     +        .map(f => s"${quoteIdentifier(f.name)} ${f.dataType.sql}")
     +        .mkString("( ", ", ", " )")
     +    }.getOrElse("")
     +  }
    +
    +  private def generateDataSourceDDL(hiveTable: HiveTable): String = {
    --- End diff ---
    
    @hvanhovell I looked through Hive's `Hive`, `Table`, and `MetaStoreClient` APIs and have not found an API function that can do this for us. As far as I can tell, Hive implements this through `DDLTask.showCreateTable`, which is only reachable via the normal Hive command parsing and execution route.
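
    For reference, the only way I found to get Hive's own output is to push the statement through `org.apache.hadoop.hive.ql.Driver`. A rough sketch of that route (untested here; it assumes a live `HiveConf` named `conf`, and the helper name is made up - it is not something this patch does):

        import java.util.{ArrayList => JArrayList}
        import scala.collection.JavaConverters._
        import org.apache.hadoop.hive.conf.HiveConf
        import org.apache.hadoop.hive.ql.Driver

        // Hypothetical helper: pushes SHOW CREATE TABLE through Hive's command
        // parser so that DDLTask.showCreateTable produces the DDL as result rows.
        def hiveShowCreateTable(conf: HiveConf, db: String, table: String): String = {
          val driver = new Driver(conf)
          driver.run(s"SHOW CREATE TABLE `$db`.`$table`")
          val results = new JArrayList[String]()
          driver.getResults(results)
          driver.close()
          results.asScala.mkString("\n")
        }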

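    Also, a note on the `spark.sql.sources.schema.part.$index` handling above: Spark splits a long schema JSON across several table properties because the metastore limits property value length, and `generateColsDataSource` just reverses that split. A minimal sketch of the write side (the 4000-character threshold is Spark's default for `spark.sql.sources.schemaStringLengthThreshold`; the helper name is hypothetical):

        import org.apache.spark.sql.types.StructType

        // Hypothetical write-side counterpart of generateColsDataSource: chunk the
        // schema JSON and record how many chunks were written.
        def schemaToTableProps(schema: StructType, threshold: Int = 4000): Map[String, String] = {
          val parts = schema.json.grouped(threshold).toSeq
          Map("spark.sql.sources.schema.numParts" -> parts.size.toString) ++
            parts.zipWithIndex.map { case (part, index) =>
              s"spark.sql.sources.schema.part.$index" -> part
            }
        }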

