AngersZhuuuu commented on a change in pull request #29085:
URL: https://github.com/apache/spark/pull/29085#discussion_r458829828
##########
File path:
sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
##########
@@ -679,64 +680,78 @@ class SparkSqlAstBuilder(conf: SQLConf) extends
AstBuilder(conf) {
"Unsupported operation: Used defined record reader/writer classes.",
ctx)
}
- // Decode and input/output format.
- type Format = (Seq[(String, String)], Option[String], Seq[(String,
String)], Option[String])
- def format(
- fmt: RowFormatContext,
- configKey: String,
- defaultConfigValue: String): Format = fmt match {
- case c: RowFormatDelimitedContext =>
- // TODO we should use the visitRowFormatDelimited function here.
However HiveScriptIOSchema
- // expects a seq of pairs in which the old parsers' token names are
used as keys.
- // Transforming the result of visitRowFormatDelimited would be quite a
bit messier than
- // retrieving the key value pairs ourselves.
- def entry(key: String, value: Token): Seq[(String, String)] = {
- Option(value).map(t => key -> t.getText).toSeq
- }
- val entries = entry("TOK_TABLEROWFORMATFIELD", c.fieldsTerminatedBy) ++
- entry("TOK_TABLEROWFORMATCOLLITEMS", c.collectionItemsTerminatedBy)
++
- entry("TOK_TABLEROWFORMATMAPKEYS", c.keysTerminatedBy) ++
- entry("TOK_TABLEROWFORMATLINES", c.linesSeparatedBy) ++
- entry("TOK_TABLEROWFORMATNULL", c.nullDefinedAs)
-
- (entries, None, Seq.empty, None)
+ if (!conf.getConf(CATALOG_IMPLEMENTATION).equals("hive")) {
+ super.withScriptIOSchema(
+ ctx,
+ inRowFormat,
+ recordWriter,
+ outRowFormat,
+ recordReader,
+ schemaLess)
+ } else {
- case c: RowFormatSerdeContext =>
- // Use a serde format.
- val CatalogStorageFormat(None, None, None, Some(name), _, props) =
visitRowFormatSerde(c)
+ // Decode and input/output format.
+ type Format = (Seq[(String, String)], Option[String], Seq[(String,
String)], Option[String])
+
+ def format(
+ fmt: RowFormatContext,
+ configKey: String,
+ defaultConfigValue: String): Format = fmt match {
+ case c: RowFormatDelimitedContext =>
+ // TODO we should use visitRowFormatDelimited function here. However
HiveScriptIOSchema
+ // expects a seq of pairs in which the old parsers' token names are
used as keys.
+ // Transforming the result of visitRowFormatDelimited would be quite
a bit messier than
+ // retrieving the key value pairs ourselves.
+ def entry(key: String, value: Token): Seq[(String, String)] = {
+ Option(value).map(t => key -> t.getText).toSeq
+ }
+
+ val entries = entry("TOK_TABLEROWFORMATFIELD", c.fieldsTerminatedBy)
++
+ entry("TOK_TABLEROWFORMATCOLLITEMS",
c.collectionItemsTerminatedBy) ++
+ entry("TOK_TABLEROWFORMATMAPKEYS", c.keysTerminatedBy) ++
+ entry("TOK_TABLEROWFORMATLINES", c.linesSeparatedBy) ++
+ entry("TOK_TABLEROWFORMATNULL", c.nullDefinedAs)
Review comment:
> hm... how about this?
[maropu@7954d0a](https://github.com/maropu/spark/commit/7954d0a075b1f43f5e36e27ceb526e65d8df2abc)
Extracting a function is fine, but using `getOrElse` is not elegant.
```
protected def getRowFormatDelimited(ctx: RowFormatDelimitedContext): Format = {
def entry(key: String, value: Token): Seq[(String, String)] = {
Option(value).map(t => key -> t.getText).toSeq
}
val entries = entry("TOK_TABLEROWFORMATFIELD", ctx.fieldsTerminatedBy) ++
entry("TOK_TABLEROWFORMATCOLLITEMS", ctx.collectionItemsTerminatedBy)
++
entry("TOK_TABLEROWFORMATMAPKEYS", ctx.keysTerminatedBy) ++
entry("TOK_TABLEROWFORMATLINES", ctx.linesSeparatedBy) ++
entry("TOK_TABLEROWFORMATNULL", ctx.nullDefinedAs)
(entries, None, Seq.empty, None)
}
```
Using `getOrElse` is not elegant.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]