Github user felixcheung commented on a diff in the pull request:
https://github.com/apache/spark/pull/17785#discussion_r113854501
--- Diff: sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
---
@@ -92,48 +93,8 @@ private[sql] object SQLUtils extends Logging {
def r: Regex = new Regex(sc.parts.mkString, sc.parts.tail.map(_ =>
"x"): _*)
}
- def getSQLDataType(dataType: String): DataType = {
- dataType match {
- case "byte" => org.apache.spark.sql.types.ByteType
- case "integer" => org.apache.spark.sql.types.IntegerType
- case "float" => org.apache.spark.sql.types.FloatType
- case "double" => org.apache.spark.sql.types.DoubleType
- case "numeric" => org.apache.spark.sql.types.DoubleType
- case "character" => org.apache.spark.sql.types.StringType
- case "string" => org.apache.spark.sql.types.StringType
- case "binary" => org.apache.spark.sql.types.BinaryType
- case "raw" => org.apache.spark.sql.types.BinaryType
- case "logical" => org.apache.spark.sql.types.BooleanType
- case "boolean" => org.apache.spark.sql.types.BooleanType
- case "timestamp" => org.apache.spark.sql.types.TimestampType
- case "date" => org.apache.spark.sql.types.DateType
- case r"\Aarray<(.+)${elemType}>\Z" =>
- org.apache.spark.sql.types.ArrayType(getSQLDataType(elemType))
- case r"\Amap<(.+)${keyType},(.+)${valueType}>\Z" =>
- if (keyType != "string" && keyType != "character") {
- throw new IllegalArgumentException("Key type of a map must be
string or character")
- }
- org.apache.spark.sql.types.MapType(getSQLDataType(keyType),
getSQLDataType(valueType))
- case r"\Astruct<(.+)${fieldsStr}>\Z" =>
- if (fieldsStr(fieldsStr.length - 1) == ',') {
- throw new IllegalArgumentException(s"Invalid type $dataType")
- }
- val fields = fieldsStr.split(",")
- val structFields = fields.map { field =>
- field match {
- case r"\A(.+)${fieldName}:(.+)${fieldType}\Z" =>
- createStructField(fieldName, fieldType, true)
-
- case _ => throw new IllegalArgumentException(s"Invalid type
$dataType")
- }
- }
- createStructType(structFields)
- case _ => throw new IllegalArgumentException(s"Invalid type
$dataType")
- }
- }
-
def createStructField(name: String, dataType: String, nullable:
Boolean): StructField = {
- val dtObj = getSQLDataType(dataType)
+ val dtObj = CatalystSqlParser.parseDataType(dataType)
--- End diff --
thanks for looking into it. if I take the diff,
```
character
logical
numeric
raw
```
these are actually R native type names. If I had to guess, it was
intentional that we support R native types in structField as well as
Scala/Spark types.
I'm not sure how much test coverage we have for something like this, but is
that still going to work with this change?
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]