dilipbiswal commented on a change in pull request #28802:
URL: https://github.com/apache/spark/pull/28802#discussion_r439126001
##########
File path:
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
##########
@@ -285,334 +290,61 @@ class TableIdentifierParserSuite extends SparkFunSuite
with SQLHelper {
"where",
"with")
- // All the keywords in `docs/sql-keywords.md` are listed below:
- val allCandidateKeywords = Set(
- "add",
- "after",
- "all",
- "alter",
- "analyze",
- "and",
- "anti",
- "any",
- "archive",
- "array",
- "as",
- "asc",
- "at",
- "authorization",
- "between",
- "both",
- "bucket",
- "buckets",
- "by",
- "cache",
- "cascade",
- "case",
- "cast",
- "change",
- "check",
- "clear",
- "cluster",
- "clustered",
- "codegen",
- "collate",
- "collection",
- "column",
- "columns",
- "comment",
- "commit",
- "compact",
- "compactions",
- "compute",
- "concatenate",
- "constraint",
- "cost",
- "create",
- "cross",
- "cube",
- "current",
- "current_date",
- "current_time",
- "current_timestamp",
- "current_user",
- "data",
- "database",
- "databases",
- "day",
- "dbproperties",
- "defined",
- "delete",
- "delimited",
- "desc",
- "describe",
- "dfs",
- "directories",
- "directory",
- "distinct",
- "distribute",
- "div",
- "drop",
- "else",
- "end",
- "escape",
- "escaped",
- "except",
- "exchange",
- "exists",
- "explain",
- "export",
- "extended",
- "external",
- "extract",
- "false",
- "fetch",
- "fields",
- "fileformat",
- "first",
- "following",
- "for",
- "foreign",
- "format",
- "formatted",
- "from",
- "full",
- "function",
- "functions",
- "global",
- "grant",
- "group",
- "grouping",
- "having",
- "hour",
- "if",
- "ignore",
- "import",
- "in",
- "index",
- "indexes",
- "inner",
- "inpath",
- "inputformat",
- "insert",
- "intersect",
- "interval",
- "into",
- "is",
- "items",
- "join",
- "keys",
- "last",
- "lateral",
- "lazy",
- "leading",
- "left",
- "like",
- "limit",
- "lines",
- "list",
- "load",
- "local",
- "location",
- "lock",
- "locks",
- "logical",
- "macro",
- "map",
- "minus",
- "minute",
- "month",
- "msck",
- "namespaces",
- "natural",
- "no",
- "not",
- "null",
- "nulls",
- "of",
- "on",
- "only",
- "option",
- "options",
- "or",
- "order",
- "out",
- "outer",
- "outputformat",
- "over",
- "overlaps",
- "overlay",
- "overwrite",
- "partition",
- "partitioned",
- "partitions",
- "percent",
- "pivot",
- "placing",
- "position",
- "preceding",
- "primary",
- "principals",
- "purge",
- "query",
- "range",
- "recordreader",
- "recordwriter",
- "recover",
- "reduce",
- "references",
- "refresh",
- "rename",
- "repair",
- "replace",
- "reset",
- "restrict",
- "revoke",
- "right",
- "rlike",
- "role",
- "roles",
- "rollback",
- "rollup",
- "row",
- "rows",
- "schema",
- "second",
- "select",
- "semi",
- "separated",
- "serde",
- "serdeproperties",
- "session_user",
- "set",
- "sets",
- "show",
- "skewed",
- "some",
- "sort",
- "sorted",
- "start",
- "statistics",
- "stored",
- "stratify",
- "struct",
- "substr",
- "substring",
- "table",
- "tables",
- "tablesample",
- "tblproperties",
- "temporary",
- "terminated",
- "then",
- "to",
- "touch",
- "trailing",
- "transaction",
- "transactions",
- "transform",
- "true",
- "truncate",
- "type",
- "unarchive",
- "unbounded",
- "uncache",
- "union",
- "unique",
- "unknown",
- "unlock",
- "unset",
- "use",
- "user",
- "using",
- "values",
- "view",
- "views",
- "when",
- "where",
- "window",
- "with",
- "year")
+ private val sqlSyntaxDefs = {
+ val sqlBasePath = {
+ val sparkHome = {
+ assert(sys.props.contains("spark.test.home") ||
Review comment:
@maropu Minor nit. I was trying to run this and I didn't have the
environment variable set. The error message ends up printing the full environment
info, making it difficult to find what the actual error is. Should we use something like:
if (condition) {
fail(...)
}
or maybe there is a better way :-).
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]