I have not seen this error myself, but I have seen another user hit weird
parser issues before:

http://mail-archives.us.apache.org/mod_mbox/spark-user/201501.mbox/%3ccag6lhyed_no6qrutwsxeenrbqjuuzvqtbpxwx4z-gndqoj3...@mail.gmail.com%3E

I would attach a debugger and see what is going on -- if I'm looking at the
right place (
http://grepcode.com/file/repo1.maven.org/maven2/org.apache.hive/hive-exec/0.13.1/org/apache/hadoop/hive/ql/parse/HiveParser.java#HiveParser)
token 294 is RCURLY, which doesn't make much sense for a string literal...
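
Before reaching for the debugger, a quick way to rule Spark out is to parse
the same query with Hive's own ParseDriver and print each node's numeric
token type next to the name from the ANTLR-generated HiveParser.tokenNames
table -- that tells you what 294 maps to in the hive-exec jar you are
actually running, since the numbering shifts between Hive versions and the
grepcode link above is only 0.13.1. Rough sketch, untested (TokenCheck is
just a throwaway name; you need hive-exec on the classpath):

    import org.apache.hadoop.hive.ql.parse.{ASTNode, HiveParser, ParseDriver}
    import scala.collection.JavaConverters._

    object TokenCheck {
      // Print each AST node with its numeric token type and the name that
      // type maps to in the hive-exec jar on the classpath.
      def dump(node: ASTNode, indent: Int = 0): Unit = {
        val t = node.getType
        println(" " * indent + s"${node.getText} type=$t (${HiveParser.tokenNames(t)})")
        Option(node.getChildren).foreach(_.asScala.foreach {
          case c: ASTNode => dump(c, indent + 2)
          case _ =>
        })
      }

      def main(args: Array[String]): Unit = {
        val ast = new ParseDriver()
          .parse("SELECT * FROM test_table WHERE daily_partition='20150101'")
        dump(ast)
      }
    }

If that standalone parse looks sane, attach the debugger to the driver with
the usual JDWP flags, e.g.

    spark-submit --driver-java-options "-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005" ...

then connect your IDE to port 5005 and break in HiveQl.nodeToExpr.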

On Thu, May 21, 2015 at 2:10 AM, Devarajan Srinivasan <devathecool1...@gmail.com> wrote:

> Hi,
>
>    I was testing Spark reading data from Hive using HiveContext, and I got
> the following error when I ran a simple query with a constant in the
> predicate.
>
>   I am using Spark 1.3. Has anyone encountered an error like this?
>
>
> Error:
>
>
> Exception in thread "main" org.apache.spark.sql.AnalysisException: Unsupported language features in query: SELECT * from test_table where daily_partition='20150101'
> TOK_QUERY 1, 0,20, 81
>   TOK_FROM 1, 10,14, 81
>     TOK_TABREF 1, 12,14, 81
>       TOK_TABNAME 1, 12,14, 81
>         everest_marts_test 1, 12,12, 81
>         voice_cdr 1, 14,14, 100
>   TOK_INSERT 0, -1,-1, 0
>     TOK_DESTINATION 0, -1,-1, 0
>       TOK_DIR 0, -1,-1, 0
>         TOK_TMP_FILE 0, -1,-1, 0
>     TOK_SELECT 1, 0,8, 7
>       TOK_SELEXPR 1, 2,2, 7
>         TOK_TABLE_OR_COL 1, 2,2, 7
>           callingpartynumber 1, 2,2, 7
>       TOK_SELEXPR 1, 4,4, 26
>         TOK_TABLE_OR_COL 1, 4,4, 26
>           calledpartynumber 1, 4,4, 26
>       TOK_SELEXPR 1, 6,6, 44
>         TOK_TABLE_OR_COL 1, 6,6, 44
>           chargingtime 1, 6,6, 44
>       TOK_SELEXPR 1, 8,8, 57
>         TOK_TABLE_OR_COL 1, 8,8, 57
>           call_direction_key 1, 8,8, 57
>     TOK_WHERE 1, 16,20, 131
>       = 1, 18,20, 131
>         TOK_TABLE_OR_COL 1, 18,18, 116
>           daily_partition 1, 18,18, 116
>         '20150101' 1, 20,20, 132
>
> scala.NotImplementedError: No parse rules for ASTNode type: 294, text: '20150101' :
> '20150101' 1, 20,20, 132
> " +
> org.apache.spark.sql.hive.HiveQl$.nodeToExpr(HiveQl.scala:1261)
>           ;
>     at org.apache.spark.sql.hive.HiveQl$.createPlan(HiveQl.scala:261)
>     at org.apache.spark.sql.hive.ExtendedHiveQlParser$$anonfun$hiveQl$1.apply(ExtendedHiveQlParser.scala:41)
>     at org.apache.spark.sql.hive.ExtendedHiveQlParser$$anonfun$hiveQl$1.apply(ExtendedHiveQlParser.scala:40)
>     at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:136)
>     at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:135)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)
>     at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)
>     at scala.util.parsing.combinator.Parsers$Failure.append(Parsers.scala:202)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)
>     at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)
>     at scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)
>     at scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at scala.util.parsing.combinator.Parsers$$anon$2.apply(Parsers.scala:890)
>     at scala.util.parsing.combinator.PackratParsers$$anon$1.apply(PackratParsers.scala:110)
>     at org.apache.spark.sql.catalyst.AbstractSparkSQLParser.apply(AbstractSparkSQLParser.scala:38)
>     at org.apache.spark.sql.hive.HiveQl$$anonfun$3.apply(HiveQl.scala:138)
>     at org.apache.spark.sql.hive.HiveQl$$anonfun$3.apply(HiveQl.scala:138)
>     at org.apache.spark.sql.SparkSQLParser$$anonfun$org$apache$spark$sql$SparkSQLParser$$others$1.apply(SparkSQLParser.scala:96)
>     at org.apache.spark.sql.SparkSQLParser$$anonfun$org$apache$spark$sql$SparkSQLParser$$others$1.apply(SparkSQLParser.scala:95)
>     at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:136)
>     at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:135)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)
>     at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)
>     at scala.util.parsing.combinator.Parsers$Failure.append(Parsers.scala:202)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)
>     at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)
>     at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)
>     at scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)
>     at scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at scala.util.parsing.combinator.Parsers$$anon$2.apply(Parsers.scala:890)
>     at scala.util.parsing.combinator.PackratParsers$$anon$1.apply(PackratParsers.scala:110)
>     at org.apache.spark.sql.catalyst.AbstractSparkSQLParser.apply(AbstractSparkSQLParser.scala:38)
>     at org.apache.spark.sql.hive.HiveQl$.parseSql(HiveQl.scala:234)
>     at org.apache.spark.sql.hive.HiveContext$$anonfun$sql$1.apply(HiveContext.scala:92)
>     at org.apache.spark.sql.hive.HiveContext$$anonfun$sql$1.apply(HiveContext.scala:92)
>     at scala.Option.getOrElse(Option.scala:120)
>     at org.apache.spark.sql.hive.HiveContext.sql(HiveContext.scala:92)
>     at com.xoanon.spark.hive.HiveSelectTest.main(HiveSelectTest.java:22)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:606)
>     at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:569)
>     at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:166)
>     at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:189)
>     at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:110)
>     at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
>
