GitHub user rxin commented on the pull request:
https://github.com/apache/spark/pull/1003#issuecomment-45403070
This might not be your problem, but when I tried the following, I got the exception below:
```
scala> c.hql("explain select key, count(value) from src group by key").collect()
14/06/06 23:58:05 INFO parse.ParseDriver: Parsing command: explain select key, count(value) from src group by key
14/06/06 23:58:05 INFO parse.ParseDriver: Parse Completed
14/06/06 23:58:05 INFO analysis.Analyzer: Max iterations (2) reached for batch MultiInstanceRelations
14/06/06 23:58:05 INFO analysis.Analyzer: Max iterations (2) reached for batch CaseInsensitiveAttributeReferences
14/06/06 23:58:05 INFO analysis.Analyzer: Max iterations (2) reached for batch MultiInstanceRelations
14/06/06 23:58:05 INFO analysis.Analyzer: Max iterations (2) reached for batch CaseInsensitiveAttributeReferences
14/06/06 23:58:05 INFO metastore.HiveMetaStore: 0: get_table : db=default tbl=src
14/06/06 23:58:05 INFO HiveMetaStore.audit: ugi=rxin  ip=unknown-ip-addr  cmd=get_table : db=default tbl=src
14/06/06 23:58:05 INFO storage.MemoryStore: ensureFreeSpace(147699) called with curMem=737503, maxMem=1145674137
14/06/06 23:58:05 INFO storage.MemoryStore: Block broadcast_5 stored as values to memory (estimated size 144.2 KB, free 1091.8 MB)
14/06/06 23:58:05 INFO sql.SQLContext$$anon$1: Max iterations (2) reached for batch Add exchange
14/06/06 23:58:05 INFO sql.SQLContext$$anon$1: Max iterations (2) reached for batch Prepare Expressions
org.apache.spark.sql.catalyst.errors.package$TreeNodeException: makeCopy, tree:
ExplainCommandPhysical
 Aggregate false, [key#12], [key#12,SUM(PartialCount#14L) AS c_1#10L]
  Exchange (HashPartitioning [key#12:0], 150)
   Aggregate true, [key#12], [key#12,COUNT(value#13) AS PartialCount#14L]
    HiveTableScan [key#12,value#13], (MetastoreRelation default, src, None), None
    at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:47)
    at org.apache.spark.sql.catalyst.trees.TreeNode.makeCopy(TreeNode.scala:265)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildrenUp(TreeNode.scala:249)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:215)
    at org.apache.spark.sql.execution.AddExchange$.apply(Exchange.scala:93)
    at org.apache.spark.sql.execution.AddExchange$.apply(Exchange.scala:89)
    at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1$$anonfun$apply$2.apply(RuleExecutor.scala:62)
    at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1$$anonfun$apply$2.apply(RuleExecutor.scala:60)
    at scala.collection.IndexedSeqOptimized$class.foldl(IndexedSeqOptimized.scala:51)
    at scala.collection.IndexedSeqOptimized$class.foldLeft(IndexedSeqOptimized.scala:60)
    at scala.collection.mutable.WrappedArray.foldLeft(WrappedArray.scala:34)
    at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1.apply(RuleExecutor.scala:60)
    at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$apply$1.apply(RuleExecutor.scala:52)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at org.apache.spark.sql.catalyst.rules.RuleExecutor.apply(RuleExecutor.scala:52)
    at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:275)
    at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:275)
    at org.apache.spark.sql.hive.HiveContext$QueryExecution.toRdd$lzycompute(HiveContext.scala:260)
    at org.apache.spark.sql.hive.HiveContext$QueryExecution.toRdd(HiveContext.scala:248)
    at org.apache.spark.sql.hive.HiveContext.hiveql(HiveContext.scala:85)
    at org.apache.spark.sql.hive.HiveContext.hql(HiveContext.scala:90)
    at $i$$$$9579e5b89ab1eb428704b684e2e341c$$$$$.<init>(<console>:70)
    at $i$$$$9579e5b89ab1eb428704b684e2e341c$$$$$.<clinit>(<console>)
    at .<init>(<console>:7)
    at .<clinit>(<console>)
    at $print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:734)
    at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:983)
    at scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:573)
    at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:604)
    at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:568)
    at scala.tools.nsc.interpreter.ILoop.reallyInterpret$1(ILoop.scala:760)
    at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:805)
    at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:717)
    at scala.tools.nsc.interpreter.ILoop.processLine$1(ILoop.scala:581)
    at scala.tools.nsc.interpreter.ILoop.innerLoop$1(ILoop.scala:588)
    at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:591)
    at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:882)
    at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:837)
    at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:837)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:837)
    at scala.tools.nsc.interpreter.ILoop.main(ILoop.scala:904)
    at xsbt.ConsoleInterface.run(ConsoleInterface.scala:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at sbt.compiler.AnalyzingCompiler.call(AnalyzingCompiler.scala:102)
    at sbt.compiler.AnalyzingCompiler.console(AnalyzingCompiler.scala:77)
    at sbt.Console.sbt$Console$$console0$1(Console.scala:23)
    at sbt.Console$$anonfun$apply$2$$anonfun$apply$1.apply$mcV$sp(Console.scala:24)
    at sbt.Console$$anonfun$apply$2$$anonfun$apply$1.apply(Console.scala:24)
    at sbt.Console$$anonfun$apply$2$$anonfun$apply$1.apply(Console.scala:24)
    at sbt.Logger$$anon$4.apply(Logger.scala:90)
    at sbt.TrapExit$App.run(TrapExit.scala:244)
    at java.lang.Thread.run(Thread.java:744)
Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: Failed to copy node. Is otherCopyArgs specified correctly for ExplainCommandPhysical?, tree:
ExplainCommandPhysical
 Aggregate false, [key#12], [key#12,SUM(PartialCount#14L) AS c_1#10L]
  Exchange (HashPartitioning [key#12:0], 150)
   Aggregate true, [key#12], [key#12,COUNT(value#13) AS PartialCount#14L]
    HiveTableScan [key#12,value#13], (MetastoreRelation default, src, None), None
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$makeCopy$1.apply(TreeNode.scala:275)
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$makeCopy$1.apply(TreeNode.scala:266)
    at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:46)
    ... 60 more
```
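For context on the `Caused by`: Catalyst's `TreeNode.makeCopy` rebuilds a node reflectively from its constructor, passing the transformed children plus whatever `otherCopyArgs` returns. A node like `ExplainCommandPhysical` that takes non-child constructor arguments (e.g. a `SQLContext` in a second parameter list) has to expose them through `otherCopyArgs`, or no constructor matches and `makeCopy` fails exactly like this when a rule such as `AddExchange` transforms the tree. Here is a minimal sketch of that convention (hypothetical node name; 1.0-era `SparkPlan` APIs assumed):
```scala
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.expressions.{Attribute, Row}
import org.apache.spark.sql.execution.{SparkPlan, UnaryNode}

// Hypothetical node, not the actual ExplainCommandPhysical source; it mirrors
// how 1.0-era physical commands carry a SQLContext in a second, non-product
// parameter list.
case class SomeCommandPhysical(child: SparkPlan, output: Seq[Attribute])
                              (@transient context: SQLContext)
  extends UnaryNode {

  // TreeNode.makeCopy reconstructs this node reflectively, passing the new
  // children followed by otherCopyArgs. Without this override only
  // (child, output) is supplied, no constructor matches, and makeCopy throws
  // the "Failed to copy node" TreeNodeException seen above.
  override def otherCopyArgs: Seq[AnyRef] = context :: Nil

  override def execute(): RDD[Row] = child.execute()
}
```
So it might be worth checking either that `ExplainCommandPhysical` really lists its context argument in `otherCopyArgs`, or whether the `Add exchange` batch should be descending into an explain command's child plan at all.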