Repository: spark
Updated Branches:
  refs/heads/master 580c7011c -> c0b47bada


[SPARK-2767] [SQL] SparkSQL CLI doesn't output error message if query failed.

Author: Cheng Hao <[email protected]>

Closes #1686 from chenghao-intel/spark_sql_cli and squashes the following 
commits:

eb664cc [Cheng Hao] Output detailed failure message in console
93b0382 [Cheng Hao] Fix Bug of no output in cli if exception thrown internally


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c0b47bad
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c0b47bad
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c0b47bad

Branch: refs/heads/master
Commit: c0b47bada3c9f0e9e0f14ab41ffb91012a357211
Parents: 580c701
Author: Cheng Hao <[email protected]>
Authored: Fri Aug 1 11:42:05 2014 -0700
Committer: Michael Armbrust <[email protected]>
Committed: Fri Aug 1 11:42:05 2014 -0700

----------------------------------------------------------------------
 .../spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala       | 4 +++-
 .../apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala   | 3 +--
 .../main/scala/org/apache/spark/sql/hive/HiveContext.scala    | 7 ++++---
 3 files changed, 8 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c0b47bad/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
 
b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index 27268ec..cb17d7c 100755
--- 
a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ 
b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -288,8 +288,10 @@ private[hive] class SparkSQLCLIDriver extends CliDriver 
with Logging {
             out.println(cmd)
           }
 
-          ret = driver.run(cmd).getResponseCode
+          val rc = driver.run(cmd)
+          ret = rc.getResponseCode
           if (ret != 0) {
+            console.printError(rc.getErrorMessage())
             driver.close()
             return ret
           }

http://git-wip-us.apache.org/repos/asf/spark/blob/c0b47bad/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
 
b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
index 5202aa9..a56b19a 100644
--- 
a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
+++ 
b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
@@ -53,10 +53,9 @@ private[hive] class SparkSQLDriver(val context: HiveContext 
= SparkSQLEnv.hiveCo
   }
 
   override def run(command: String): CommandProcessorResponse = {
-    val execution = context.executePlan(context.hql(command).logicalPlan)
-
     // TODO unify the error code
     try {
+      val execution = context.executePlan(context.hql(command).logicalPlan)
       hiveResponse = execution.stringResult()
       tableSchema = getResultSetSchema(execution)
       new CommandProcessorResponse(0)

http://git-wip-us.apache.org/repos/asf/spark/blob/c0b47bad/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 27b444d..7e3b872 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -131,12 +131,13 @@ class HiveContext(sc: SparkContext) extends 
SQLContext(sc) {
   @transient protected[hive] lazy val sessionState = {
     val ss = new SessionState(hiveconf)
     set(hiveconf.getAllProperties)  // Have SQLConf pick up the initial set of 
HiveConf.
+    
+    ss.err = new PrintStream(outputBuffer, true, "UTF-8")
+    ss.out = new PrintStream(outputBuffer, true, "UTF-8")
+
     ss
   }
 
-  sessionState.err = new PrintStream(outputBuffer, true, "UTF-8")
-  sessionState.out = new PrintStream(outputBuffer, true, "UTF-8")
-
   override def set(key: String, value: String): Unit = {
     super.set(key, value)
     runSqlHive(s"SET $key=$value")

Reply via email to