juliuszsompolski commented on a change in pull request #25611: [SPARK-28901][SQL] SparkThriftServer's Cancel SQL Operation show it in JDBC Tab UI
URL: https://github.com/apache/spark/pull/25611#discussion_r322283492
 
 

 ##########
 File path: sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
 ##########
 @@ -201,33 +208,38 @@ private[hive] class SparkExecuteStatementOperation(
         setBackgroundHandle(backgroundHandle)
       } catch {
         case rejected: RejectedExecutionException =>
+          logError("Error submitting query in background, query rejected", rejected)
           setState(OperationState.ERROR)
+          HiveThriftServer2.listener.onStatementError(
+            statementId, rejected.getMessage, SparkUtils.exceptionString(rejected))
           throw new HiveSQLException("The background threadpool cannot accept" +
             " new task for execution, please retry the operation", rejected)
         case NonFatal(e) =>
           logError(s"Error executing query in background", e)
           setState(OperationState.ERROR)
+          HiveThriftServer2.listener.onStatementError(
+            statementId, e.getMessage, SparkUtils.exceptionString(e))
           throw new HiveSQLException(e)
       }
     }
   }
 
   private def execute(): Unit = withSchedulerPool {
-    statementId = UUID.randomUUID().toString
-    logInfo(s"Running query '$statement' with $statementId")
-    setState(OperationState.RUNNING)
-    // Always use the latest class loader provided by executionHive's state.
-    val executionHiveClassLoader = sqlContext.sharedState.jarClassLoader
-    Thread.currentThread().setContextClassLoader(executionHiveClassLoader)
-
-    HiveThriftServer2.listener.onStatementStart(
-      statementId,
-      parentSession.getSessionHandle.getSessionId.toString,
-      statement,
-      statementId,
-      parentSession.getUsername)
-    sqlContext.sparkContext.setJobGroup(statementId, statement)
     try {
+      synchronized {
+        if (getStatus.getState.isTerminal) {
+          logInfo(s"Query with $statementId in terminal state before it started running")
+          return
+        } else {
+          logInfo(s"Running query with $statementId")
+          setState(OperationState.RUNNING)
+        }
+      }
+      // Always use the latest class loader provided by executionHive's state.
+      val executionHiveClassLoader = sqlContext.sharedState.jarClassLoader
+      Thread.currentThread().setContextClassLoader(executionHiveClassLoader)
+
+      sqlContext.sparkContext.setJobGroup(statementId, statement)
 
 Review comment:
   @AngersZhuuuu 
   I don't think it will help. It's not just about setJobGroup, it's about actually starting the job: you can call setJobGroup, then cancelJobGroup, and afterwards still start running jobs in that job group.
   What cancelJobGroup does is only cancel jobs that are currently running in the job group; it does not prevent further jobs from being started. So execution can still get past this point and then be cancelled before the jobs are actually started.
   I think it would be safer to handle it from the catch block.
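   For illustration, here is a minimal Scala sketch of the sequence I mean (a standalone toy program, not the PR's code; the group id and app name are made up). It shows that cancelJobGroup only affects jobs already active in the group, so a job submitted afterwards under the same group still runs:

   ```scala
   import org.apache.spark.sql.SparkSession

   object CancelJobGroupSketch {
     def main(args: Array[String]): Unit = {
       val spark = SparkSession.builder()
         .master("local[2]")
         .appName("cancel-job-group-sketch")
         .getOrCreate()
       val sc = spark.sparkContext

       // Tag jobs submitted from this thread with a job group.
       sc.setJobGroup("stmt-1", "hypothetical statement")

       // Cancel the group before any job has been submitted.
       // This cancels jobs currently active in the group; it does not
       // stop new jobs from being started under the same group id.
       sc.cancelJobGroup("stmt-1")

       // A job started afterwards still runs to completion.
       val sum = sc.parallelize(1 to 1000).sum()
       println(s"Job ran after cancelJobGroup, sum = $sum")

       spark.stop()
     }
   }
   ```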
