Repository: carbondata
Updated Branches:
  refs/heads/master cfb9a9a20 -> 8a5369d2b


[CARBONDATA-2326][Statistics] Fix NPE of statistics when spark.sql.execution.id 
is null

This closes #2151


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/8a5369d2
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/8a5369d2
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/8a5369d2

Branch: refs/heads/master
Commit: 8a5369d2b4fa889583f25a39dc88a343c92c2260
Parents: cfb9a9a
Author: QiangCai <qiang...@qq.com>
Authored: Tue Apr 10 10:07:53 2018 +0800
Committer: kumarvishal <kumarvishal.1...@gmail.com>
Committed: Tue Apr 10 15:38:35 2018 +0800

----------------------------------------------------------------------
 .../carbondata/spark/rdd/CarbonScanRDD.scala    | 44 ++++++++++----------
 1 file changed, 23 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/8a5369d2/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonScanRDD.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonScanRDD.scala
 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonScanRDD.scala
index 256e43d..df953da 100644
--- 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonScanRDD.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonScanRDD.scala
@@ -174,27 +174,29 @@ class CarbonScanRDD(
     } finally {
       Profiler.invokeIfEnable {
         val endTime = System.currentTimeMillis()
-        val executionId = 
spark.sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY).toLong
-        Profiler.send(
-          GetPartition(
-            executionId,
-            tableInfo.getDatabaseName + "." + 
tableInfo.getFactTable.getTableName,
-            tablePath,
-            queryId,
-            partitions.length,
-            startTime,
-            endTime,
-            getSplitsStartTime,
-            getSplitsEndTime,
-            numSegments,
-            numStreamSegments,
-            numBlocks,
-            distributeStartTime,
-            distributeEndTime,
-            if (filterExpression == null) "" else 
filterExpression.getStatement,
-            if (columnProjection == null) "" else 
columnProjection.getAllColumns.mkString(",")
+        val executionId = 
spark.sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
+        if (executionId != null) {
+          Profiler.send(
+            GetPartition(
+              executionId.toLong,
+              tableInfo.getDatabaseName + "." + 
tableInfo.getFactTable.getTableName,
+              tablePath,
+              queryId,
+              partitions.length,
+              startTime,
+              endTime,
+              getSplitsStartTime,
+              getSplitsEndTime,
+              numSegments,
+              numStreamSegments,
+              numBlocks,
+              distributeStartTime,
+              distributeEndTime,
+              if (filterExpression == null) "" else 
filterExpression.getStatement,
+              if (columnProjection == null) "" else 
columnProjection.getAllColumns.mkString(",")
+            )
           )
-        )
+        }
       }
     }
   }
@@ -629,7 +631,7 @@ class CarbonScanRDD(
       recorder.recordStatistics(queryStatistic)
       // print executor query statistics for each task_id
       val statistics = recorder.statisticsForTask(taskId, queryStartTime)
-      if (statistics != null) {
+      if (statistics != null && executionId != null) {
         Profiler.invokeIfEnable {
           val inputSplit = split.asInstanceOf[CarbonSparkPartition].split.value
           inputSplit.calculateLength()

Reply via email to the mailing list.