This is an automated email from the ASF dual-hosted git repository.

manishgupta88 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new 86713f5  [CARBONDATA-3241] Refactor the requested scan columns and the projection columns
86713f5 is described below

commit 86713f505a80d2a22912b15d65aa008324ad29e4
Author: dhatchayani <dhatcha.offic...@gmail.com>
AuthorDate: Thu Jan 10 15:00:51 2019 +0530

    [CARBONDATA-3241] Refactor the requested scan columns and the projection columns
    
    Refactor the requested columns method so that both the scan list and the projection list are updated together.
    
    This closes #3062
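
The gist of the change, as a rough standalone sketch (Scala 2.11/2.12 syntax to match the diff; Attribute, Expression and the attributeMap parameter are simplified stand-ins, not the actual Spark/CarbonData classes or the exact method signature): getRequestedColumns now also accepts the rewritten projection expressions and returns them together with the scan columns as a single tuple, so the caller picks up both lists in one step instead of tracking the projection list separately.

import scala.collection.mutable

object RequestedColumnsSketch {
  // Hypothetical stand-ins for Spark's Attribute/Expression, for illustration only.
  case class Attribute(name: String)
  case class Expression(sql: String)

  // After the refactor: the scan columns and the (possibly rewritten) projection
  // expressions are computed and returned together as one tuple.
  def getRequestedColumns(
      attributeMap: Map[Attribute, Attribute],  // stands in for relation.attributeMap
      projectsAttr: Seq[Attribute],
      filterSet: Set[Attribute],
      handledSet: Set[Attribute],
      newProjectList: Seq[Attribute],
      updatedProjects: Seq[Expression]): (Seq[Attribute], Seq[Expression]) = {
    val scanColumns =
      (projectsAttr.to[mutable.LinkedHashSet] ++ filterSet -- handledSet)
        .map(attributeMap).toSeq ++ newProjectList
    (scanColumns, updatedProjects)
  }

  def main(args: Array[String]): Unit = {
    val a = Attribute("a"); val b = Attribute("b"); val c = Attribute("c")
    val identityMap = Map(a -> a, b -> b, c -> c)
    val (requestedColumns, updatedProjects) = getRequestedColumns(
      identityMap,
      projectsAttr = Seq(a, b),
      filterSet = Set(c),        // referenced only by a filter
      handledSet = Set(b),       // already handled by a pushed filter
      newProjectList = Seq.empty,
      updatedProjects = Seq(Expression("a + 1")))
    // requestedColumns contains a and c (projection attrs plus unhandled filter
    // attrs); updatedProjects is threaded through unchanged.
    println(requestedColumns.toList)  // List(Attribute(a), Attribute(c))
    println(updatedProjects.toList)   // List(Expression(a + 1))
  }
}

Returning the pair keeps the requested scan columns and the projection expressions in sync at the single call site shown in the diff below.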
---
 .../execution/strategy/CarbonLateDecodeStrategy.scala | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
index a23a191..0f706af 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
@@ -367,7 +367,7 @@ private[sql] class CarbonLateDecodeStrategy extends SparkStrategy {
      // In case of implicit exist we should disable vectorPushRowFilters as it goes in IUD flow
       // to get the positionId or tupleID
       var implicitExisted = false
-      val updatedProjects = projects.map {
+      var updatedProjects = projects.map {
           case a@Alias(s: ScalaUDF, name)
            if name.equalsIgnoreCase(CarbonCommonConstants.POSITION_ID) ||
                name.equalsIgnoreCase(CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID) =>
@@ -388,9 +388,15 @@ private[sql] class CarbonLateDecodeStrategy extends SparkStrategy {
             }
           case other => other
       }
+      val updatedColumns: (Seq[Attribute], Seq[Expression]) = getRequestedColumns(relation,
+        projectsAttr,
+        filterSet,
+        handledSet,
+        newProjectList,
+        updatedProjects)
       // Don't request columns that are only referenced by pushed filters.
-      val requestedColumns =
-        getRequestedColumns(relation, projectsAttr, filterSet, handledSet, newProjectList)
+      val requestedColumns = updatedColumns._1
+      updatedProjects = updatedColumns._2
 
       var updateRequestedColumns =
        if (!vectorPushRowFilters && !implicitExisted && !hasDictionaryFilterCols
@@ -449,9 +455,10 @@ private[sql] class CarbonLateDecodeStrategy extends SparkStrategy {
       projectsAttr: Seq[Attribute],
       filterSet: AttributeSet,
       handledSet: AttributeSet,
-      newProjectList: Seq[Attribute]) = {
-    (projectsAttr.to[mutable.LinkedHashSet] ++ filterSet -- handledSet)
-      .map(relation.attributeMap).toSeq ++ newProjectList
+      newProjectList: Seq[Attribute],
+      updatedProjects: Seq[Expression]): (Seq[Attribute], Seq[Expression]) = {
+    ((projectsAttr.to[mutable.LinkedHashSet] ++ filterSet -- handledSet)
+       .map(relation.attributeMap).toSeq ++ newProjectList, updatedProjects)
   }
 
   private def getDataSourceScan(relation: LogicalRelation,
