Chetan Bhat created CARBONDATA-4243:
---------------------------------------

             Summary: Select filter query with to_date in filter fails for table with column_meta_cache configured also having SI
                 Key: CARBONDATA-4243
                 URL: https://issues.apache.org/jira/browse/CARBONDATA-4243
             Project: CarbonData
          Issue Type: Bug
          Components: sql
    Affects Versions: 2.2.0
         Environment: Spark 3.1.1
            Reporter: Chetan Bhat


Create a table with COLUMN_META_CACHE configured, create secondary indexes on it, and load data into the table.

Then execute a SELECT filter query with to_date in the filter.

CREATE TABLE uniqdata (
  CUST_ID int, CUST_NAME String, ACTIVE_EMUI_VERSION string,
  DOB timestamp, DOJ timestamp,
  BIGINT_COLUMN1 bigint, BIGINT_COLUMN2 bigint,
  DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,36),
  Double_COLUMN1 double, Double_COLUMN2 double,
  INTEGER_COLUMN1 int)
stored as carbondata
TBLPROPERTIES('COLUMN_META_CACHE'='CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ');

CREATE INDEX indextable2 ON TABLE uniqdata (DOB) AS 'carbondata';
CREATE INDEX indextable3 ON TABLE uniqdata (DOJ) AS 'carbondata';

LOAD DATA INPATH 'hdfs://hacluster/chetan/2000_UniqData.csv' INTO TABLE uniqdata
OPTIONS('DELIMITER'=',', 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');
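
The query that then fails (extracted from the Beeline output below):

select max(to_date(DOB)), min(to_date(DOB)), count(to_date(DOB))
from uniqdata
where to_date(DOB)='1975-06-11' or to_date(DOB)='1975-06-23';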

 

Issue: the SELECT filter query with to_date in the filter fails for a table that has COLUMN_META_CACHE configured and also has a secondary index (SI).

0: jdbc:hive2://10.21.19.14:23040/default> select max(to_date(DOB)),min(to_date(DOB)),count(to_date(DOB)) from uniqdata where to_date(DOB)='1975-06-11' or to_date(DOB)='1975-06-23';
Error: org.apache.hive.service.cli.HiveSQLException: Error running query: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: makeCopy, tree:
!BroadCastSIFilterPushJoin [none#0], [none#1], Inner, BuildRight
:- *(6) ColumnarToRow
: +- Scan CarbonDatasourceHadoopRelation chetan.uniqdata[dob#847024] Batched: true, DirectScan: false, PushedFilters: [((cast(input[0] as date) = 1987) or (cast(in9))], ReadSchema: [dob]
+- *(8) HashAggregate(keys=[positionReference#847161], functions=[], output=[positionReference#847161])
 +- ReusedExchange [positionReference#847161], Exchange hashpartitioning(positionReference#847161, 200), ENSURE_REQUIREMENTS, [id=#195473]

at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:361)
 at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.$anonfun$run$2(SparkExecuteStatementOperation.scala:263)
 at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
 at org.apache.spark.sql.hive.thriftserver.SparkOperation.withLocalProperties(SparkOperation.scala:78)
 at org.apache.spark.sql.hive.thriftserver.SparkOperation.withLocalProperties$(SparkOperation.scala:62)
 at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.withLocalProperties(SparkExecuteStatementOperation.scala:43)
 at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:263)
 at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:258)
 at java.security.AccessController.doPrivileged(Native Method)
 at javax.security.auth.Subject.doAs(Subject.java:422)
 at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1746)
 at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2.run(SparkExecuteStatementOperation.scala:272)
 at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
 at java.util.concurrent.FutureTask.run(FutureTask.java:266)
 at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
 at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
 at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: makeCopy, tree:
!BroadCastSIFilterPushJoin [none#0], [none#1], Inner, BuildRight
:- *(6) ColumnarToRow
: +- Scan CarbonDatasourceHadoopRelation chetan.uniqdata[dob#847024] Batched: true, DirectScan: false, PushedFilters: [((cast(input[0] as date) = 1987) or (cast(in9))], ReadSchema: [dob]
+- *(8) HashAggregate(keys=[positionReference#847161], functions=[], output=[positionReference#847161])
 +- ReusedExchange [positionReference#847161], Exchange hashpartitioning(positionReference#847161, 200), ENSURE_REQUIREMENTS, [id=#195473]

at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
 at org.apache.spark.sql.catalyst.trees.TreeNode.makeCopy(TreeNode.scala:468)
 at org.apache.spark.sql.catalyst.trees.TreeNode.makeCopy(TreeNode.scala:457)
 at org.apache.spark.sql.execution.SparkPlan.makeCopy(SparkPlan.scala:87)
 at org.apache.spark.sql.execution.SparkPlan.makeCopy(SparkPlan.scala:58)
 at org.apache.spark.sql.catalyst.trees.TreeNode.withNewChildren(TreeNode.scala:294)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.doCanonicalize(QueryPlan.scala:405)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.canonicalized$lzycompute(QueryPlan.scala:373)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.canonicalized(QueryPlan.scala:372)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.$anonfun$doCanonicalize$1(QueryPlan.scala:387)
 at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
 at scala.collection.immutable.List.foreach(List.scala:392)
 at scala.collection.TraversableLike.map(TraversableLike.scala:238)
 at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
 at scala.collection.immutable.List.map(List.scala:298)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.doCanonicalize(QueryPlan.scala:387)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.canonicalized$lzycompute(QueryPlan.scala:373)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.canonicalized(QueryPlan.scala:372)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.$anonfun$doCanonicalize$1(QueryPlan.scala:387)
 at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
 at scala.collection.immutable.List.foreach(List.scala:392)
 at scala.collection.TraversableLike.map(TraversableLike.scala:238)
 at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
 at scala.collection.immutable.List.map(List.scala:298)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.doCanonicalize(QueryPlan.scala:387)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.canonicalized$lzycompute(QueryPlan.scala:373)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.canonicalized(QueryPlan.scala:372)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.$anonfun$doCanonicalize$1(QueryPlan.scala:387)
 at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
 at scala.collection.immutable.List.foreach(List.scala:392)
 at scala.collection.TraversableLike.map(TraversableLike.scala:238)
 at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
 at scala.collection.immutable.List.map(List.scala:298)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.doCanonicalize(QueryPlan.scala:387)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.canonicalized$lzycompute(QueryPlan.scala:373)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.canonicalized(QueryPlan.scala:372)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.$anonfun$doCanonicalize$1(QueryPlan.scala:387)
 at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
 at scala.collection.immutable.List.foreach(List.scala:392)
 at scala.collection.TraversableLike.map(TraversableLike.scala:238)
 at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
 at scala.collection.immutable.List.map(List.scala:298)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.doCanonicalize(QueryPlan.scala:387)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.canonicalized$lzycompute(QueryPlan.scala:373)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.canonicalized(QueryPlan.scala:372)
 at org.apache.spark.sql.catalyst.plans.QueryPlan.sameResult(QueryPlan.scala:420)
 at org.apache.spark.sql.execution.exchange.ReuseExchange$$anonfun$org$apache$spark$sql$execution$exchange$ReuseExchange$$reuse$1$1.$anonfun$applyOrElse$3(Ex
 at org.apache.spark.sql.execution.exchange.ReuseExchange$$anonfun$org$apache$spark$sql$execution$exchange$ReuseExchange$$reuse$1$1.$anonfun$applyOrElse$3$ad115)
 at scala.collection.IndexedSeqOptimized.$anonfun$find$1(IndexedSeqOptimized.scala:53)
 at scala.collection.IndexedSeqOptimized.$anonfun$find$1$adapted(IndexedSeqOptimized.scala:53)
 at scala.collection.IndexedSeqOptimized.segmentLength(IndexedSeqOptimized.scala:198)
 at scala.collection.IndexedSeqOptimized.segmentLength$(IndexedSeqOptimized.scala:195)
 at scala.collection.mutable.ArrayBuffer.segmentLength(ArrayBuffer.scala:49)
 at scala.collection.GenSeqLike.prefixLength(GenSeqLike.scala:98)
 at scala.collection.GenSeqLike.prefixLength$(GenSeqLike.scala:98)
 at scala.collection.AbstractSeq.prefixLength(Seq.scala:45)
 at scala.collection.IndexedSeqOptimized.find(IndexedSeqOptimized.scala:53)
 at scala.collection.IndexedSeqOptimized.find$(IndexedSeqOptimized.scala:52)
 at scala.collection.mutable.ArrayBuffer.find(ArrayBuffer.scala:49)
 at org.apache.spark.sql.execution.exchange.ReuseExchange$$anonfun$org$apache$spark$sql$execution$exchange$ReuseExchange$$reuse$1$1.applyOrElse(Exchange.scal
 at org.apache.spark.sql.execution.exchange.ReuseExchange$$anonfun$org$apache$spark$sql$execution$exchange$ReuseExchange$$reuse$1$1.applyOrElse(Exchange.scal
 at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:38)
 at org.apache.spark.sql.execution.exchange.ReuseExchange$$anonfun$apply$1.applyOrElse(Exchange.scala:129)
 at org.apache.spark.sql.execution.exchange.ReuseExchange$$anonfun$apply$1.applyOrElse(Exchange.scala:128)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$4(TreeNode.scala:345)
 at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:73)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:345)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$1(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$1(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$1(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$1(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChild$2(TreeNode.scala:376)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$4(TreeNode.scala:437)
 at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
 at scala.collection.immutable.List.foreach(List.scala:392)
 at scala.collection.TraversableLike.map(TraversableLike.scala:238)
 at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
 at scala.collection.immutable.List.map(List.scala:298)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:437)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$1(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$1(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$1(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$1(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$1(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$1(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUp$1(TreeNode.scala:338)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
 at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:338)
 at org.apache.spark.sql.execution.exchange.ReuseExchange$.apply(Exchange.scala:128)
 at org.apache.spark.sql.execution.exchange.ReuseExchange$.apply(Exchange.scala:102)
 at org.apache.spark.sql.execution.QueryExecution$.$anonfun$prepareForExecution$1(QueryExecution.scala:372)
 at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
 at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
 at scala.collection.immutable.List.foldLeft(List.scala:89)
 at org.apache.spark.sql.execution.QueryExecution$.prepareForExecution(QueryExecution.scala:371)
 at org.apache.spark.sql.execution.QueryExecution.$anonfun$executedPlan$1(QueryExecution.scala:117)
 at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
 at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:143)
 at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772)
 at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:143)
 at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:117)
 at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:110)
 at org.apache.spark.sql.execution.QueryExecution.$anonfun$writePlans$5(QueryExecution.scala:225)
 at org.apache.spark.sql.catalyst.plans.QueryPlan$.append(QueryPlan.scala:487)
 at org.apache.spark.sql.execution.QueryExecution.writePlans(QueryExecution.scala:225)
 at org.apache.spark.sql.execution.QueryExecution.toString(QueryExecution.scala:240)
 at org.apache.spark.sql.execution.QueryExecution.toString(QueryExecution.scala:233)
 at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:328)
 ... 16 more
Caused by: java.lang.reflect.InvocationTargetException
 at sun.reflect.GeneratedConstructorAccessor757.newInstance(Unknown Source)
 at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
 at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$makeCopy$7(TreeNode.scala:501)
 at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:73)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$makeCopy$1(TreeNode.scala:500)
 at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
 ... 175 more
Caused by: java.lang.NullPointerException
 at org.apache.spark.sql.CarbonDataSourceScanHelper.indexFilter$lzycompute(CarbonDataSourceScanHelper.scala:62)
 at org.apache.spark.sql.CarbonDataSourceScanHelper.indexFilter(CarbonDataSourceScanHelper.scala:61)
 at org.apache.spark.sql.CarbonDataSourceScanHelper.inputRDD$lzycompute(CarbonDataSourceScanHelper.scala:100)
 at org.apache.spark.sql.CarbonDataSourceScanHelper.inputRDD(CarbonDataSourceScanHelper.scala:96)
 at org.apache.spark.sql.execution.strategy.CarbonDataSourceScan.inputRDDs(CarbonDataSourceScan.scala:99)
 at org.apache.spark.sql.secondaryindex.joins.BroadCastSIFilterPushJoin$$anonfun$1.applyOrElse(BroadCastSIFilterPushJoin.scala:126)
 at org.apache.spark.sql.secondaryindex.joins.BroadCastSIFilterPushJoin$$anonfun$1.applyOrElse(BroadCastSIFilterPushJoin.scala:125)
 at scala.PartialFunction$Lifted.apply(PartialFunction.scala:228)
 at scala.PartialFunction$Lifted.apply(PartialFunction.scala:224)
 at org.apache.spark.sql.catalyst.trees.TreeNode.collectFirst(TreeNode.scala:231)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$collectFirst$3(TreeNode.scala:232)
 at scala.Option.orElse(Option.scala:447)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$collectFirst$2(TreeNode.scala:232)
 at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
 at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
 at scala.collection.immutable.List.foldLeft(List.scala:89)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$collectFirst$1(TreeNode.scala:232)
 at scala.Option.orElse(Option.scala:447)
 at org.apache.spark.sql.catalyst.trees.TreeNode.collectFirst(TreeNode.scala:232)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$collectFirst$3(TreeNode.scala:232)
 at scala.Option.orElse(Option.scala:447)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$collectFirst$2(TreeNode.scala:232)
 at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
 at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
 at scala.collection.immutable.List.foldLeft(List.scala:89)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$collectFirst$1(TreeNode.scala:232)
 at scala.Option.orElse(Option.scala:447)
 at org.apache.spark.sql.catalyst.trees.TreeNode.collectFirst(TreeNode.scala:232)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$collectFirst$3(TreeNode.scala:232)
 at scala.Option.orElse(Option.scala:447)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$collectFirst$2(TreeNode.scala:232)
 at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
 at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
 at scala.collection.immutable.List.foldLeft(List.scala:89)
 at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$collectFirst$1(TreeNode.scala:232)
 at scala.Option.orElse(Option.scala:447)
 at org.apache.spark.sql.catalyst.trees.TreeNode.collectFirst(TreeNode.scala:232)
 at org.apache.spark.sql.secondaryindex.joins.BroadCastSIFilterPushJoin.<init>(BroadCastSIFilterPushJoin.scala:125)
 ... 182 more (state=,code=0)
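
Reading the trace bottom-up: the NullPointerException comes from the lazy val CarbonDataSourceScanHelper.indexFilter, forced from the BroadCastSIFilterPushJoin constructor (BroadCastSIFilterPushJoin.scala:125) via CarbonDataSourceScan.inputRDDs. The constructor runs a second time when the ReuseExchange rule canonicalizes the plan: TreeNode.makeCopy re-invokes it reflectively on the canonicalized copy, where the state indexFilter dereferences appears to be null. A minimal, self-contained Scala sketch of that failure pattern follows; the classes below are hypothetical stand-ins, not the actual Spark/CarbonData code (only the class/method names in the comments come from the trace):

// Hypothetical stand-ins for illustration only.
class Scan(session: AnyRef /* assumed null on the canonicalized copy */) {
  // Analogue of CarbonDataSourceScanHelper.indexFilter: a lazy val that
  // dereferences per-session state the first time something forces it.
  lazy val indexFilter: String = session.toString // NPE when session == null
  def inputRDDs(): Seq[String] = Seq(indexFilter) // forces the lazy val
}

class SIFilterPushJoin(buildSide: Scan) {
  // Constructor-time work, as at BroadCastSIFilterPushJoin.scala:125-126.
  // TreeNode.makeCopy re-invokes this constructor while canonicalizing the
  // plan for exchange reuse, so the stripped-down copy is forced as well.
  val buildRdds: Seq[String] = buildSide.inputRDDs()
}

object Repro extends App {
  new SIFilterPushJoin(new Scan("sparkSession")) // normal plan: fine
  new SIFilterPushJoin(new Scan(null))           // canonicalized copy: NPE
}

If this reading is right, an untested possible workaround is disabling exchange reuse (SET spark.sql.exchange.reuse=false;) so the ReuseExchange rule does not canonicalize this plan; the proper fix would be making the constructor/lazy val safe on canonicalized copies.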


