Ritik Raj created ASTERIXDB-3540:
------------------------------------

             Summary: Error in generating pushdown schema for columnar 
collection
                 Key: ASTERIXDB-3540
                 URL: https://issues.apache.org/jira/browse/ASTERIXDB-3540
             Project: Apache AsterixDB
          Issue Type: Bug
          Components: STO - Storage
    Affects Versions: 0.9.10
            Reporter: Ritik Raj
             Fix For: 0.9.10


For the following query, the pushdown schema is wrong.

 
{code:java}
create dataset dsColumn PRIMARY KEY(id: String) WITH {
        "storage-format": {
                "format": "column"
        }
}; {code}
 
{code:java}
upsert into dsColumn({
"id": "1",
"name": "Monica",
"x": {
   "y": {
        "age_field": "age"
     }
},
"t": {
   "r": {
        "p": { "age" : "26" }
     }
}
}); {code}
 
{code:java}
select `field-access-by-name`(t.r.p, x.y.age_field) from dsColumn; {code}
 


The generated plan is


{code:java}
distribute result [$$22] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
-- DISTRIBUTE_RESULT  |PARTITIONED|
  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
    project ([$$22]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
    -- STREAM_PROJECT  |PARTITIONED|
      assign [$$22] <- [{"$1": 
$$dsColumn.getField("t").getField("r").getField("p").getField("$$dsColumn.getField("x").getField("y").getField("age_field")")}]
 [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
      -- ASSIGN  |PARTITIONED|
        project ([$$dsColumn]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
        -- STREAM_PROJECT  |PARTITIONED|
          exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
            data-scan []<-[$$23, $$dsColumn] <- Default.dsColumn project 
({t:{r:{p:{null:any}}}}) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
            -- DATASOURCE_SCAN  |PARTITIONED|
              exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 
0.0]
                -- EMPTY_TUPLE_SOURCE  |PARTITIONED| {code}
 

On line 13 of the plan above, we can see that the calculated expected schema is
{code:java}
({t:{r:{p:{null:any}}}}) {code}
which is wrong. As far as I understand, the following should be the expected schema
{code:java}
{t:{r:{p:any}}, x:{y:{age_field:any}}} {code}

Hence the query results in the NPE below.
{code:java}
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.lang.NullPointerException
        at 
org.apache.hyracks.api.exceptions.HyracksDataException.create(HyracksDataException.java:70)
 ~[classes/:?]
        at 
org.apache.hyracks.api.util.ExceptionUtils.setNodeIds(ExceptionUtils.java:70) 
~[classes/:?]
        at org.apache.hyracks.control.nc.Task.run(Task.java:399) ~[classes/:?]
        at 
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
 [?:?]
        at 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
 [?:?]
        at java.base/java.lang.Thread.run(Thread.java:842) [?:?]
Caused by: java.lang.NullPointerException
        at 
org.apache.asterix.om.base.AMutableString.setValue(AMutableString.java:31) 
~[classes/:?]
        at 
org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary.serializeFieldName(AbstractFieldNamesDictionary.java:79)
 ~[classes/:?]
        at 
org.apache.asterix.column.metadata.dictionary.FieldNamesTrieDictionary.getFieldNameIndex(FieldNamesTrieDictionary.java:68)
 ~[classes/:?]
        at 
org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor.visit(SchemaClipperVisitor.java:73)
 ~[classes/:?]
        at 
org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor.visit(SchemaClipperVisitor.java:42)
 ~[classes/:?]
        at org.apache.asterix.om.types.ARecordType.accept(ARecordType.java:428) 
~[classes/:?]
        at 
org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor.visit(SchemaClipperVisitor.java:79)
 ~[classes/:?]
        at 
org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor.visit(SchemaClipperVisitor.java:42)
 ~[classes/:?]
        at org.apache.asterix.om.types.ARecordType.accept(ARecordType.java:428) 
~[classes/:?]
        at 
org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor.visit(SchemaClipperVisitor.java:79)
 ~[classes/:?]
        at 
org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor.visit(SchemaClipperVisitor.java:42)
 ~[classes/:?]
        at org.apache.asterix.om.types.ARecordType.accept(ARecordType.java:428) 
~[classes/:?]
        at 
org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor.visit(SchemaClipperVisitor.java:79)
 ~[classes/:?]
        at 
org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor.visit(SchemaClipperVisitor.java:42)
 ~[classes/:?]
        at org.apache.asterix.om.types.ARecordType.accept(ARecordType.java:428) 
~[classes/:?]
        at 
org.apache.asterix.column.operation.query.QueryColumnMetadata.clip(QueryColumnMetadata.java:246)
 ~[classes/:?]
        at 
org.apache.asterix.column.operation.query.QueryColumnMetadata.create(QueryColumnMetadata.java:204)
 ~[classes/:?]
        at 
org.apache.asterix.column.operation.query.QueryColumnTupleProjector.createProjectionInfo(QueryColumnTupleProjector.java:76)
 ~[classes/:?]
        at 
org.apache.hyracks.storage.am.lsm.btree.column.impls.lsm.LSMColumnBTreeOpContext.createProjectionInfo(LSMColumnBTreeOpContext.java:71)
 ~[classes/:?]
        at 
org.apache.hyracks.storage.am.lsm.btree.column.impls.lsm.LSMColumnBTreeRangeSearchCursor.createAccessor(LSMColumnBTreeRangeSearchCursor.java:65)
 ~[classes/:?]
        at 
org.apache.hyracks.storage.am.lsm.btree.impls.LSMBTreeRangeSearchCursor.doOpen(LSMBTreeRangeSearchCursor.java:406)
 ~[classes/:?]
        at 
org.apache.hyracks.storage.common.EnforcedIndexCursor.open(EnforcedIndexCursor.java:54)
 ~[classes/:?]
        at 
org.apache.hyracks.storage.am.lsm.btree.impls.LSMBTreeSearchCursor.doOpen(LSMBTreeSearchCursor.java:62)
 ~[classes/:?]
        at 
org.apache.hyracks.storage.common.EnforcedIndexCursor.open(EnforcedIndexCursor.java:54)
 ~[classes/:?]
        at 
org.apache.hyracks.storage.am.lsm.btree.impls.LSMBTree.search(LSMBTree.java:219)
 ~[classes/:?]
        at 
org.apache.hyracks.storage.am.lsm.common.impls.LSMHarness.search(LSMHarness.java:451)
 ~[classes/:?]
        at 
org.apache.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor.search(LSMTreeIndexAccessor.java:119)
 ~[classes/:?]
        at 
org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.searchAllPartitions(IndexSearchOperatorNodePushable.java:469)
 ~[classes/:?]
        at 
org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.nextFrame(IndexSearchOperatorNodePushable.java:316)
 ~[classes/:?]
        at 
org.apache.hyracks.api.dataflow.EnforceFrameWriter.nextFrame(EnforceFrameWriter.java:76)
 ~[classes/:?]
        at 
org.apache.hyracks.dataflow.common.comm.io.AbstractFrameAppender.write(AbstractFrameAppender.java:94)
 ~[classes/:?]
        at 
org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory$1.open(EmptyTupleSourceRuntimeFactory.java:55)
 ~[classes/:?]
        at 
org.apache.hyracks.api.dataflow.EnforceFrameWriter.open(EnforceFrameWriter.java:59)
 ~[classes/:?]
        at 
org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$SourcePushRuntime.initialize(AlgebricksMetaOperatorDescriptor.java:175)
 ~[classes/:?]
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.lambda$runInParallel$0(SuperActivityOperatorNodePushable.java:233)
 ~[classes/:?]
        at 
java.base/java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:264) 
[?:?]
        at java.base/java.util.concurrent.FutureTask.run(FutureTask.java) [?:?]
        ... 3 more {code}
 



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to