[ https://issues.apache.org/jira/browse/DRILL-1250?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Andy Pernsteiner updated DRILL-1250:
------------------------------------

    Description: 
After fighting with DRILL-1239, I decided to remove ALL null values from all arrays
in my nested JSON file. Here's a snippet of the data (a sketch of that kind of cleanup follows the snippet):

{code}

{"trans_id":49995,"date":"07/30/2013","time":"17:29:20","user_info":{"cust_id":6112,"device":"IOS5","state":"tx"},"marketing_info":{"camp_id":7,"searchwords":["sad"]},"trans_info":{"prod_id":[338],"purch_flag":"false"}}
{"trans_id":49996,"date":"05/18/2013","time":"22:34:58","user_info":{"cust_id":85,"device":"AOS4.2","state":"pa"},"marketing_info":{"camp_id":2,"searchwords":["happy"]},"trans_info":{"prod_id":[143,330,65,12],"purch_flag":"false"}}
{"trans_id":49997,"date":"08/04/2012","time":"13:19:58","user_info":{"cust_id":14,"device":"AOS4.4","state":"tx"},"marketing_info":{"camp_id":10,"searchwords":["none"]},"trans_info":{"prod_id":[0],"purch_flag":"false"}}
{"trans_id":49998,"date":"07/19/2013","time":"21:33:09","user_info":{"cust_id":168,"device":"IOS5","state":"tx"},"marketing_info":{"camp_id":7,"searchwords":["sad"]},"trans_info":{"prod_id":[0],"purch_flag":"true"}}
{"trans_id":49999,"date":"01/29/2013","time":"15:24:21","user_info":{"cust_id":9780,"device":"IOS5","state":"ca"},"marketing_info":{"camp_id":1,"searchwords":["none"]},"trans_info":{"prod_id":[30,149],"purch_flag":"false"}}

{code}
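For reference, a minimal sketch of the kind of cleanup involved, assuming a Jackson-based helper (the class name and file arguments are placeholders, not what was actually used). It reads one JSON record per line, drops null elements from every array (including nested ones such as searchwords and prod_id), and writes each cleaned record back out:

{code}
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.PrintWriter;

public class StripArrayNulls {

    // Recursively drop null elements from every array in the JSON tree.
    static void stripNulls(JsonNode node) {
        if (node.isArray()) {
            ArrayNode array = (ArrayNode) node;
            for (int i = array.size() - 1; i >= 0; i--) {
                if (array.get(i).isNull()) {
                    array.remove(i);
                }
            }
        }
        // Recurse into the remaining children of objects and arrays.
        for (JsonNode child : node) {
            stripNulls(child);
        }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        try (BufferedReader in = new BufferedReader(new FileReader(args[0]));
             PrintWriter out = new PrintWriter(new FileWriter(args[1]))) {
            String line;
            while ((line = in.readLine()) != null) {
                if (line.trim().isEmpty()) {
                    continue; // the file has one JSON record per line
                }
                JsonNode record = mapper.readTree(line);
                stripNulls(record);
                out.println(mapper.writeValueAsString(record));
            }
        }
    }
}
{code}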

If I run the following select query, I get the error below:

{code}
select t.trans_id, t.user_info.cust_id as cust_id,
  t.user_info.device as device, t.user_info.state as state,
  t.marketing_info.camp_id as camp_id, t.marketing_info.searchwords,
  t.trans_info.prod_id as prod_id, t.trans_info.purch_flag as purch_flag
from click.`json_large`.`/nested.json` t;
{code}

{code}
0: jdbc:drill:>  select t.trans_id, t.user_info.cust_id as cust_id, 
. . . . . . . >  t.user_info.device as device, t.user_info.state as state, 
. . . . . . . >  t.marketing_info.camp_id as camp_id, 
t.marketing_info.searchwords, 
. . . . . . . >  t.trans_info.prod_id as prod_id, t.trans_info.purch_flag as 
purch_flag from 
. . . . . . . >  click.`json_large`.`/nested.json` t;
Query failed: Screen received stop request sent. Line 156, Column 32: A method 
named "copyAsValue" is not declared in any enclosing class nor any supertype, 
nor through a static import [d1668742-dd28-477d-a941-d03f0226d4e4]
Node details: ip-172-16-1-111:31011/31012
org.apache.drill.exec.exception.SchemaChangeException: Failure while attempting 
to load generated class
        at 
org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:371)
        at 
org.apache.drill.exec.record.AbstractSingleRecordBatch.innerNext(AbstractSingleRecordBatch.java:66)
        at 
org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.innerNext(ProjectRecordBatch.java:120)
        at 
org.apache.drill.exec.record.AbstractRecordBatch.next(AbstractRecordBatch.java:95)
        at 
org.apache.drill.exec.physical.impl.validate.IteratorValidatorBatchIterator.next(IteratorValidatorBatchIterator.java:116)
        at 
org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:59)
        at 
org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext(ScreenCreator.java:98)
        at 
org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:49)
        at 
org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:116)
        at 
org.apache.drill.exec.work.WorkManager$RunnableWrapper.run(WorkManager.java:250)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.drill.exec.exception.ClassTransformationException: 
Failure generating transformation classes for value: 
 
package org.apache.drill.exec.test.generated;

import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.expr.holders.BitHolder;
import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
import org.apache.drill.exec.expr.holders.RepeatedVarCharHolder;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.record.RecordBatch;
import org.apache.drill.exec.vector.NullableVarCharVector;
import org.apache.drill.exec.vector.RepeatedVarCharVector;
import org.apache.drill.exec.vector.complex.impl.RepeatedVarCharWriterImpl;
import org.apache.drill.exec.vector.complex.writer.VarCharWriter;

public class ProjectorGen302 {

    NullableVarCharVector vv0;
    NullableVarCharVector vv4;
    NullableVarCharVector vv8;
    NullableVarCharVector vv12;
    RepeatedVarCharVector vv16;
    RepeatedVarCharVector vv20;
    VarCharWriter writer23;
    NullableVarCharVector vv25;
    NullableVarCharVector vv29;

    public void doSetup(FragmentContext context, RecordBatch incoming, 
RecordBatch outgoing)
        throws SchemaChangeException
    {
        {
            int[] fieldIds1 = new int[ 2 ] ;
            fieldIds1 [ 0 ] = 3;
            fieldIds1 [ 1 ] = 1;
            Object tmp2 = 
(incoming).getValueAccessorById(NullableVarCharVector.class, 
fieldIds1).getValueVector();
            if (tmp2 == null) {
                throw new SchemaChangeException("Failure while loading vector 
vv0 with id: org.apache.drill.exec.record.TypedFieldId@7dc6bfa3.");
            }
            vv0 = ((NullableVarCharVector) tmp2);
            int[] fieldIds5 = new int[ 1 ] ;
            fieldIds5 [ 0 ] = 2;
            Object tmp6 = 
(outgoing).getValueAccessorById(NullableVarCharVector.class, 
fieldIds5).getValueVector();
            if (tmp6 == null) {
                throw new SchemaChangeException("Failure while loading vector 
vv4 with id: org.apache.drill.exec.record.TypedFieldId@adcdf4e1.");
            }
            vv4 = ((NullableVarCharVector) tmp6);
        }
        {
            int[] fieldIds9 = new int[ 2 ] ;
            fieldIds9 [ 0 ] = 3;
            fieldIds9 [ 1 ] = 2;
            Object tmp10 = 
(incoming).getValueAccessorById(NullableVarCharVector.class, 
fieldIds9).getValueVector();
            if (tmp10 == null) {
                throw new SchemaChangeException("Failure while loading vector 
vv8 with id: org.apache.drill.exec.record.TypedFieldId@7f7b9842.");
            }
            vv8 = ((NullableVarCharVector) tmp10);
            int[] fieldIds13 = new int[ 1 ] ;
            fieldIds13 [ 0 ] = 3;
            Object tmp14 = 
(outgoing).getValueAccessorById(NullableVarCharVector.class, 
fieldIds13).getValueVector();
            if (tmp14 == null) {
                throw new SchemaChangeException("Failure while loading vector 
vv12 with id: org.apache.drill.exec.record.TypedFieldId@af82cd80.");
            }
            vv12 = ((NullableVarCharVector) tmp14);
        }
        {
            int[] fieldIds17 = new int[ 2 ] ;
            fieldIds17 [ 0 ] = 4;
            fieldIds17 [ 1 ] = 1;
            Object tmp18 = 
(incoming).getValueAccessorById(RepeatedVarCharVector.class, 
fieldIds17).getValueVector();
            if (tmp18 == null) {
                throw new SchemaChangeException("Failure while loading vector 
vv16 with id: org.apache.drill.exec.record.TypedFieldId@b5f8abde.");
            }
            vv16 = ((RepeatedVarCharVector) tmp18);
            int[] fieldIds21 = new int[ 1 ] ;
            fieldIds21 [ 0 ] = 5;
            Object tmp22 = 
(outgoing).getValueAccessorById(RepeatedVarCharVector.class, 
fieldIds21).getValueVector();
            if (tmp22 == null) {
                throw new SchemaChangeException("Failure while loading vector 
vv20 with id: org.apache.drill.exec.record.TypedFieldId@b6382fb8.");
            }
            vv20 = ((RepeatedVarCharVector) tmp22);
            writer23 = new RepeatedVarCharWriterImpl(vv20, null);
        }
        {
            int[] fieldIds26 = new int[ 2 ] ;
            fieldIds26 [ 0 ] = 5;
            fieldIds26 [ 1 ] = 1;
            Object tmp27 = 
(incoming).getValueAccessorById(NullableVarCharVector.class, 
fieldIds26).getValueVector();
            if (tmp27 == null) {
                throw new SchemaChangeException("Failure while loading vector 
vv25 with id: org.apache.drill.exec.record.TypedFieldId@e7933625.");
            }
            vv25 = ((NullableVarCharVector) tmp27);
            int[] fieldIds30 = new int[ 1 ] ;
            fieldIds30 [ 0 ] = 7;
            Object tmp31 = 
(outgoing).getValueAccessorById(NullableVarCharVector.class, 
fieldIds30).getValueVector();
            if (tmp31 == null) {
                throw new SchemaChangeException("Failure while loading vector 
vv29 with id: org.apache.drill.exec.record.TypedFieldId@b6562ffc.");
            }
            vv29 = ((NullableVarCharVector) tmp31);
        }
    }

    public boolean doEval(int inIndex, int outIndex)
        throws SchemaChangeException
    {
        {
            NullableVarCharHolder out3 = new NullableVarCharHolder();
            out3 .isSet = vv0 .getAccessor().isSet((inIndex));
            if (out3 .isSet == 1) {
                {
                    vv0 .getAccessor().get((inIndex), out3);
                }
            }
            BitHolder out7 = new BitHolder();
            out7 .value = 1;
            if (!(out3 .isSet == 0)) {
                if (!vv4 .getMutator().setSafe((outIndex), out3)) {
                    out7 .value = 0;
                }
            } else {
                if (!vv4 .getMutator().isSafe((outIndex))) {
                    out7 .value = 0;
                }
            }
            if (out7 .value == 0) {
                return false;
            }
        }
        {
            NullableVarCharHolder out11 = new NullableVarCharHolder();
            out11 .isSet = vv8 .getAccessor().isSet((inIndex));
            if (out11 .isSet == 1) {
                {
                    vv8 .getAccessor().get((inIndex), out11);
                }
            }
            BitHolder out15 = new BitHolder();
            out15 .value = 1;
            if (!(out11 .isSet == 0)) {
                if (!vv12 .getMutator().setSafe((outIndex), out11)) {
                    out15 .value = 0;
                }
            } else {
                if (!vv12 .getMutator().isSafe((outIndex))) {
                    out15 .value = 0;
                }
            }
            if (out15 .value == 0) {
                return false;
            }
        }
        {
            RepeatedVarCharHolder out19 = new RepeatedVarCharHolder();
            {
                vv16 .getAccessor().get((inIndex), out19);
            }
            writer23 .setPosition((outIndex));
            out19 .copyAsValue(writer23);
            BitHolder out24 = new BitHolder();
            if (writer23 .ok()) {
                out24 .value = 1;
            } else {
                out24 .value = 0;
            }
            if (out24 .value == 0) {
                return false;
            }
        }
        {
            NullableVarCharHolder out28 = new NullableVarCharHolder();
            out28 .isSet = vv25 .getAccessor().isSet((inIndex));
            if (out28 .isSet == 1) {
                {
                    vv25 .getAccessor().get((inIndex), out28);
                }
            }
            BitHolder out32 = new BitHolder();
            out32 .value = 1;
            if (!(out28 .isSet == 0)) {
                if (!vv29 .getMutator().setSafe((outIndex), out28)) {
                    out32 .value = 0;
                }
            } else {
                if (!vv29 .getMutator().isSafe((outIndex))) {
                    out32 .value = 0;
                }
            }
            if (out32 .value == 0) {
                return false;
            }
        }
        {
            return true;
        }
    }

}

        at 
org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:295)
        at 
org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:187)
        at 
org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:368)
        ... 12 more
Caused by: org.codehaus.commons.compiler.CompileException: Line 156, Column 32: 
A method named "copyAsValue" is not declared in any enclosing class nor any 
supertype, nor through a static import
        at 
org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:10056)
        at org.codehaus.janino.UnitCompiler.findIMethod(UnitCompiler.java:7298)
        at org.codehaus.janino.UnitCompiler.compileGet2(UnitCompiler.java:3860)
        at org.codehaus.janino.UnitCompiler.access$6900(UnitCompiler.java:182)
        at 
org.codehaus.janino.UnitCompiler$10.visitMethodInvocation(UnitCompiler.java:3261)
        at org.codehaus.janino.Java$MethodInvocation.accept(Java.java:3978)
        at org.codehaus.janino.UnitCompiler.compileGet(UnitCompiler.java:3288)
        at 
org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4354)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2662)
        at org.codehaus.janino.UnitCompiler.access$4400(UnitCompiler.java:182)
        at 
org.codehaus.janino.UnitCompiler$7.visitMethodInvocation(UnitCompiler.java:2627)
        at org.codehaus.janino.Java$MethodInvocation.accept(Java.java:3978)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2654)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1634)
        at org.codehaus.janino.UnitCompiler.access$1100(UnitCompiler.java:182)
        at 
org.codehaus.janino.UnitCompiler$4.visitExpressionStatement(UnitCompiler.java:940)
        at org.codehaus.janino.Java$ExpressionStatement.accept(Java.java:2085)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:962)
        at 
org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1004)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:989)
        at org.codehaus.janino.UnitCompiler.access$1000(UnitCompiler.java:182)
        at org.codehaus.janino.UnitCompiler$4.visitBlock(UnitCompiler.java:939)
        at org.codehaus.janino.Java$Block.accept(Java.java:2005)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:962)
        at 
org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1004)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2284)
        at 
org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:826)
        at 
org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:798)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:503)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:389)
        at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:182)
        at 
org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:343)
        at 
org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1136)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:350)
        at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:318)
        at 
org.apache.drill.exec.compile.JaninoClassCompiler.getByteCode(JaninoClassCompiler.java:49)
        at 
org.apache.drill.exec.compile.AbstractClassCompiler.getClassByteCode(AbstractClassCompiler.java:39)
        at 
org.apache.drill.exec.compile.QueryClassLoader$ClassCompilerSelector.getClassByteCode(QueryClassLoader.java:140)
        at 
org.apache.drill.exec.compile.QueryClassLoader$ClassCompilerSelector.access$000(QueryClassLoader.java:109)
        at 
org.apache.drill.exec.compile.QueryClassLoader.getClassByteCode(QueryClassLoader.java:102)
        at 
org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:254)
        ... 14 more
Error: exception while executing query: Failure while trying to get next result batch. (state=,code=0)
{code}
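For what it's worth, the only copyAsValue call in the generated class is out19.copyAsValue(writer23), so that is what "Line 156, Column 32" in the Janino error points at, and Janino is reporting that the type of out19 (RepeatedVarCharHolder) declares no such method. A self-contained sketch of that failure mode (the stub class names are made up, not Drill's; the only assumption is janino on the classpath):

{code}
import org.codehaus.janino.SimpleCompiler;

public class CopyAsValueRepro {

    public static void main(String[] args) throws Exception {
        // Drill compiles the generated projector source at runtime with Janino.
        // If the generated body calls a method that the holder type never
        // declares, Janino rejects the whole class, which surfaces as the
        // ClassTransformationException / SchemaChangeException seen above.
        String source =
            "class HolderStub { int start; int end; }\n"
          + "class WriterStub { void setPosition(int i) { } }\n"
          + "public class ProjectorStub {\n"
          + "    public boolean doEval(HolderStub holder, WriterStub writer) {\n"
          + "        writer.setPosition(0);\n"
          + "        holder.copyAsValue(writer); // no such method on HolderStub\n"
          + "        return true;\n"
          + "    }\n"
          + "}\n";

        SimpleCompiler compiler = new SimpleCompiler();
        // Throws org.codehaus.commons.compiler.CompileException:
        // A method named "copyAsValue" is not declared in any enclosing class
        // nor any supertype, nor through a static import
        compiler.cook(source);
    }
}
{code}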



> org.apache.drill.exec.exception.SchemaChangeException when selecting against 
> nested JSON with arrays
> ----------------------------------------------------------------------------------------------------
>
>                 Key: DRILL-1250
>                 URL: https://issues.apache.org/jira/browse/DRILL-1250
>             Project: Apache Drill
>          Issue Type: Bug
>          Components: Storage - JSON
>    Affects Versions: 0.4.0
>         Environment: 5-node hadoop (mapR 3.1.1) cluster
>            Reporter: Andy Pernsteiner
>



--
This message was sent by Atlassian JIRA
(v6.2#6252)
