[jira] [Commented] (HIVE-17829) ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2
[ https://issues.apache.org/jira/browse/HIVE-17829?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16559257#comment-16559257 ] ASF GitHub Bot commented on HIVE-17829: --- Github user anishek closed the pull request at: https://github.com/apache/hive/pull/283 > ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2 > -- > > Key: HIVE-17829 > URL: https://issues.apache.org/jira/browse/HIVE-17829 > Project: Hive > Issue Type: Bug > Components: HBase Handler >Affects Versions: 2.1.0 >Reporter: Chiran Ravani >Assignee: anishek >Priority: Critical > Labels: pull-request-available > Fix For: 3.0.0 > > Attachments: HIVE-17829.0.patch, HIVE-17829.1.patch > > > Stack > {code} > 2017-10-09T09:39:54,804 ERROR [HiveServer2-Background-Pool: Thread-95]: > metadata.Table (Table.java:getColsInternal(642)) - Unable to get field from > serde: org.apache.hadoop.hive.hbase.HBaseSerDe > java.lang.ArrayIndexOutOfBoundsException: 1 > at java.util.Arrays$ArrayList.get(Arrays.java:3841) ~[?:1.8.0_77] > at > org.apache.hadoop.hive.serde2.BaseStructObjectInspector.init(BaseStructObjectInspector.java:104) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.init(LazySimpleStructObjectInspector.java:97) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.(LazySimpleStructObjectInspector.java:77) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(LazyObjectInspectorFactory.java:115) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.hbase.HBaseLazyObjectFactory.createLazyHBaseStructInspector(HBaseLazyObjectFactory.java:79) > ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > 
org.apache.hadoop.hive.hbase.HBaseSerDe.initialize(HBaseSerDe.java:127) > ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(AbstractSerDe.java:54) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.SerDeUtils.initializeSerDe(SerDeUtils.java:531) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:424) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:411) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:279) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:261) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getColsInternal(Table.java:639) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:622) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:833) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:869) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4228) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:347) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:197) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100) > 
[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1905) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1607) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1354) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1123) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at
[jira] [Commented] (HIVE-17829) ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2
[ https://issues.apache.org/jira/browse/HIVE-17829?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16300955#comment-16300955 ] anishek commented on HIVE-17829: Test failures are not related to this patch. Patch committed to master. Thanks [~thejas] for the review! > ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2 > -- > > Key: HIVE-17829 > URL: https://issues.apache.org/jira/browse/HIVE-17829 > Project: Hive > Issue Type: Bug > Components: HBase Handler >Affects Versions: 2.1.0 >Reporter: Chiran Ravani >Assignee: anishek >Priority: Critical > Labels: pull-request-available > Fix For: 3.0.0 > > Attachments: HIVE-17829.0.patch, HIVE-17829.1.patch > > > Stack > {code} > 2017-10-09T09:39:54,804 ERROR [HiveServer2-Background-Pool: Thread-95]: > metadata.Table (Table.java:getColsInternal(642)) - Unable to get field from > serde: org.apache.hadoop.hive.hbase.HBaseSerDe > java.lang.ArrayIndexOutOfBoundsException: 1 > at java.util.Arrays$ArrayList.get(Arrays.java:3841) ~[?:1.8.0_77] > at > org.apache.hadoop.hive.serde2.BaseStructObjectInspector.init(BaseStructObjectInspector.java:104) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.init(LazySimpleStructObjectInspector.java:97) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.(LazySimpleStructObjectInspector.java:77) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(LazyObjectInspectorFactory.java:115) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.hbase.HBaseLazyObjectFactory.createLazyHBaseStructInspector(HBaseLazyObjectFactory.java:79) > ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > 
org.apache.hadoop.hive.hbase.HBaseSerDe.initialize(HBaseSerDe.java:127) > ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(AbstractSerDe.java:54) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.SerDeUtils.initializeSerDe(SerDeUtils.java:531) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:424) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:411) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:279) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:261) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getColsInternal(Table.java:639) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:622) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:833) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:869) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4228) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:347) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:197) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100) > 
[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1905) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1607) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1354) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1123) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at
[jira] [Commented] (HIVE-17829) ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2
[ https://issues.apache.org/jira/browse/HIVE-17829?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16299983#comment-16299983 ] Hive QA commented on HIVE-17829: Here are the results of testing the latest attachment: https://issues.apache.org/jira/secure/attachment/12903163/HIVE-17829.1.patch {color:green}SUCCESS:{color} +1 due to 1 test(s) being added or modified. {color:red}ERROR:{color} -1 due to 20 failed/errored test(s), 11538 tests executed *Failed tests:* {noformat} org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[auto_join25] (batchId=72) org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[fp_literal_arithmetic] (batchId=68) org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[ppd_join5] (batchId=35) org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[bucketsortoptimize_insert_2] (batchId=151) org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[hybridgrace_hashjoin_2] (batchId=156) org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[insert_values_orig_table_use_metadata] (batchId=164) org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[llap_acid] (batchId=168) org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[llap_acid_fast] (batchId=159) org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[sysdb] (batchId=159) org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[vector_reduce_groupby_duplicate_cols] (batchId=158) org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[authorization_part] (batchId=93) org.apache.hadoop.hive.cli.TestSparkCliDriver.testCliDriver[ppd_join5] (batchId=120) org.apache.hadoop.hive.metastore.TestEmbeddedHiveMetaStore.testTransactionalValidation (batchId=213) org.apache.hadoop.hive.ql.TestAcidOnTez.testMapJoinOnTez (batchId=222) org.apache.hadoop.hive.ql.parse.TestReplicationScenarios.testConstraints (batchId=225) 
org.apache.hive.hcatalog.pig.TestHCatLoaderComplexSchema.testSyntheticComplexSchema[5] (batchId=190) org.apache.hive.hcatalog.pig.TestHCatLoaderComplexSchema.testTupleInBagInTupleInBag[5] (batchId=190) org.apache.hive.jdbc.TestSSL.testConnectionMismatch (batchId=231) org.apache.hive.jdbc.TestSSL.testConnectionWrongCertCN (batchId=231) org.apache.hive.jdbc.TestSSL.testMetastoreConnectionWrongCertCN (batchId=231) {noformat} Test results: https://builds.apache.org/job/PreCommit-HIVE-Build/8353/testReport Console output: https://builds.apache.org/job/PreCommit-HIVE-Build/8353/console Test logs: http://104.198.109.242/logs/PreCommit-HIVE-Build-8353/ Messages: {noformat} Executing org.apache.hive.ptest.execution.TestCheckPhase Executing org.apache.hive.ptest.execution.PrepPhase Executing org.apache.hive.ptest.execution.YetusPhase Executing org.apache.hive.ptest.execution.ExecutionPhase Executing org.apache.hive.ptest.execution.ReportingPhase Tests exited with: TestsFailedException: 20 tests failed {noformat} This message is automatically generated. 
ATTACHMENT ID: 12903163 - PreCommit-HIVE-Build > ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2 > -- > > Key: HIVE-17829 > URL: https://issues.apache.org/jira/browse/HIVE-17829 > Project: Hive > Issue Type: Bug > Components: HBase Handler >Affects Versions: 2.1.0 >Reporter: Chiran Ravani >Assignee: anishek >Priority: Critical > Labels: pull-request-available > Attachments: HIVE-17829.0.patch, HIVE-17829.1.patch > > > Stack > {code} > 2017-10-09T09:39:54,804 ERROR [HiveServer2-Background-Pool: Thread-95]: > metadata.Table (Table.java:getColsInternal(642)) - Unable to get field from > serde: org.apache.hadoop.hive.hbase.HBaseSerDe > java.lang.ArrayIndexOutOfBoundsException: 1 > at java.util.Arrays$ArrayList.get(Arrays.java:3841) ~[?:1.8.0_77] > at > org.apache.hadoop.hive.serde2.BaseStructObjectInspector.init(BaseStructObjectInspector.java:104) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.init(LazySimpleStructObjectInspector.java:97) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.(LazySimpleStructObjectInspector.java:77) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(LazyObjectInspectorFactory.java:115) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.hbase.HBaseLazyObjectFactory.createLazyHBaseStructInspector(HBaseLazyObjectFactory.java:79) >
[jira] [Commented] (HIVE-17829) ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2
[ https://issues.apache.org/jira/browse/HIVE-17829?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16299913#comment-16299913 ] Hive QA commented on HIVE-17829: | (/) *{color:green}+1 overall{color}* | \\ \\ || Vote || Subsystem || Runtime || Comment || || || || || {color:brown} Prechecks {color} || | {color:blue}0{color} | {color:blue} findbugs {color} | {color:blue} 0m 1s{color} | {color:blue} Findbugs executables are not available. {color} | | {color:green}+1{color} | {color:green} @author {color} | {color:green} 0m 0s{color} | {color:green} The patch does not contain any @author tags. {color} | || || || || {color:brown} master Compile Tests {color} || | {color:green}+1{color} | {color:green} mvninstall {color} | {color:green} 6m 36s{color} | {color:green} master passed {color} | | {color:green}+1{color} | {color:green} compile {color} | {color:green} 0m 18s{color} | {color:green} master passed {color} | | {color:green}+1{color} | {color:green} checkstyle {color} | {color:green} 0m 9s{color} | {color:green} master passed {color} | | {color:green}+1{color} | {color:green} javadoc {color} | {color:green} 0m 12s{color} | {color:green} master passed {color} | || || || || {color:brown} Patch Compile Tests {color} || | {color:green}+1{color} | {color:green} mvninstall {color} | {color:green} 0m 19s{color} | {color:green} the patch passed {color} | | {color:green}+1{color} | {color:green} compile {color} | {color:green} 0m 16s{color} | {color:green} the patch passed {color} | | {color:green}+1{color} | {color:green} javac {color} | {color:green} 0m 16s{color} | {color:green} the patch passed {color} | | {color:green}+1{color} | {color:green} checkstyle {color} | {color:green} 0m 9s{color} | {color:green} hbase-handler: The patch generated 0 new + 6 unchanged - 4 fixed = 6 total (was 10) {color} | | {color:green}+1{color} | {color:green} whitespace {color} | {color:green} 0m 0s{color} | {color:green} The patch has no whitespace issues. 
{color} | | {color:green}+1{color} | {color:green} javadoc {color} | {color:green} 0m 11s{color} | {color:green} the patch passed {color} | || || || || {color:brown} Other Tests {color} || | {color:green}+1{color} | {color:green} asflicense {color} | {color:green} 0m 12s{color} | {color:green} The patch does not generate ASF License warnings. {color} | | {color:black}{color} | {color:black} {color} | {color:black} 8m 38s{color} | {color:black} {color} | \\ \\ || Subsystem || Report/Notes || | Optional Tests | asflicense javac javadoc findbugs checkstyle compile | | uname | Linux hiveptest-server-upstream 3.16.0-4-amd64 #1 SMP Debian 3.16.36-1+deb8u1 (2016-09-03) x86_64 GNU/Linux | | Build tool | maven | | Personality | /data/hiveptest/working/yetus/dev-support/hive-personality.sh | | git revision | master / ad5bcb1 | | Default Java | 1.8.0_111 | | modules | C: hbase-handler U: hbase-handler | | Console output | http://104.198.109.242/logs//PreCommit-HIVE-Build-8353/yetus.txt | | Powered by | Apache Yetus http://yetus.apache.org | This message was automatically generated. 
> ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2 > -- > > Key: HIVE-17829 > URL: https://issues.apache.org/jira/browse/HIVE-17829 > Project: Hive > Issue Type: Bug > Components: HBase Handler >Affects Versions: 2.1.0 >Reporter: Chiran Ravani >Assignee: anishek >Priority: Critical > Labels: pull-request-available > Attachments: HIVE-17829.0.patch, HIVE-17829.1.patch > > > Stack > {code} > 2017-10-09T09:39:54,804 ERROR [HiveServer2-Background-Pool: Thread-95]: > metadata.Table (Table.java:getColsInternal(642)) - Unable to get field from > serde: org.apache.hadoop.hive.hbase.HBaseSerDe > java.lang.ArrayIndexOutOfBoundsException: 1 > at java.util.Arrays$ArrayList.get(Arrays.java:3841) ~[?:1.8.0_77] > at > org.apache.hadoop.hive.serde2.BaseStructObjectInspector.init(BaseStructObjectInspector.java:104) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.init(LazySimpleStructObjectInspector.java:97) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.(LazySimpleStructObjectInspector.java:77) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(LazyObjectInspectorFactory.java:115) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at >
[jira] [Commented] (HIVE-17829) ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2
[ https://issues.apache.org/jira/browse/HIVE-17829?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16299503#comment-16299503 ] Thejas M Nair commented on HIVE-17829: -- +1 > ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2 > -- > > Key: HIVE-17829 > URL: https://issues.apache.org/jira/browse/HIVE-17829 > Project: Hive > Issue Type: Bug > Components: HBase Handler >Affects Versions: 2.1.0 >Reporter: Chiran Ravani >Assignee: anishek >Priority: Critical > Labels: pull-request-available > Attachments: HIVE-17829.0.patch, HIVE-17829.1.patch > > > Stack > {code} > 2017-10-09T09:39:54,804 ERROR [HiveServer2-Background-Pool: Thread-95]: > metadata.Table (Table.java:getColsInternal(642)) - Unable to get field from > serde: org.apache.hadoop.hive.hbase.HBaseSerDe > java.lang.ArrayIndexOutOfBoundsException: 1 > at java.util.Arrays$ArrayList.get(Arrays.java:3841) ~[?:1.8.0_77] > at > org.apache.hadoop.hive.serde2.BaseStructObjectInspector.init(BaseStructObjectInspector.java:104) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.init(LazySimpleStructObjectInspector.java:97) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.(LazySimpleStructObjectInspector.java:77) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(LazyObjectInspectorFactory.java:115) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.hbase.HBaseLazyObjectFactory.createLazyHBaseStructInspector(HBaseLazyObjectFactory.java:79) > ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.hbase.HBaseSerDe.initialize(HBaseSerDe.java:127) > 
~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(AbstractSerDe.java:54) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.SerDeUtils.initializeSerDe(SerDeUtils.java:531) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:424) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:411) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:279) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:261) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getColsInternal(Table.java:639) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:622) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:833) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:869) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4228) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:347) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:197) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at 
org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1905) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1607) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1354) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1123) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1116) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at
[jira] [Commented] (HIVE-17829) ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2
[ https://issues.apache.org/jira/browse/HIVE-17829?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16298131#comment-16298131 ] ASF GitHub Bot commented on HIVE-17829: --- GitHub user anishek opened a pull request: https://github.com/apache/hive/pull/283 HIVE-17829: ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2 You can merge this pull request into a Git repository by running: $ git pull https://github.com/anishek/hive HIVE-17829 Alternatively you can review and apply these changes as the patch at: https://github.com/apache/hive/pull/283.patch To close this pull request, make a commit to your master/trunk branch with (at least) the following in the commit message: This closes #283 commit 5c0b586af9323a2095c24507665064a32252834b Author: Anishek Agarwal Date: 2017-12-20T08:57:43Z HIVE-17829: ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2 > ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2 > -- > > Key: HIVE-17829 > URL: https://issues.apache.org/jira/browse/HIVE-17829 > Project: Hive > Issue Type: Bug > Components: HBase Handler >Affects Versions: 2.1.0 >Reporter: Chiran Ravani >Assignee: anishek >Priority: Critical > Labels: pull-request-available > Attachments: HIVE-17829.0.patch, HIVE-17829.1.patch > > > Stack > {code} > 2017-10-09T09:39:54,804 ERROR [HiveServer2-Background-Pool: Thread-95]: > metadata.Table (Table.java:getColsInternal(642)) - Unable to get field from > serde: org.apache.hadoop.hive.hbase.HBaseSerDe > java.lang.ArrayIndexOutOfBoundsException: 1 > at java.util.Arrays$ArrayList.get(Arrays.java:3841) ~[?:1.8.0_77] > at > org.apache.hadoop.hive.serde2.BaseStructObjectInspector.init(BaseStructObjectInspector.java:104) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.init(LazySimpleStructObjectInspector.java:97) > 
~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.(LazySimpleStructObjectInspector.java:77) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(LazyObjectInspectorFactory.java:115) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.hbase.HBaseLazyObjectFactory.createLazyHBaseStructInspector(HBaseLazyObjectFactory.java:79) > ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.hbase.HBaseSerDe.initialize(HBaseSerDe.java:127) > ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(AbstractSerDe.java:54) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.SerDeUtils.initializeSerDe(SerDeUtils.java:531) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:424) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:411) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:279) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:261) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getColsInternal(Table.java:639) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:622) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:833) > 
[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:869) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4228) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:347) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:197) >
[jira] [Commented] (HIVE-17829) ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2
[ https://issues.apache.org/jira/browse/HIVE-17829?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16296205#comment-16296205 ] anishek commented on HIVE-17829: [~thejas] will look at the tests to add one. Yeh looks like if there are comments then users should be able to get around this issue. > ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2 > -- > > Key: HIVE-17829 > URL: https://issues.apache.org/jira/browse/HIVE-17829 > Project: Hive > Issue Type: Bug > Components: HBase Handler >Affects Versions: 2.1.0 >Reporter: Chiran Ravani >Assignee: anishek >Priority: Critical > Attachments: HIVE-17829.0.patch > > > Stack > {code} > 2017-10-09T09:39:54,804 ERROR [HiveServer2-Background-Pool: Thread-95]: > metadata.Table (Table.java:getColsInternal(642)) - Unable to get field from > serde: org.apache.hadoop.hive.hbase.HBaseSerDe > java.lang.ArrayIndexOutOfBoundsException: 1 > at java.util.Arrays$ArrayList.get(Arrays.java:3841) ~[?:1.8.0_77] > at > org.apache.hadoop.hive.serde2.BaseStructObjectInspector.init(BaseStructObjectInspector.java:104) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.init(LazySimpleStructObjectInspector.java:97) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.<init>(LazySimpleStructObjectInspector.java:77) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(LazyObjectInspectorFactory.java:115) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.hbase.HBaseLazyObjectFactory.createLazyHBaseStructInspector(HBaseLazyObjectFactory.java:79) > ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > 
org.apache.hadoop.hive.hbase.HBaseSerDe.initialize(HBaseSerDe.java:127) > ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(AbstractSerDe.java:54) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.SerDeUtils.initializeSerDe(SerDeUtils.java:531) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:424) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:411) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:279) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:261) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getColsInternal(Table.java:639) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:622) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:833) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:869) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4228) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:347) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:197) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100) > 
[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1905) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1607) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1354) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1123) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1116) >
[jira] [Commented] (HIVE-17829) ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2
[ https://issues.apache.org/jira/browse/HIVE-17829?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16295677#comment-16295677 ] Thejas M Nair commented on HIVE-17829: -- The change looks good. Can you also add a UT ? For users without a fix, can they workaround it by adding comments ? > ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2 > -- > > Key: HIVE-17829 > URL: https://issues.apache.org/jira/browse/HIVE-17829 > Project: Hive > Issue Type: Bug > Components: HBase Handler >Affects Versions: 2.1.0 >Reporter: Chiran Ravani >Assignee: anishek >Priority: Critical > Attachments: HIVE-17829.0.patch > > > Stack > {code} > 2017-10-09T09:39:54,804 ERROR [HiveServer2-Background-Pool: Thread-95]: > metadata.Table (Table.java:getColsInternal(642)) - Unable to get field from > serde: org.apache.hadoop.hive.hbase.HBaseSerDe > java.lang.ArrayIndexOutOfBoundsException: 1 > at java.util.Arrays$ArrayList.get(Arrays.java:3841) ~[?:1.8.0_77] > at > org.apache.hadoop.hive.serde2.BaseStructObjectInspector.init(BaseStructObjectInspector.java:104) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.init(LazySimpleStructObjectInspector.java:97) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.(LazySimpleStructObjectInspector.java:77) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(LazyObjectInspectorFactory.java:115) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.hbase.HBaseLazyObjectFactory.createLazyHBaseStructInspector(HBaseLazyObjectFactory.java:79) > ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > 
org.apache.hadoop.hive.hbase.HBaseSerDe.initialize(HBaseSerDe.java:127) > ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(AbstractSerDe.java:54) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.serde2.SerDeUtils.initializeSerDe(SerDeUtils.java:531) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:424) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:411) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:279) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:261) > ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.metadata.Table.getColsInternal(Table.java:639) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:622) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:833) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:869) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4228) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:347) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:197) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at > org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100) > 
[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1905) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1607) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1354) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1123) > [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205] > at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1116) >