[
https://issues.apache.org/jira/browse/HIVE-17829?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16299913#comment-16299913
]
Hive QA commented on HIVE-17829:
--------------------------------
| (/) *{color:green}+1 overall{color}* |
\\
\\
|| Vote || Subsystem || Runtime || Comment ||
|| || || || {color:brown} Prechecks {color} ||
| {color:blue}0{color} | {color:blue} findbugs {color} | {color:blue} 0m 1s{color} | {color:blue} Findbugs executables are not available. {color} |
| {color:green}+1{color} | {color:green} @author {color} | {color:green} 0m 0s{color} | {color:green} The patch does not contain any @author tags. {color} |
|| || || || {color:brown} master Compile Tests {color} ||
| {color:green}+1{color} | {color:green} mvninstall {color} | {color:green} 6m 36s{color} | {color:green} master passed {color} |
| {color:green}+1{color} | {color:green} compile {color} | {color:green} 0m 18s{color} | {color:green} master passed {color} |
| {color:green}+1{color} | {color:green} checkstyle {color} | {color:green} 0m 9s{color} | {color:green} master passed {color} |
| {color:green}+1{color} | {color:green} javadoc {color} | {color:green} 0m 12s{color} | {color:green} master passed {color} |
|| || || || {color:brown} Patch Compile Tests {color} ||
| {color:green}+1{color} | {color:green} mvninstall {color} | {color:green} 0m 19s{color} | {color:green} the patch passed {color} |
| {color:green}+1{color} | {color:green} compile {color} | {color:green} 0m 16s{color} | {color:green} the patch passed {color} |
| {color:green}+1{color} | {color:green} javac {color} | {color:green} 0m 16s{color} | {color:green} the patch passed {color} |
| {color:green}+1{color} | {color:green} checkstyle {color} | {color:green} 0m 9s{color} | {color:green} hbase-handler: The patch generated 0 new + 6 unchanged - 4 fixed = 6 total (was 10) {color} |
| {color:green}+1{color} | {color:green} whitespace {color} | {color:green} 0m 0s{color} | {color:green} The patch has no whitespace issues. {color} |
| {color:green}+1{color} | {color:green} javadoc {color} | {color:green} 0m 11s{color} | {color:green} the patch passed {color} |
|| || || || {color:brown} Other Tests {color} ||
| {color:green}+1{color} | {color:green} asflicense {color} | {color:green} 0m 12s{color} | {color:green} The patch does not generate ASF License warnings. {color} |
| {color:black}{color} | {color:black} {color} | {color:black} 8m 38s{color} | {color:black} {color} |
\\
\\
|| Subsystem || Report/Notes ||
| Optional Tests | asflicense javac javadoc findbugs checkstyle compile |
| uname | Linux hiveptest-server-upstream 3.16.0-4-amd64 #1 SMP Debian 3.16.36-1+deb8u1 (2016-09-03) x86_64 GNU/Linux |
| Build tool | maven |
| Personality | /data/hiveptest/working/yetus/dev-support/hive-personality.sh |
| git revision | master / ad5bcb1 |
| Default Java | 1.8.0_111 |
| modules | C: hbase-handler U: hbase-handler |
| Console output | http://104.198.109.242/logs//PreCommit-HIVE-Build-8353/yetus.txt |
| Powered by | Apache Yetus http://yetus.apache.org |
This message was automatically generated.
> ArrayIndexOutOfBoundsException - HBASE-backed tables with Avro schema in Hive2
> ------------------------------------------------------------------------------
>
> Key: HIVE-17829
> URL: https://issues.apache.org/jira/browse/HIVE-17829
> Project: Hive
> Issue Type: Bug
> Components: HBase Handler
> Affects Versions: 2.1.0
> Reporter: Chiran Ravani
> Assignee: anishek
> Priority: Critical
> Labels: pull-request-available
> Attachments: HIVE-17829.0.patch, HIVE-17829.1.patch
>
>
> Stack
> {code}
> 2017-10-09T09:39:54,804 ERROR [HiveServer2-Background-Pool: Thread-95]: metadata.Table (Table.java:getColsInternal(642)) - Unable to get field from serde: org.apache.hadoop.hive.hbase.HBaseSerDe
> java.lang.ArrayIndexOutOfBoundsException: 1
> at java.util.Arrays$ArrayList.get(Arrays.java:3841) ~[?:1.8.0_77]
> at org.apache.hadoop.hive.serde2.BaseStructObjectInspector.init(BaseStructObjectInspector.java:104) ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.init(LazySimpleStructObjectInspector.java:97) ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector.<init>(LazySimpleStructObjectInspector.java:77) ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(LazyObjectInspectorFactory.java:115) ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.hbase.HBaseLazyObjectFactory.createLazyHBaseStructInspector(HBaseLazyObjectFactory.java:79) ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.hbase.HBaseSerDe.initialize(HBaseSerDe.java:127) ~[hive-hbase-handler-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(AbstractSerDe.java:54) ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.serde2.SerDeUtils.initializeSerDe(SerDeUtils.java:531) ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:424) ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:411) ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:279) ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:261) ~[hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.metadata.Table.getColsInternal(Table.java:639) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:622) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:833) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:869) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4228) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:347) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:197) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1905) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1607) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1354) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1123) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1116) [hive-exec-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:242) [hive-service-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hive.service.cli.operation.SQLOperation.access$800(SQLOperation.java:91) [hive-service-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:334) [hive-service-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_77]
> at javax.security.auth.Subject.doAs(Subject.java:422) [?:1.8.0_77]
> at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866) [hadoop-common-2.7.3.2.6.2.0-205.jar:?]
> at org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:348) [hive-service-2.1.0.2.6.2.0-205.jar:2.1.0.2.6.2.0-205]
> at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [?:1.8.0_77]
> at java.util.concurrent.FutureTask.run(FutureTask.java:266) [?:1.8.0_77]
> at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [?:1.8.0_77]
> at java.util.concurrent.FutureTask.run(FutureTask.java:266) [?:1.8.0_77]
> at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_77]
> at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_77]
> at java.lang.Thread.run(Thread.java:745) [?:1.8.0_77]
> {code}
> Steps to Repro:
> {code}
> Create HBase Table:
> ========================
> create 'hbase_avro_table', 'test_col_fam', 'test_col'
> Create Hive Table:
> =========================
> CREATE EXTERNAL TABLE test_hbase_avro2
> ROW FORMAT SERDE 'org.apache.hadoop.hive.hbase.HBaseSerDe'
> STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
> WITH SERDEPROPERTIES (
> "hbase.columns.mapping" = ":key,test_col_fam:test_col",
> "test_col_fam.test_col.serialization.type" = "avro",
> "test_col_fam.test_col.avro.schema.url" =
> "hdfs://rpathak-h1.openstacklocal:8020/user/hive/schema.avsc")
> TBLPROPERTIES (
> "hbase.table.name" = "hbase_avro_table",
> "hbase.mapred.output.outputtable" = "hbase_avro_table",
> "hbase.struct.autogenerate"="true",
> "avro.schema.literal"='{
> "type": "record",
> "name": "test_hbase_avro",
> "fields": [
> { "name":"test_col", "type":"string"}
> ]
> }');
> {code}
> The same query works with Hive 1.2.1.
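> For context on where the exception comes from: the trace fails inside BaseStructObjectInspector.init while it indexes parallel per-column lists, which suggests those lists end up with different lengths once the Avro-typed column is mapped. Below is a minimal, self-contained sketch of that kind of index mismatch (plain Java, not Hive code; the two-versus-one list sizes and the class/variable names are illustrative assumptions, not the actual Hive data). It reproduces the same java.util.Arrays$ArrayList.get failure seen at the top of the stack:
> {code}
> import java.util.Arrays;
> import java.util.List;
>
> public class IndexMismatchSketch {
>     public static void main(String[] args) {
>         // Two mapped columns, as in "hbase.columns.mapping" = ":key,test_col_fam:test_col" ...
>         List<String> fieldNames = Arrays.asList("key", "test_col");
>         // ... paired with a hypothetical parallel list that holds only one entry.
>         List<String> fieldInspectors = Arrays.asList("string-inspector");
>         for (int i = 0; i < fieldNames.size(); i++) {
>             // Second iteration throws ArrayIndexOutOfBoundsException (index 1)
>             // from java.util.Arrays$ArrayList.get, matching the stack above.
>             System.out.println(fieldNames.get(i) + " -> " + fieldInspectors.get(i));
>         }
>     }
> }
> {code}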
--
This message was sent by Atlassian JIRA
(v6.4.14#64029)