[ https://issues.apache.org/jira/browse/HIVE-7850?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14107432#comment-14107432 ]

Hive QA commented on HIVE-7850:
-------------------------------



{color:red}Overall{color}: -1 no tests executed

Here are the results of testing the latest attachment:
https://issues.apache.org/jira/secure/attachment/12663651/HIVE-7850.patch

Test results: http://ec2-174-129-184-35.compute-1.amazonaws.com/jenkins/job/PreCommit-HIVE-TRUNK-Build/465/testReport
Console output: http://ec2-174-129-184-35.compute-1.amazonaws.com/jenkins/job/PreCommit-HIVE-TRUNK-Build/465/console
Test logs: http://ec2-174-129-184-35.compute-1.amazonaws.com/logs/PreCommit-HIVE-TRUNK-Build-465/

Messages:
{noformat}
Executing org.apache.hive.ptest.execution.PrepPhase
Tests exited with: NonZeroExitCodeException
Command 'bash /data/hive-ptest/working/scratch/source-prep.sh' failed with exit status 1 and output '+ [[ -n /usr/java/jdk1.7.0_45-cloudera ]]
+ export JAVA_HOME=/usr/java/jdk1.7.0_45-cloudera
+ JAVA_HOME=/usr/java/jdk1.7.0_45-cloudera
+ export PATH=/usr/java/jdk1.7.0_45-cloudera/bin/:/usr/java/jdk1.6.0_34/bin:/usr/local/apache-maven-3.0.5/bin:/usr/local/apache-maven-3.0.5/bin:/usr/java/jdk1.6.0_34/bin:/usr/local/apache-ant-1.9.1/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/home/hiveptest/bin
+ PATH=/usr/java/jdk1.7.0_45-cloudera/bin/:/usr/java/jdk1.6.0_34/bin:/usr/local/apache-maven-3.0.5/bin:/usr/local/apache-maven-3.0.5/bin:/usr/java/jdk1.6.0_34/bin:/usr/local/apache-ant-1.9.1/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/home/hiveptest/bin
+ export 'ANT_OPTS=-Xmx1g -XX:MaxPermSize=256m '
+ ANT_OPTS='-Xmx1g -XX:MaxPermSize=256m '
+ export 'M2_OPTS=-Xmx1g -XX:MaxPermSize=256m -Dhttp.proxyHost=localhost -Dhttp.proxyPort=3128'
+ M2_OPTS='-Xmx1g -XX:MaxPermSize=256m -Dhttp.proxyHost=localhost -Dhttp.proxyPort=3128'
+ cd /data/hive-ptest/working/
+ tee /data/hive-ptest/logs/PreCommit-HIVE-TRUNK-Build-465/source-prep.txt
+ [[ false == \t\r\u\e ]]
+ mkdir -p maven ivy
+ [[ svn = \s\v\n ]]
+ [[ -n '' ]]
+ [[ -d apache-svn-trunk-source ]]
+ [[ ! -d apache-svn-trunk-source/.svn ]]
+ [[ ! -d apache-svn-trunk-source ]]
+ cd apache-svn-trunk-source
+ svn revert -R .
Reverted 'hbase-handler/src/test/results/positive/hbase_custom_key3.q.out'
Reverted 'hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out'
Reverted 'hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory.java'
Reverted 'hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseKeyFactory2.java'
Reverted 'hbase-handler/src/test/queries/positive/hbase_ppd_key_range.q'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseScanRange.java'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/CompositeHBaseKeyFactory.java'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyFactory.java'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyPredicateDecomposer.java'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java'
Reverted 'hbase-handler/src/java/org/apache/hadoop/hive/hbase/ColumnMappings.java'
Reverted 'ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java'
Reverted 'ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java'
Reverted 'ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java'
Reverted 'ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java'
Reverted 'ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g'
Reverted 'ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java'
Reverted 'ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java'
Reverted 'ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java'
++ egrep -v '^X|^Performing status on external'
++ awk '{print $2}'
++ svn status --no-ignore
+ rm -rf target datanucleus.log ant/target shims/target shims/0.20/target shims/0.20S/target shims/0.23/target shims/aggregator/target shims/common/target shims/common-secure/target packaging/target hbase-handler/target hbase-handler/src/test/results/positive/hbase_ppd_or.q.out hbase-handler/src/test/queries/positive/hbase_ppd_or.q hbase-handler/src/java/org/apache/hadoop/hive/hbase/OrPredicateHBaseKeyFactory.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/predicate hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseScanFactory.java testutils/target jdbc/target metastore/target itests/target itests/hcatalog-unit/target itests/test-serde/target itests/qtest/target itests/hive-unit-hadoop2/target itests/hive-minikdc/target itests/hive-unit/target itests/custom-serde/target itests/util/target hcatalog/target hcatalog/core/target hcatalog/streaming/target hcatalog/server-extensions/target hcatalog/webhcat/svr/target hcatalog/webhcat/java-client/target hcatalog/hcatalog-pig-adapter/target accumulo-handler/target hwi/target common/target common/src/gen contrib/target service/target serde/target beeline/target odbc/target cli/target ql/dependency-reduced-pom.xml ql/target
+ svn update

Fetching external item into 'hcatalog/src/test/e2e/harness'
External at revision 1619922.

At revision 1619922.
+ patchCommandPath=/data/hive-ptest/working/scratch/smart-apply-patch.sh
+ patchFilePath=/data/hive-ptest/working/scratch/build.patch
+ [[ -f /data/hive-ptest/working/scratch/build.patch ]]
+ chmod +x /data/hive-ptest/working/scratch/smart-apply-patch.sh
+ /data/hive-ptest/working/scratch/smart-apply-patch.sh /data/hive-ptest/working/scratch/build.patch
The patch does not appear to apply with p0, p1, or p2
+ exit 1
'
{noformat}

This message is automatically generated.

ATTACHMENT ID: 12663651

> Hive Query failed if the data type is array<string> with parquet files
> ----------------------------------------------------------------------
>
>                 Key: HIVE-7850
>                 URL: https://issues.apache.org/jira/browse/HIVE-7850
>             Project: Hive
>          Issue Type: Bug
>          Components: Serializers/Deserializers
>    Affects Versions: 0.14.0, 0.13.1
>            Reporter: Sathish
>            Assignee: Sathish
>              Labels: parquet, serde
>             Fix For: 0.14.0
>
>         Attachments: HIVE-7850.patch
>
>
> * Created a Parquet file from an Avro file that has one array data type and
> the rest primitive types (a hypothetical writer sketch appears at the end of
> this description). The Avro schema of the array field is, e.g.:
> {code}
> { "name" : "action", "type" : [ { "type" : "array", "items" : "string" }, "null" ] }
> {code}
> * Created an external Hive table with the array type as below:
> {code}
> create external table paraArray (action array<string>) partitioned by (partitionid int)
> row format serde 'parquet.hive.serde.ParquetHiveSerDe'
> stored as inputformat 'parquet.hive.MapredParquetInputFormat'
> outputformat 'parquet.hive.MapredParquetOutputFormat'
> location '/testPara';
> alter table paraArray add partition(partitionid=1) location '/testPara';
> {code}
> * Ran the following query (select action from paraArray limit 10); the
> MapReduce jobs fail with the following exception:
> {code}
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row [Error getting row data with exception java.lang.ClassCastException: parquet.hive.writable.BinaryWritable$DicBinaryWritable cannot be cast to org.apache.hadoop.io.ArrayWritable
> at parquet.hive.serde.ParquetHiveArrayInspector.getList(ParquetHiveArrayInspector.java:125)
> at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:315)
> at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:371)
> at org.apache.hadoop.hive.serde2.SerDeUtils.getJSONString(SerDeUtils.java:236)
> at org.apache.hadoop.hive.serde2.SerDeUtils.getJSONString(SerDeUtils.java:222)
> at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:665)
> at org.apache.hadoop.hive.ql.exec.ExecMapper.map(ExecMapper.java:144)
> at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:50)
> at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:405)
> at org.apache.hadoop.mapred.MapTask.run(MapTask.java:336)
> at org.apache.hadoop.mapred.Child$4.run(Child.java:270)
> at java.security.AccessController.doPrivileged(Native Method)
> at javax.security.auth.Subject.doAs(Subject.java:415)
> at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1126)
> at org.apache.hadoop.mapred.Child.main(Child.java:264)
> ]
> at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:671)
> at org.apache.hadoop.hive.ql.exec.ExecMapper.map(ExecMapper.java:144)
> ... 8 more
> {code}
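> The cast at ParquetHiveArrayInspector.getList line 125 assumes every array
> value arrives as an ArrayWritable. Below is a minimal, hypothetical sketch of
> the kind of guard that would avoid this ClassCastException; the attached
> HIVE-7850.patch is the authoritative fix and may differ, and the class and
> method names here are illustrative only:
> {code}
> import java.util.Arrays;
> import java.util.Collections;
> import java.util.List;
>
> import org.apache.hadoop.io.ArrayWritable;
> import org.apache.hadoop.io.Writable;
>
> // Illustrative helper, not the actual ParquetHiveArrayInspector code.
> public class SafeArrayAccess {
>   public static List<Writable> getList(Object data) {
>     if (data == null) {
>       return null;
>     }
>     // Normal case: the array value really is an ArrayWritable.
>     if (data instanceof ArrayWritable) {
>       Writable[] values = ((ArrayWritable) data).get();
>       return values == null ? null : Arrays.asList(values);
>     }
>     // Assumed failure mode from the trace above: a single element surfaces
>     // as its own Writable (e.g. a BinaryWritable); wrap it in a one-element
>     // list instead of casting blindly.
>     if (data instanceof Writable) {
>       return Collections.singletonList((Writable) data);
>     }
>     throw new UnsupportedOperationException(
>         "Cannot inspect " + data.getClass().getCanonicalName());
>   }
> }
> {code}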
> This issue was posted on the Parquet issues list long ago. Since it is
> related to the Parquet Hive SerDe, I have created this Hive issue. The
> details and history are in the link here:
> https://github.com/Parquet/parquet-mr/issues/281.
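>
> For reference, here is a hypothetical sketch (not from this ticket) of
> writing one record with the Avro schema above to a Parquet file via the
> pre-rename parquet.avro API; the output path, record class name, and field
> values are made up for illustration:
> {code}
> import java.util.Arrays;
>
> import org.apache.avro.Schema;
> import org.apache.avro.generic.GenericData;
> import org.apache.avro.generic.GenericRecord;
> import org.apache.hadoop.fs.Path;
>
> import parquet.avro.AvroParquetWriter;
>
> public class WriteParaArray {
>   public static void main(String[] args) throws Exception {
>     // Record schema with the nullable array<string> field from the report.
>     Schema schema = new Schema.Parser().parse(
>         "{\"type\":\"record\",\"name\":\"Event\",\"fields\":["
>         + "{\"name\":\"action\",\"type\":"
>         + "[{\"type\":\"array\",\"items\":\"string\"},\"null\"]}]}");
>
>     GenericRecord record = new GenericData.Record(schema);
>     record.put("action", Arrays.asList("click", "view"));
>
>     // Write a single-record Parquet file under the table location.
>     AvroParquetWriter<GenericRecord> writer =
>         new AvroParquetWriter<GenericRecord>(
>             new Path("/testPara/partitionid=1/part-0.parquet"), schema);
>     writer.write(record);
>     writer.close();
>   }
> }
> {code}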



--
This message was sent by Atlassian JIRA
(v6.2#6252)
