[ https://issues.apache.org/jira/browse/HIVE-7850?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14111075#comment-14111075 ]

Hive QA commented on HIVE-7850:
-------------------------------



{color:red}Overall{color}: -1 no tests executed

Here are the results of testing the latest attachment:
https://issues.apache.org/jira/secure/attachment/12664368/HIVE-7850.2.patch

Test results: 
http://ec2-174-129-184-35.compute-1.amazonaws.com/jenkins/job/PreCommit-HIVE-TRUNK-Build/506/testReport
Console output: 
http://ec2-174-129-184-35.compute-1.amazonaws.com/jenkins/job/PreCommit-HIVE-TRUNK-Build/506/console
Test logs: 
http://ec2-174-129-184-35.compute-1.amazonaws.com/logs/PreCommit-HIVE-TRUNK-Build-506/

Messages:
{noformat}
Executing org.apache.hive.ptest.execution.PrepPhase
Tests exited with: NonZeroExitCodeException
Command 'bash /data/hive-ptest/working/scratch/source-prep.sh' failed with exit status 1 and output '+ [[ -n /usr/java/jdk1.7.0_45-cloudera ]]
+ export JAVA_HOME=/usr/java/jdk1.7.0_45-cloudera
+ JAVA_HOME=/usr/java/jdk1.7.0_45-cloudera
+ export PATH=/usr/java/jdk1.7.0_45-cloudera/bin/:/usr/java/jdk1.6.0_34/bin:/usr/local/apache-maven-3.0.5/bin:/usr/local/apache-maven-3.0.5/bin:/usr/java/jdk1.6.0_34/bin:/usr/local/apache-ant-1.9.1/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/home/hiveptest/bin
+ PATH=/usr/java/jdk1.7.0_45-cloudera/bin/:/usr/java/jdk1.6.0_34/bin:/usr/local/apache-maven-3.0.5/bin:/usr/local/apache-maven-3.0.5/bin:/usr/java/jdk1.6.0_34/bin:/usr/local/apache-ant-1.9.1/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/home/hiveptest/bin
+ export 'ANT_OPTS=-Xmx1g -XX:MaxPermSize=256m '
+ ANT_OPTS='-Xmx1g -XX:MaxPermSize=256m '
+ export 'M2_OPTS=-Xmx1g -XX:MaxPermSize=256m -Dhttp.proxyHost=localhost -Dhttp.proxyPort=3128'
+ M2_OPTS='-Xmx1g -XX:MaxPermSize=256m -Dhttp.proxyHost=localhost -Dhttp.proxyPort=3128'
+ cd /data/hive-ptest/working/
+ tee /data/hive-ptest/logs/PreCommit-HIVE-TRUNK-Build-506/source-prep.txt
+ [[ false == \t\r\u\e ]]
+ mkdir -p maven ivy
+ [[ svn = \s\v\n ]]
+ [[ -n '' ]]
+ [[ -d apache-svn-trunk-source ]]
+ [[ ! -d apache-svn-trunk-source/.svn ]]
+ [[ ! -d apache-svn-trunk-source ]]
+ cd apache-svn-trunk-source
+ svn revert -R .
Reverted 'common/src/java/org/apache/hadoop/hive/conf/HiveConf.java'
Reverted 'service/src/java/org/apache/hive/service/cli/ICLIService.java'
Reverted 'service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java'
Reverted 'service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java'
Reverted 'service/src/java/org/apache/hive/service/cli/CLIServiceClient.java'
Reverted 'service/src/java/org/apache/hive/service/cli/CLIService.java'
Reverted 'service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java'
Reverted 'service/src/java/org/apache/hive/service/cli/session/HiveSession.java'
Reverted 'service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java'
Reverted 'service/src/java/org/apache/hive/service/cli/session/HiveSessionBase.java'
Reverted 'service/src/java/org/apache/hive/service/cli/session/SessionManager.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/Operation.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/OperationManager.java'
Reverted 'service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java'
Reverted 'service/src/gen/thrift/gen-py/TCLIService/ttypes.py'
Reverted 'service/src/gen/thrift/gen-cpp/TCLIService_types.cpp'
Reverted 'service/src/gen/thrift/gen-cpp/TCLIService_types.h'
Reverted 'service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb'
Reverted 'service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsReq.java'
Reverted 'service/if/TCLIService.thrift'
++ egrep -v '^X|^Performing status on external'
++ awk '{print $2}'
++ svn status --no-ignore
+ rm -rf target datanucleus.log ant/target shims/target shims/0.20/target shims/0.20S/target shims/0.23/target shims/aggregator/target shims/common/target shims/common-secure/target packaging/target hbase-handler/target testutils/target jdbc/target metastore/target itests/target itests/hcatalog-unit/target itests/test-serde/target itests/qtest/target itests/hive-unit-hadoop2/target itests/hive-minikdc/target itests/hive-unit/target itests/custom-serde/target itests/util/target hcatalog/target hcatalog/core/target hcatalog/streaming/target hcatalog/server-extensions/target hcatalog/hcatalog-pig-adapter/target hcatalog/webhcat/svr/target hcatalog/webhcat/java-client/target accumulo-handler/target hwi/target common/target common/src/gen service/target service/src/test/org/apache/hive/service/cli/operation service/src/java/org/apache/hive/service/cli/FetchType.java service/src/java/org/apache/hive/service/cli/operation/OperationLog.java service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java contrib/target serde/target beeline/target odbc/target cli/target ql/dependency-reduced-pom.xml ql/target
+ svn update
U    ql/src/test/queries/clientpositive/optimize_nullscan.q
U    ql/src/test/results/clientpositive/optimize_nullscan.q.out
U    ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out

Fetching external item into 'hcatalog/src/test/e2e/harness'
Updated external to revision 1620682.

Updated to revision 1620682.
+ patchCommandPath=/data/hive-ptest/working/scratch/smart-apply-patch.sh
+ patchFilePath=/data/hive-ptest/working/scratch/build.patch
+ [[ -f /data/hive-ptest/working/scratch/build.patch ]]
+ chmod +x /data/hive-ptest/working/scratch/smart-apply-patch.sh
+ /data/hive-ptest/working/scratch/smart-apply-patch.sh /data/hive-ptest/working/scratch/build.patch
The patch does not appear to apply with p0, p1, or p2
+ exit 1
'
{noformat}

This message is automatically generated.

ATTACHMENT ID: 12664368

> Hive Query failed if the data type is array<string> with parquet files
> ----------------------------------------------------------------------
>
>                 Key: HIVE-7850
>                 URL: https://issues.apache.org/jira/browse/HIVE-7850
>             Project: Hive
>          Issue Type: Bug
>          Components: Serializers/Deserializers
>    Affects Versions: 0.14.0, 0.13.1
>            Reporter: Sathish
>            Assignee: Sathish
>              Labels: parquet, serde
>             Fix For: 0.14.0
>
>         Attachments: HIVE-7850.1.patch, HIVE-7850.2.patch, HIVE-7850.patch
>
>
> * Created a Parquet file from an Avro file which has one array data type; the rest are primitive types. The Avro schema of the array field, e.g.:
> {code}
> { "name" : "action", "type" : [ { "type" : "array", "items" : "string" }, "null" ] }
> {code}
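> As a quick sanity check, the field can be wrapped in a minimal record and parsed with Avro to confirm it is a union of array<string> and null (the record wrapper and class name below are made up for illustration; they are not from the original files):
> {code}
> import org.apache.avro.Schema;
>
> public class AvroSchemaCheck {
>     public static void main(String[] args) {
>         // Wrap the "action" field in a minimal record so Avro can parse it.
>         String schemaJson =
>             "{ \"type\": \"record\", \"name\": \"Event\", \"fields\": ["
>           + "  { \"name\" : \"action\", \"type\" : [ { \"type\" : \"array\", \"items\" : \"string\" }, \"null\" ] }"
>           + "] }";
>         Schema schema = new Schema.Parser().parse(schemaJson);
>         // Prints the union: [{"type":"array","items":"string"},"null"]
>         System.out.println(schema.getField("action").schema());
>     }
> }
> {code}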
> * Created an external Hive table with the array type as below (the original post wrote the column as a bare Array; per the issue title the intended type is array<string>):
> {code}
> create external table paraArray (action array<string>)
> partitioned by (partitionid int)
> row format serde 'parquet.hive.serde.ParquetHiveSerDe'
> stored as
>   inputformat 'parquet.hive.MapredParquetInputFormat'
>   outputformat 'parquet.hive.MapredParquetOutputFormat'
> location '/testPara';
> alter table paraArray add partition(partitionid=1) location '/testPara';
> {code}
> * Ran the following query (select action from paraArray limit 10); the MapReduce jobs fail with the following exception.
> {code}
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row [Error getting row data with exception java.lang.ClassCastException: parquet.hive.writable.BinaryWritable$DicBinaryWritable cannot be cast to org.apache.hadoop.io.ArrayWritable
> at parquet.hive.serde.ParquetHiveArrayInspector.getList(ParquetHiveArrayInspector.java:125)
> at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:315)
> at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:371)
> at org.apache.hadoop.hive.serde2.SerDeUtils.getJSONString(SerDeUtils.java:236)
> at org.apache.hadoop.hive.serde2.SerDeUtils.getJSONString(SerDeUtils.java:222)
> at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:665)
> at org.apache.hadoop.hive.ql.exec.ExecMapper.map(ExecMapper.java:144)
> at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:50)
> at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:405)
> at org.apache.hadoop.mapred.MapTask.run(MapTask.java:336)
> at org.apache.hadoop.mapred.Child$4.run(Child.java:270)
> at java.security.AccessController.doPrivileged(Native Method)
> at javax.security.auth.Subject.doAs(Subject.java:415)
> at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1126)
> at org.apache.hadoop.mapred.Child.main(Child.java:264)
> ]
> at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:671)
> at org.apache.hadoop.hive.ql.exec.ExecMapper.map(ExecMapper.java:144)
> ... 8 more
> {code}
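> To make the failure mode concrete, here is a minimal self-contained sketch (class and method names are invented for illustration; this is not the actual patch): a getList() that casts its input unconditionally fails exactly as above when a scalar value arrives in place of an array, while a runtime type check avoids the opaque ClassCastException:
> {code}
> import java.util.Arrays;
> import java.util.Collections;
> import java.util.List;
>
> import org.apache.hadoop.io.ArrayWritable;
> import org.apache.hadoop.io.Text;
> import org.apache.hadoop.io.Writable;
>
> public class ArrayCastSketch {
>     // Naive version, analogous to the failing getList(): assumes the value
>     // is always an ArrayWritable and casts unconditionally.
>     static List<Writable> getListUnchecked(Object data) {
>         return Arrays.asList(((ArrayWritable) data).get()); // ClassCastException for a scalar
>     }
>
>     // Defensive version: check the runtime type before casting, and wrap a
>     // lone scalar instead of failing.
>     static List<Writable> getListChecked(Object data) {
>         if (data == null) {
>             return null;
>         }
>         if (data instanceof ArrayWritable) {
>             return Arrays.asList(((ArrayWritable) data).get());
>         }
>         return Collections.singletonList((Writable) data);
>     }
>
>     public static void main(String[] args) {
>         // Text stands in for the dictionary-encoded BinaryWritable in the trace.
>         Writable scalar = new Text("single-element value");
>         System.out.println(getListChecked(scalar));   // [single-element value]
>         System.out.println(getListUnchecked(scalar)); // throws ClassCastException
>     }
> }
> {code}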
> This issue was posted long back on the Parquet issues list. Since it is related to the Parquet Hive SerDe, I have created the Hive issue here; the details and history are available at https://github.com/Parquet/parquet-mr/issues/281.



--
This message was sent by Atlassian JIRA
(v6.2#6252)
