[ 
https://issues.apache.org/jira/browse/HIVE-14662?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Nemon Lou updated HIVE-14662:
-----------------------------
    Status: Patch Available  (was: Open)

> Wrong Class Instance When Using Custom SERDE
> --------------------------------------------
>
>                 Key: HIVE-14662
>                 URL: https://issues.apache.org/jira/browse/HIVE-14662
>             Project: Hive
>          Issue Type: Bug
>          Components: Serializers/Deserializers
>            Reporter: Nemon Lou
>            Assignee: Nemon Lou
>         Attachments: HIVE-14662.patch
>
>
> Using the [SerDe for MongoDB|https://github.com/mongodb/mongo-hadoop/blob/master/hive/src/main/java/com/mongodb/hadoop/hive/BSONSerDe.java].
> DDL:
> {noformat}
> create external table mytable (ID STRING..) 
> ROW FORMAT SERDE  'com.mongodb.hadoop.hive.BSONSerDe' 
> WITH SERDEPROPERTIES('mongo.columns.mapping'='{"ID":"_id",.. }')
> STORED AS INPUTFORMAT 'com.mongodb.hadoop.mapred.BSONFileInputFormat'
> OUTPUTFORMAT 'com.mongodb.hadoop.hive.output.HiveBSONFileOutputFormat'
> LOCATION 'hdfs:///mypath'; 
> {noformat}
> Open beeline and run the following statements; then open another beeline 
> session and run them again. The second run fails.
> {noformat}
> add jar hdfs:///tmp/mongo-hadoop-hive-1.4.2_new.jar;
> add jar hdfs:///tmp/mongo-java-driver-3.0.4.jar;
> add jar hdfs:///tmp/mongo-hadoop-core-1.4.2_new.jar;
> select * from mytable limit 1;
> {noformat}
> Error log:
> {noformat}
> 2016-08-25 09:30:34,475 | WARN  | HiveServer2-Handler-Pool: Thread-11972 | Error fetching results:  | org.apache.hive.service.cli.thrift.ThriftCLIService.FetchResults(ThriftCLIService.java:1058)
> org.apache.hive.service.cli.HiveSQLException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: class com.mongodb.hadoop.hive.BSONSerDerequires a BSONWritable object, notclass com.mongodb.hadoop.io.BSONWritable
>         at org.apache.hive.service.cli.operation.SQLOperation.getNextRowSet(SQLOperation.java:366)
>         at org.apache.hive.service.cli.operation.OperationManager.getOperationNextRowSet(OperationManager.java:251)
>         at org.apache.hive.service.cli.session.HiveSessionImpl.fetchResults(HiveSessionImpl.java:710)
>         at sun.reflect.GeneratedMethodAccessor45.invoke(Unknown Source)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:497)
>         at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:78)
>         at org.apache.hive.service.cli.session.HiveSessionProxy.access$000(HiveSessionProxy.java:36)
>         at org.apache.hive.service.cli.session.HiveSessionProxy$1.run(HiveSessionProxy.java:63)
>         at java.security.AccessController.doPrivileged(Native Method)
>         at javax.security.auth.Subject.doAs(Subject.java:422)
>         at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1673)
>         at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:59)
>         at com.sun.proxy.$Proxy20.fetchResults(Unknown Source)
>         at org.apache.hive.service.cli.CLIService.fetchResults(CLIService.java:451)
>         at org.apache.hive.service.cli.thrift.ThriftCLIService.FetchResults(ThriftCLIService.java:1049)
>         at org.apache.hive.service.cli.thrift.TCLIService$Processor$FetchResults.getResult(TCLIService.java:1553)
>         at org.apache.hive.service.cli.thrift.TCLIService$Processor$FetchResults.getResult(TCLIService.java:1538)
>         at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
>         at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
>         at org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Server$TUGIAssumingProcessor.process(HadoopThriftAuthBridge.java:692)
>         at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:285)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: class com.mongodb.hadoop.hive.BSONSerDerequires a BSONWritable object, notclass com.mongodb.hadoop.io.BSONWritable
>         at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:507)
>         at org.apache.hadoop.hive.ql.exec.FetchOperator.pushRow(FetchOperator.java:414)
>         at org.apache.hadoop.hive.ql.exec.FetchTask.fetch(FetchTask.java:140)
>         at org.apache.hadoop.hive.ql.Driver.getResults(Driver.java:1756)
>         at org.apache.hive.service.cli.operation.SQLOperation.getNextRowSet(SQLOperation.java:361)
>         ... 24 more
> Caused by: org.apache.hadoop.hive.serde2.SerDeException: class com.mongodb.hadoop.hive.BSONSerDerequires a BSONWritable object, notclass com.mongodb.hadoop.io.BSONWritable
>         at com.mongodb.hadoop.hive.BSONSerDe.deserialize(BSONSerDe.java:196)
>         at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:488)
>         ... 28 more
> {noformat}
> Note: make sure the table is not empty.
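> For context: the error above is the typical signature of the same class being loaded by two different classloaders. In the JVM, class identity is (name, classloader), so a {{BSONWritable}} created under one session's classloader fails the instanceof check inside a {{BSONSerDe}} loaded under another, even though both classes print the same name. The stand-alone sketch below (hypothetical demo code with placeholder names, not Hive or mongo-hadoop internals) reproduces that shape.
> {noformat}
> import java.io.File;
> import java.net.URL;
> import java.net.URLClassLoader;
> import java.util.ArrayList;
> import java.util.List;
> 
> // Hypothetical stand-alone demo: the same class loaded by two independent
> // classloaders yields two distinct Class objects, so an instance created
> // through one loader is rejected by the other loader's view of the class.
> public class ClassLoaderIdentityDemo {
>     public static void main(String[] args) throws Exception {
>         // Re-use the current classpath so this demo class itself can be
>         // re-loaded by two throw-away loaders.
>         List<URL> urls = new ArrayList<URL>();
>         for (String entry : System.getProperty("java.class.path").split(File.pathSeparator)) {
>             urls.add(new File(entry).toURI().toURL());
>         }
>         URL[] cp = urls.toArray(new URL[0]);
> 
>         // Two loaders with no shared parent for application classes.
>         ClassLoader a = new URLClassLoader(cp, null);
>         ClassLoader b = new URLClassLoader(cp, null);
> 
>         Class<?> fromA = Class.forName("ClassLoaderIdentityDemo", true, a);
>         Class<?> fromB = Class.forName("ClassLoaderIdentityDemo", true, b);
> 
>         System.out.println(fromA.getName().equals(fromB.getName())); // true: same name
>         System.out.println(fromA == fromB);                          // false: different identity
> 
>         // An object created via loader B is not an instance of loader A's class,
>         // which is the same shape as the SerDeException in this issue.
>         Object obj = fromB.getDeclaredConstructor().newInstance();
>         System.out.println(fromA.isInstance(obj));                   // false
>     }
> }
> {noformat}
> This prints true / false / false. Presumably something of that shape happens here once a second session adds the same jars into its own classloader, so a BSONWritable produced under one loader reaches a BSONSerDe loaded under another.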



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
