[ 
https://issues.apache.org/jira/browse/HIVE-12088?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14963085#comment-14963085
 ] 

Feng Yuan commented on HIVE-12088:
----------------------------------

there are two other errors in this job:
2015-10-19 17:27:37,015 ERROR [main]: mr.ExecDriver 
(ExecDriver.java:execute(400)) - yarn
2015-10-19 17:27:38,424 WARN  [main]: jdbc.JDBCStatsPublisher 
(JDBCStatsPublisher.java:init(310)) - Failed to update ID (size 255)
com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException: You have an error in 
your SQL syntax; check the manual that corresponds to your MySQL server version 
for the right syntax to use near 'VARCHAR(4000)' at line 1
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at 
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at com.mysql.jdbc.Util.handleNewInstance(Util.java:406)
        at com.mysql.jdbc.Util.getInstance(Util.java:381)
        at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:1030)
        at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:956)
        at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3558)
        at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3490)
        at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:1959)
        at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2109)
        at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2642)
        at com.mysql.jdbc.StatementImpl.executeUpdate(StatementImpl.java:1647)
        at com.mysql.jdbc.StatementImpl.executeUpdate(StatementImpl.java:1566)
        at 
org.apache.hadoop.hive.ql.stats.jdbc.JDBCStatsPublisher.init(JDBCStatsPublisher.java:304)
        at 
org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:411)
        at 
org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:137)
        at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
        at 
org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:88)
        at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1653)
        at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1412)
        at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1195)
        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
        at 
org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:213)
        at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:165)
        at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:376)
        at 
org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:736)
        at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
        at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
2015-10-19 17:27:38,558 INFO  [main]: exec.Utilities 
(Utilities.java:getBaseWork(390)) - PLAN PATH = 
hdfs://bfdhadoop26/tmp/hive/hadoop/87b4cb59-82e2-4b8d-a66b-0ecd9587e14a/hive_2015-10-19_17-27-31_247_5519557068960011437-1/-mr-10003/d9d465cb-1b84-41d3-a23a-a4d6e511fe9c/map.xml
2015-10-19 17:27:38,559 INFO  [main]: exec.Utilities 
(Utilities.java:getBaseWork(390)) - PLAN PATH = 
hdfs://bfdhadoop26/tmp/hive/hadoop/87b4cb59-82e2-4b8d-a66b-0ecd9587e14a/hive_2015-10-19_17-27-31_247_5519557068960011437-1/-mr-10003/d9d465cb-1b84-41d3-a23a-a4d6e511fe9c/reduce.xml
2015-10-19 17:27:38,559 INFO  [main]: exec.Utilities 
(Utilities.java:getBaseWork(400)) - ***************non-local mode***************
2015-10-19 17:27:38,560 INFO  [main]: exec.Utilities 
(Utilities.java:getBaseWork(404)) - local path = 
hdfs://bfdhadoop26/tmp/hive/hadoop/87b4cb59-82e2-4b8d-a66b-0ecd9587e14a/hive_2015-10-19_17-27-31_247_5519557068960011437-1/-mr-10003/d9d465cb-1b84-41d3-a23a-a4d6e511fe9c/reduce.xml
2015-10-19 17:27:38,560 INFO  [main]: exec.Utilities 
(Utilities.java:getBaseWork(416)) - Open file to read in plan: 
hdfs://bfdhadoop26/tmp/hive/hadoop/87b4cb59-82e2-4b8d-a66b-0ecd9587e14a/hive_2015-10-19_17-27-31_247_5519557068960011437-1/-mr-10003/d9d465cb-1b84-41d3-a23a-a4d6e511fe9c/reduce.xml
2015-10-19 17:27:38,597 INFO  [main]: exec.Utilities 
(Utilities.java:getBaseWork(456)) - File not found: File does not exist: 
/tmp/hive/hadoop/87b4cb59-82e2-4b8d-a66b-0ecd9587e14a/hive_2015-10-19_17-27-31_247_5519557068960011437-1/-mr-10003/d9d465cb-1b84-41d3-a23a-a4d6e511fe9c/reduce.xml
        at 
org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:66)
        at 
org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
        at 
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1891)
        at 
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1832)
        at 
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1812)
        at 
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1784)
        at 
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:542)
        at 
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:362)
        at 
org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at 
org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:962)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2039)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2035)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2033)

2015-10-19 17:27:38,598 INFO  [main]: exec.Utilities 
(Utilities.java:getBaseWork(457)) - No plan file found: 
hdfs://bfdhadoop26/tmp/hive/hadoop/87b4cb59-82e2-4b8d-a66b-0ecd9587e14a/hive_2015-10-19_17-27-31_247_5519557068960011437-1/-mr-10003/d9d465cb-1b84-41d3-a23a-a4d6e511fe9c/reduce.xml

> a simple insert hql throws out NoClassFoundException of MetaException
> ---------------------------------------------------------------------
>
>                 Key: HIVE-12088
>                 URL: https://issues.apache.org/jira/browse/HIVE-12088
>             Project: Hive
>          Issue Type: Bug
>          Components: CLI
>    Affects Versions: 1.2.0, 1.2.1
>            Reporter: Feng Yuan
>             Fix For: 1.2.2
>
>         Attachments: hive.log
>
>
> example:
> from portrait.rec_feature_feedback a insert overwrite table portrait.test1 
> select iid, feedback_15day, feedback_7day, feedback_5day, feedback_3day, 
> feedback_1day where l_date = '2015-09-09' and bid in 
> ('949722CF_12F7_523A_EE21_E3D591B7E755');
> log shows:
> Query ID = hadoop_20151012153841_120bee59-56a7-4e53-9c45-76f97c0f50ad
> Total jobs = 3
> Launching Job 1 out of 3
> Number of reduce tasks is set to 0 since there's no reduce operator
> Starting Job = job_1441881651073_95266, Tracking URL = 
> http://bjlg-44p12-rm01:8088/proxy/application_1441881651073_95266/
> Kill Command = /opt/hadoop/hadoop/bin/hadoop job  -kill 
> job_1441881651073_95266
> Hadoop job information for Stage-1: number of mappers: 21; number of 
> reducers: 0
> 2015-10-12 15:39:29,930 Stage-1 map = 0%,  reduce = 0%
> 2015-10-12 15:39:39,597 Stage-1 map = 5%,  reduce = 0%
> 2015-10-12 15:39:40,658 Stage-1 map = 0%,  reduce = 0%
> 2015-10-12 15:39:53,479 Stage-1 map = 5%,  reduce = 0%
> 2015-10-12 15:39:54,535 Stage-1 map = 0%,  reduce = 0%
> 2015-10-12 15:39:55,588 Stage-1 map = 10%,  reduce = 0%
> 2015-10-12 15:39:56,626 Stage-1 map = 5%,  reduce = 0%
> 2015-10-12 15:39:57,687 Stage-1 map = 0%,  reduce = 0%
> 2015-10-12 15:40:06,096 Stage-1 map = 100%,  reduce = 0%
> Ended Job = job_1441881651073_95266 with errors
> Error during job, obtaining debugging information...
> Examining task ID: task_1441881651073_95266_m_000000 (and more) from job 
> job_1441881651073_95266
> Examining task ID: task_1441881651073_95266_m_000016 (and more) from job 
> job_1441881651073_95266
> Examining task ID: task_1441881651073_95266_m_000011 (and more) from job 
> job_1441881651073_95266
> Examining task ID: task_1441881651073_95266_m_000018 (and more) from job 
> job_1441881651073_95266
> Examining task ID: task_1441881651073_95266_m_000002 (and more) from job 
> job_1441881651073_95266
> Task with the most failures(4): 
> -----
> Task ID:
>   task_1441881651073_95266_m_000009
> URL:
>   
> http://0.0.0.0:8088/taskdetails.jsp?jobid=job_1441881651073_95266&tipid=task_1441881651073_95266_m_000009
> -----
> Diagnostic Messages for this Task:
> Error: java.lang.ClassNotFoundException: 
> org.apache.hadoop.hive.metastore.api.MetaException
>       at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
>       at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
>       at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
>       at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
>       at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
>       at java.lang.Class.getDeclaredMethods0(Native Method)
>       at java.lang.Class.privateGetDeclaredMethods(Class.java:2570)
>       at java.lang.Class.privateGetPublicMethods(Class.java:2690)
>       at java.lang.Class.getMethods(Class.java:1467)
>       at com.sun.beans.finder.MethodFinder$1.create(MethodFinder.java:54)
>       at com.sun.beans.finder.MethodFinder$1.create(MethodFinder.java:49)
>       at com.sun.beans.util.Cache.get(Cache.java:127)
>       at com.sun.beans.finder.MethodFinder.findMethod(MethodFinder.java:81)
>       at java.beans.Statement.getMethod(Statement.java:357)
>       at java.beans.Statement.invokeInternal(Statement.java:261)
>       at java.beans.Statement.access$000(Statement.java:58)
>       at java.beans.Statement$2.run(Statement.java:185)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at java.beans.Statement.invoke(Statement.java:182)
>       at java.beans.Expression.getValue(Expression.java:153)
>       at 
> com.sun.beans.decoder.ObjectElementHandler.getValueObject(ObjectElementHandler.java:166)
>       at 
> com.sun.beans.decoder.NewElementHandler.getValueObject(NewElementHandler.java:123)
>       at 
> com.sun.beans.decoder.ElementHandler.getContextBean(ElementHandler.java:113)
>       at 
> com.sun.beans.decoder.NewElementHandler.getContextBean(NewElementHandler.java:109)
>       at 
> com.sun.beans.decoder.ObjectElementHandler.getValueObject(ObjectElementHandler.java:146)
>       at 
> com.sun.beans.decoder.NewElementHandler.getValueObject(NewElementHandler.java:123)
>       at 
> com.sun.beans.decoder.ElementHandler.getContextBean(ElementHandler.java:113)
>       at 
> com.sun.beans.decoder.NewElementHandler.getContextBean(NewElementHandler.java:109)
>       at 
> com.sun.beans.decoder.ObjectElementHandler.getValueObject(ObjectElementHandler.java:146)
>       at 
> com.sun.beans.decoder.NewElementHandler.getValueObject(NewElementHandler.java:123)
>       at 
> com.sun.beans.decoder.ElementHandler.getContextBean(ElementHandler.java:113)
>       at 
> com.sun.beans.decoder.NewElementHandler.getContextBean(NewElementHandler.java:109)
>       at 
> com.sun.beans.decoder.ObjectElementHandler.getValueObject(ObjectElementHandler.java:146)
>       at 
> com.sun.beans.decoder.NewElementHandler.getValueObject(NewElementHandler.java:123)
>       at 
> com.sun.beans.decoder.ElementHandler.endElement(ElementHandler.java:169)
>       at 
> com.sun.beans.decoder.DocumentHandler.endElement(DocumentHandler.java:318)
>       at org.apache.xerces.parsers.AbstractSAXParser.endElement(Unknown 
> Source)
>       at 
> org.apache.xerces.impl.XMLDocumentFragmentScannerImpl.scanEndElement(Unknown 
> Source)
>       at 
> org.apache.xerces.impl.XMLDocumentFragmentScannerImpl$FragmentContentDispatcher.dispatch(Unknown
>  Source)
>       at 
> org.apache.xerces.impl.XMLDocumentFragmentScannerImpl.scanDocument(Unknown 
> Source)
>       at org.apache.xerces.parsers.XML11Configuration.parse(Unknown Source)
>       at org.apache.xerces.parsers.XML11Configuration.parse(Unknown Source)
>       at org.apache.xerces.parsers.XMLParser.parse(Unknown Source)
>       at org.apache.xerces.parsers.AbstractSAXParser.parse(Unknown Source)
>       at org.apache.xerces.jaxp.SAXParserImpl$JAXPSAXParser.parse(Unknown 
> Source)
>       at org.apache.xerces.jaxp.SAXParserImpl.parse(Unknown Source)
>       at com.sun.beans.decoder.DocumentHandler$1.run(DocumentHandler.java:375)
>       at com.sun.beans.decoder.DocumentHandler$1.run(DocumentHandler.java:372)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at 
> java.security.ProtectionDomain$1.doIntersectionPrivilege(ProtectionDomain.java:76)
>       at com.sun.beans.decoder.DocumentHandler.parse(DocumentHandler.java:372)
>       at java.beans.XMLDecoder$1.run(XMLDecoder.java:201)
>       at java.beans.XMLDecoder$1.run(XMLDecoder.java:199)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at java.beans.XMLDecoder.parsingComplete(XMLDecoder.java:199)
>       at java.beans.XMLDecoder.readObject(XMLDecoder.java:250)
>       at 
> org.apache.hadoop.hive.ql.exec.Utilities.deserializeObjectByJavaXML(Utilities.java:1015)
>       at 
> org.apache.hadoop.hive.ql.exec.Utilities.deserializePlan(Utilities.java:928)
>       at 
> org.apache.hadoop.hive.ql.exec.Utilities.deserializePlan(Utilities.java:947)
>       at 
> org.apache.hadoop.hive.ql.exec.Utilities.getBaseWork(Utilities.java:390)
>       at 
> org.apache.hadoop.hive.ql.exec.Utilities.getMapWork(Utilities.java:286)
>       at 
> org.apache.hadoop.hive.ql.io.HiveInputFormat.init(HiveInputFormat.java:263)
>       at 
> org.apache.hadoop.hive.ql.io.HiveInputFormat.pushProjectionsAndFilters(HiveInputFormat.java:478)
>       at 
> org.apache.hadoop.hive.ql.io.HiveInputFormat.pushProjectionsAndFilters(HiveInputFormat.java:471)
>       at 
> org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.getRecordReader(CombineHiveInputFormat.java:648)
>       at 
> org.apache.hadoop.mapred.MapTask$TrackedRecordReader.<init>(MapTask.java:169)
>       at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:429)
>       at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
>       at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:163)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:415)
>       at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
>       at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
> FAILED: Execution Error, return code 2 from 
> org.apache.hadoop.hive.ql.exec.mr.MapRedTask
> MapReduce Jobs Launched: 
> Stage-Stage-1: Map: 21   HDFS Read: 0 HDFS Write: 0 FAIL
> Total MapReduce CPU Time Spent: 0 msec



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

Reply via email to