Sorry for misleading you; please run the following command:

hive -e "DROP TABLE IF EXISTS kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9;

CREATE EXTERNAL TABLE IF NOT EXISTS kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9
(
GA_TALEND_ITEMSALES_STOREID string
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\177'
STORED AS SEQUENCEFILE
LOCATION '/tmp/kylin-73f345f7-8217-46e0-a9c6-4bd2757c50e9/kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9';

SET mapreduce.job.split.metainfo.maxsize=-1;
SET mapreduce.job.max.split.locations=2000;
SET dfs.replication=2;
SET hive.exec.compress.output=true;
SET hive.auto.convert.join.noconditionaltask = true;
SET hive.auto.convert.join.noconditionaltask.size = 300000000;

INSERT OVERWRITE TABLE kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9
SELECT
FACT_TABLE.STOREID
FROM GA_TALEND.ITEMSALES as FACT_TABLE
INNER JOIN GA_TALEND.STORE as LOOKUP_1 ON FACT_TABLE.STOREID = LOOKUP_1.STORECODE ;
"
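If the shell quoting on such a long -e string gets awkward, one alternative (just a suggestion on my side, not something Kylin generates) is to save the same statements into a file and run it with hive -f; the file path below is only an example:

hive -f /tmp/kylin_intermediate_rebuild.sql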
Best Regards
Zhou QianHao

On 7/7/15, 1:35 PM, "Yaseen Mohammed" <[email protected]> wrote:

>Hi, I am getting the below error. Is this a complete statement?
>
>[root@mapr-node4 ~]# hive -e "DROP TABLE IF EXISTS"
>
>Logging initialized using configuration in
>jar:file:/opt/mapr/hive/hive-0.13/lib/hive-common-0.13.0-mapr-1504.jar!/hive-log4j.properties
>NoViableAltException(-1@[184:1: tableName : (db= identifier DOT tab= identifier -> ^( TOK_TABNAME $db $tab) |tab= identifier -> ^( TOK_TABNAME $tab) );])
>        at org.antlr.runtime.DFA.noViableAlt(DFA.java:158)
>        at org.antlr.runtime.DFA.predict(DFA.java:144)
>        at org.apache.hadoop.hive.ql.parse.HiveParser_FromClauseParser.tableName(HiveParser_FromClauseParser.java:4956)
>        at org.apache.hadoop.hive.ql.parse.HiveParser.tableName(HiveParser.java:40849)
>        at org.apache.hadoop.hive.ql.parse.HiveParser.dropTableStatement(HiveParser.java:6405)
>        at org.apache.hadoop.hive.ql.parse.HiveParser.ddlStatement(HiveParser.java:2162)
>        at org.apache.hadoop.hive.ql.parse.HiveParser.execStatement(HiveParser.java:1401)
>        at org.apache.hadoop.hive.ql.parse.HiveParser.statement(HiveParser.java:1039)
>        at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:199)
>        at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:166)
>        at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:406)
>        at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:324)
>        at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:980)
>        at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1045)
>        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:916)
>        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:906)
>        at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:268)
>        at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:220)
>        at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:423)
>        at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:359)
>        at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:742)
>        at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:686)
>        at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
>        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>        at java.lang.reflect.Method.invoke(Method.java:606)
>        at org.apache.hadoop.util.RunJar.main(RunJar.java:212)
>FAILED: ParseException line 1:20 cannot recognize input near '<EOF>' '<EOF>' '<EOF>' in table name
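For clarity: the ParseException above is expected for that exact input, since "DROP TABLE IF EXISTS" by itself is not a complete statement; Hive is still waiting for a table name when it reaches end-of-input. The '...' in my earlier mail was only a placeholder for the full intermediate table name, so the minimal complete form would look like:

hive -e "DROP TABLE IF EXISTS kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9;"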
>
>-----Original Message-----
>From: 周千昊 [mailto:[email protected]]
>Sent: Monday, July 06, 2015 7:27 PM
>To: [email protected]
>Subject: Re: Error Running Cube.
>
>Hi, Mohammed
>    would you please run
>        hive -e 'DROP TABLE IF EXISTS ...'
>    in your CLI to see if there is anything wrong, which should be the root cause.
>    BTW, the log is a known issue which will be fixed in the next release.
>
>
>Yaseen Mohammed <[email protected]> wrote on Tue, Jul 7, 2015 at 6:09 AM:
>
>> Hi All,
>> We are encountering issues with the cube build job; when running it we get the following error. Could you please help us?
>>
>> [pool-4-thread-8]:[2015-07-06 13:11:00,766][INFO][org.apache.kylin.job.manager.ExecutableManager.updateJobOutput(ExecutableManager.java:222)]
>> - job id:73f345f7-8217-46e0-a9c6-4bd2757c50e9-00 from READY to RUNNING
>> [pool-4-thread-8]:[2015-07-06 13:11:00,767][INFO][org.apache.kylin.job.common.ShellExecutable.doWork(ShellExecutable.java:50)]
>> - executing:hive -e "DROP TABLE IF EXISTS kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9;
>>
>> CREATE EXTERNAL TABLE IF NOT EXISTS kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9
>> (
>> GA_TALEND_ITEMSALES_STOREID string
>> )
>> ROW FORMAT DELIMITED FIELDS TERMINATED BY '\177'
>> STORED AS SEQUENCEFILE
>> LOCATION '/tmp/kylin-73f345f7-8217-46e0-a9c6-4bd2757c50e9/kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9';
>>
>> SET mapreduce.job.split.metainfo.maxsize=-1;
>> SET mapreduce.job.max.split.locations=2000;
>> SET dfs.replication=2;
>> SET hive.exec.compress.output=true;
>> SET hive.auto.convert.join.noconditionaltask = true;
>> SET hive.auto.convert.join.noconditionaltask.size = 300000000;
>> INSERT OVERWRITE TABLE kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9
>> SELECT
>> FACT_TABLE.STOREID
>> FROM GA_TALEND.ITEMSALES as FACT_TABLE INNER JOIN GA_TALEND.STORE as LOOKUP_1 ON FACT_TABLE.STOREID = LOOKUP_1.STORECODE ;
>>
>> "
>> [pool-4-thread-8]:[2015-07-06 13:11:18,495][ERROR][org.apache.kylin.job.common.ShellExecutable.doWork(ShellExecutable.java:56)]
>> - job:73f345f7-8217-46e0-a9c6-4bd2757c50e9-00 execute finished with exception
>> java.io.IOException: OS command error exit with 2 -- hive -e "DROP TABLE IF EXISTS kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9;
>>
>> CREATE EXTERNAL TABLE IF NOT EXISTS kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9
>> (
>> GA_TALEND_ITEMSALES_STOREID string
>> )
>> ROW FORMAT DELIMITED FIELDS TERMINATED BY '\177'
>> STORED AS SEQUENCEFILE
>> LOCATION '/tmp/kylin-73f345f7-8217-46e0-a9c6-4bd2757c50e9/kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9';
>>
>> SET mapreduce.job.split.metainfo.maxsize=-1;
>> SET mapreduce.job.max.split.locations=2000;
>> SET dfs.replication=2;
>> SET hive.exec.compress.output=true;
>> SET hive.auto.convert.join.noconditionaltask = true;
>> SET hive.auto.convert.join.noconditionaltask.size = 300000000;
>> INSERT OVERWRITE TABLE kylin_intermediate_TST_CBE_19700101000000_2922789940817071255_73f345f7_8217_46e0_a9c6_4bd2757c50e9
>> SELECT
>> FACT_TABLE.STOREID
>> FROM GA_TALEND.ITEMSALES as FACT_TABLE INNER JOIN GA_TALEND.STORE as LOOKUP_1 ON FACT_TABLE.STOREID = LOOKUP_1.STORECODE ;
>>
>> "
>>         at org.apache.kylin.common.util.CliCommandExecutor.execute(CliCommandExecutor.java:91)
>>         at org.apache.kylin.job.common.ShellExecutable.doWork(ShellExecutable.java:52)
>>         at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:107)
>>         at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:50)
>>         at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:107)
>>         at org.apache.kylin.job.impl.threadpool.DefaultScheduler$JobRunner.run(DefaultScheduler.java:132)
>>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>>         at java.lang.Thread.run(Thread.java:745)
>> [pool-4-thread-8]:[2015-07-06 13:11:18,504][DEBUG][org.apache.kylin.common.persistence.ResourceStore.putResource(ResourceStore.java:171)]
>> - Saving resource /execute_output/73f345f7-8217-46e0-a9c6-4bd2757c50e9-00 (Store kylin_metadata@hbase)
>> [pool-4-thread-8]:[2015-07-06 13:11:18,524][DEBUG][org.apache.kylin.common.persistence.ResourceStore.putResource(ResourceStore.java:171)]
>> - Saving resource /execute_output/73f345f7-8217-46e0-a9c6-4bd2757c50e9-00 (Store kylin_metadata@hbase)
>> [pool-4-thread-8]:[2015-07-06 13:11:18,528][INFO][org.apache.kylin.job.manager.ExecutableManager.updateJobOutput(ExecutableManager.java:222)]
>> - job id:73f345f7-8217-46e0-a9c6-4bd2757c50e9-00 from RUNNING to ERROR
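For reference, when re-running the command from the top of this mail manually on the Kylin server, it can also help to print the shell exit status right after hive returns, since the job log above reports the failure only as "OS command error exit with 2"; for example:

echo "hive exit code: $?"

A non-zero value there, together with the Hive console output, should point at which statement in the script is actually failing.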
