[
https://issues.apache.org/jira/browse/PHOENIX-4804?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
TracyGao01 updated PHOENIX-4804:
--------------------------------
Summary: Use 'org.apache.phoenix.hive.PhoenixStorageHandler' ERROR:'Undefined column. columnName=USER '  (was: Use 'org.apache.phoenix.hive.PhoenixStorageHandler' ERROR:'' )
> Use 'org.apache.phoenix.hive.PhoenixStorageHandler' ERROR:'Undefined column. columnName=USER '
> -----------------------------------------------------------------------------------------------
>
> Key: PHOENIX-4804
> URL: https://issues.apache.org/jira/browse/PHOENIX-4804
> Project: Phoenix
> Issue Type: Bug
> Affects Versions: 4.13.2-cdh5.11.2
> Environment: *hive version:* hive 1.1.0-cdh5.11.0
> *phoenix version:* 4.13.2-cdh5.11.2
> *hbase version:* 1.2.0-cdh5.11.0
> Reporter: TracyGao01
> Priority: Critical
> Fix For: 4.13.2-cdh5.11.2
>
> Attachments: image-2018-07-05-13-50-36-972.png,
> image-2018-07-05-13-52-28-898.png
>
>
> *Environment:*
> *hive version:* hive 1.1.0-cdh5.11.0
> *phoenix version:* 4.13.2-cdh5.11.2
> *hbase version:* 1.2.0-cdh5.11.0
> *ERROR:*
> I am using 'org.apache.phoenix.hive.PhoenixStorageHandler'. For example:
> First, I create a Hive external table:
>
> {code:sql}
> create external table USER (
>   id string,
>   name string)
> STORED BY 'org.apache.phoenix.hive.PhoenixStorageHandler'
> TBLPROPERTIES (
>   "phoenix.table.name" = "USER",
>   "phoenix.zookeeper.quorum" = "BigData-Dev-1,BigData-Dev-2,BigData-Dev-3",
>   "phoenix.zookeeper.znode.parent" = "/hbase",
>   "phoenix.zookeeper.client.port" = "2181",
>   "phoenix.rowkeys" = "ID",
>   "phoenix.column.mapping" = "id:ID,name:name");
> {code}
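> For reference, this mapping presumes a Phoenix table named USER with a row key column ID and a data column matching "name". A minimal sketch of what the Phoenix-side DDL might look like (a hypothetical reconstruction; the real table was created separately):
> {code:sql}
> -- Hypothetical Phoenix DDL corresponding to the Hive mapping above.
> -- "id:ID" maps the Hive column id to the Phoenix row key ID;
> -- "name:name" suggests a case-sensitive (quoted) Phoenix column "name".
> CREATE TABLE IF NOT EXISTS USER (
>     ID VARCHAR PRIMARY KEY,
>     "name" VARCHAR
> );
> {code}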
> Then I execute 'select * from USER;' and it works:
> !image-2018-07-05-13-50-36-972.png!
> But when I execute a query with a WHERE clause, for example "select * from USER where id = '1';", it fails:
> !image-2018-07-05-13-52-28-898.png!
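> For convenience, the two Hive statements in text form (restating the screenshots and the queries quoted above, run through HiveServer2/beeline):
> {code:sql}
> -- works:
> select * from USER;
> -- fails with ColumnNotFoundException: columnName=USER:
> select * from USER where id = '1';
> {code}
> The full HiveServer2 log for the failing query follows.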
>
> {code:java}
> 18/07/05 11:57:43 [HiveServer2-Background-Pool: Thread-147]: WARN mapreduce.JobResourceUploader: Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
> 18/07/05 11:57:46 [HiveServer2-Background-Pool: Thread-147]: ERROR mapreduce.PhoenixInputFormat: Failed to get the query plan with error [ERROR 504 (42703): Undefined column. columnName=USER]
> java.lang.RuntimeException: org.apache.phoenix.schema.ColumnNotFoundException: ERROR 504 (42703): Undefined column. columnName=USER
>     at org.apache.phoenix.hive.mapreduce.PhoenixInputFormat.getQueryPlan(PhoenixInputFormat.java:266)
>     at org.apache.phoenix.hive.mapreduce.PhoenixInputFormat.getSplits(PhoenixInputFormat.java:131)
>     at org.apache.hadoop.hive.ql.io.HiveInputFormat.addSplitsForGroup(HiveInputFormat.java:306)
>     at org.apache.hadoop.hive.ql.io.HiveInputFormat.getSplits(HiveInputFormat.java:408)
>     at org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.getCombineSplits(CombineHiveInputFormat.java:363)
>     at org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.getSplits(CombineHiveInputFormat.java:534)
>     at org.apache.hadoop.mapreduce.JobSubmitter.writeOldSplits(JobSubmitter.java:332)
>     at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:324)
>     at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:200)
>     at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1307)
>     at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1304)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
>     at org.apache.hadoop.mapreduce.Job.submit(Job.java:1304)
>     at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:578)
>     at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:573)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
>     at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:573)
>     at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:564)
>     at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:418)
>     at org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:142)
>     at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:214)
>     at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)
>     at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1979)
>     at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1692)
>     at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1424)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1208)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1203)
>     at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:237)
>     at org.apache.hive.service.cli.operation.SQLOperation.access$300(SQLOperation.java:88)
>     at org.apache.hive.service.cli.operation.SQLOperation$3$1.run(SQLOperation.java:293)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
>     at org.apache.hive.service.cli.operation.SQLOperation$3.run(SQLOperation.java:306)
>     at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>     at java.lang.Thread.run(Thread.java:748)
> Caused by: org.apache.phoenix.schema.ColumnNotFoundException: ERROR 504 (42703): Undefined column. columnName=USER
>     at org.apache.phoenix.schema.PTableImpl.getColumnForColumnName(PTableImpl.java:828)
>     at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.resolveColumn(FromCompiler.java:475)
>     at org.apache.phoenix.compile.TupleProjectionCompiler$ColumnRefVisitor.visit(TupleProjectionCompiler.java:207)
>     at org.apache.phoenix.compile.TupleProjectionCompiler$ColumnRefVisitor.visit(TupleProjectionCompiler.java:193)
>     at org.apache.phoenix.parse.ColumnParseNode.accept(ColumnParseNode.java:56)
>     at org.apache.phoenix.compile.TupleProjectionCompiler.createProjectedTable(TupleProjectionCompiler.java:109)
>     at org.apache.phoenix.compile.QueryCompiler.compileSingleFlatQuery(QueryCompiler.java:528)
>     at org.apache.phoenix.compile.QueryCompiler.compileSingleQuery(QueryCompiler.java:507)
>     at org.apache.phoenix.compile.QueryCompiler.compileSelect(QueryCompiler.java:202)
>     at org.apache.phoenix.compile.QueryCompiler.compile(QueryCompiler.java:157)
>     at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:476)
>     at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:442)
>     at org.apache.phoenix.jdbc.PhoenixStatement.compileQuery(PhoenixStatement.java:1679)
>     at org.apache.phoenix.jdbc.PhoenixStatement.compileQuery(PhoenixStatement.java:1672)
>     at org.apache.phoenix.jdbc.PhoenixStatement.optimizeQuery(PhoenixStatement.java:1666)
>     at org.apache.phoenix.hive.mapreduce.PhoenixInputFormat.getQueryPlan(PhoenixInputFormat.java:260)
>     ... 42 more
> Job Submission failed with exception 'java.lang.RuntimeException(org.apache.phoenix.schema.ColumnNotFoundException: ERROR 504 (42703): Undefined column. columnName=USER)'
> 18/07/05 11:57:46 [HiveServer2-Background-Pool: Thread-147]: ERROR exec.Task: Job Submission failed with exception 'java.lang.RuntimeException(org.apache.phoenix.schema.ColumnNotFoundException: ERROR 504 (42703): Undefined column. columnName=USER)'
> java.lang.RuntimeException: org.apache.phoenix.schema.ColumnNotFoundException: ERROR 504 (42703): Undefined column. columnName=USER
>     at org.apache.phoenix.hive.mapreduce.PhoenixInputFormat.getQueryPlan(PhoenixInputFormat.java:266)
>     at org.apache.phoenix.hive.mapreduce.PhoenixInputFormat.getSplits(PhoenixInputFormat.java:131)
>     at org.apache.hadoop.hive.ql.io.HiveInputFormat.addSplitsForGroup(HiveInputFormat.java:306)
>     at org.apache.hadoop.hive.ql.io.HiveInputFormat.getSplits(HiveInputFormat.java:408)
>     at org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.getCombineSplits(CombineHiveInputFormat.java:363)
>     at org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.getSplits(CombineHiveInputFormat.java:534)
>     at org.apache.hadoop.mapreduce.JobSubmitter.writeOldSplits(JobSubmitter.java:332)
>     at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:324)
>     at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:200)
>     at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1307)
>     at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1304)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
>     at org.apache.hadoop.mapreduce.Job.submit(Job.java:1304)
>     at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:578)
>     at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:573)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
>     at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:573)
>     at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:564)
>     at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:418)
>     at org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:142)
>     at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:214)
>     at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)
>     at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1979)
>     at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1692)
>     at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1424)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1208)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1203)
>     at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:237)
>     at org.apache.hive.service.cli.operation.SQLOperation.access$300(SQLOperation.java:88)
>     at org.apache.hive.service.cli.operation.SQLOperation$3$1.run(SQLOperation.java:293)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
>     at org.apache.hive.service.cli.operation.SQLOperation$3.run(SQLOperation.java:306)
>     at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>     at java.lang.Thread.run(Thread.java:748)
> Caused by: org.apache.phoenix.schema.ColumnNotFoundException: ERROR 504 (42703): Undefined column. columnName=USER
>     at org.apache.phoenix.schema.PTableImpl.getColumnForColumnName(PTableImpl.java:828)
>     at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.resolveColumn(FromCompiler.java:475)
>     at org.apache.phoenix.compile.TupleProjectionCompiler$ColumnRefVisitor.visit(TupleProjectionCompiler.java:207)
>     at org.apache.phoenix.compile.TupleProjectionCompiler$ColumnRefVisitor.visit(TupleProjectionCompiler.java:193)
>     at org.apache.phoenix.parse.ColumnParseNode.accept(ColumnParseNode.java:56)
>     at org.apache.phoenix.compile.TupleProjectionCompiler.createProjectedTable(TupleProjectionCompiler.java:109)
>     at org.apache.phoenix.compile.QueryCompiler.compileSingleFlatQuery(QueryCompiler.java:528)
>     at org.apache.phoenix.compile.QueryCompiler.compileSingleQuery(QueryCompiler.java:507)
>     at org.apache.phoenix.compile.QueryCompiler.compileSelect(QueryCompiler.java:202)
>     at org.apache.phoenix.compile.QueryCompiler.compile(QueryCompiler.java:157)
>     at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:476)
>     at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:442)
>     at org.apache.phoenix.jdbc.PhoenixStatement.compileQuery(PhoenixStatement.java:1679)
>     at org.apache.phoenix.jdbc.PhoenixStatement.compileQuery(PhoenixStatement.java:1672)
>     at org.apache.phoenix.jdbc.PhoenixStatement.optimizeQuery(PhoenixStatement.java:1666)
>     at org.apache.phoenix.hive.mapreduce.PhoenixInputFormat.getQueryPlan(PhoenixInputFormat.java:260)
>     ... 42 more
> FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
> 18/07/05 11:57:46 [HiveServer2-Background-Pool: Thread-147]: ERROR ql.Driver: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
> 18/07/05 11:57:46 [HiveServer2-Background-Pool: Thread-147]: ERROR operation.Operation: Error running hive query:
> org.apache.hive.service.cli.HiveSQLException: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
>     at org.apache.hive.service.cli.operation.Operation.toSQLException(Operation.java:400)
>     at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:239)
>     at org.apache.hive.service.cli.operation.SQLOperation.access$300(SQLOperation.java:88)
>     at org.apache.hive.service.cli.operation.SQLOperation$3$1.run(SQLOperation.java:293)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
>     at org.apache.hive.service.cli.operation.SQLOperation$3.run(SQLOperation.java:306)
>     at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>     at java.lang.Thread.run(Thread.java:748)
> ERROR : Job Submission failed with exception 'java.lang.RuntimeException(org.apache.phoenix.schema.ColumnNotFoundException: ERROR 504 (42703): Undefined column. columnName=USER)'
> java.lang.RuntimeException: org.apache.phoenix.schema.ColumnNotFoundException: ERROR 504 (42703): Undefined column. columnName=USER
>     at org.apache.phoenix.hive.mapreduce.PhoenixInputFormat.getQueryPlan(PhoenixInputFormat.java:266)
>     at org.apache.phoenix.hive.mapreduce.PhoenixInputFormat.getSplits(PhoenixInputFormat.java:131)
>     at org.apache.hadoop.hive.ql.io.HiveInputFormat.addSplitsForGroup(HiveInputFormat.java:306)
>     at org.apache.hadoop.hive.ql.io.HiveInputFormat.getSplits(HiveInputFormat.java:408)
>     at org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.getCombineSplits(CombineHiveInputFormat.java:363)
>     at org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.getSplits(CombineHiveInputFormat.java:534)
>     at org.apache.hadoop.mapreduce.JobSubmitter.writeOldSplits(JobSubmitter.java:332)
>     at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:324)
>     at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:200)
>     at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1307)
>     at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1304)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
>     at org.apache.hadoop.mapreduce.Job.submit(Job.java:1304)
>     at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:578)
>     at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:573)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
>     at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:573)
>     at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:564)
>     at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:418)
>     at org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:142)
>     at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:214)
>     at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)
>     at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1979)
>     at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1692)
>     at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1424)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1208)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1203)
>     at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:237)
>     at org.apache.hive.service.cli.operation.SQLOperation.access$300(SQLOperation.java:88)
>     at org.apache.hive.service.cli.operation.SQLOperation$3$1.run(SQLOperation.java:293)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
>     at org.apache.hive.service.cli.operation.SQLOperation$3.run(SQLOperation.java:306)
>     at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>     at java.lang.Thread.run(Thread.java:748)
> Caused by: org.apache.phoenix.schema.ColumnNotFoundException: ERROR 504 (42703): Undefined column. columnName=USER
>     at org.apache.phoenix.schema.PTableImpl.getColumnForColumnName(PTableImpl.java:828)
>     at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.resolveColumn(FromCompiler.java:475)
>     at org.apache.phoenix.compile.TupleProjectionCompiler$ColumnRefVisitor.visit(TupleProjectionCompiler.java:207)
>     at org.apache.phoenix.compile.TupleProjectionCompiler$ColumnRefVisitor.visit(TupleProjectionCompiler.java:193)
>     at org.apache.phoenix.parse.ColumnParseNode.accept(ColumnParseNode.java:56)
>     at org.apache.phoenix.compile.TupleProjectionCompiler.createProjectedTable(TupleProjectionCompiler.java:109)
>     at org.apache.phoenix.compile.QueryCompiler.compileSingleFlatQuery(QueryCompiler.java:528)
>     at org.apache.phoenix.compile.QueryCompiler.compileSingleQuery(QueryCompiler.java:507)
>     at org.apache.phoenix.compile.QueryCompiler.compileSelect(QueryCompiler.java:202)
>     at org.apache.phoenix.compile.QueryCompiler.compile(QueryCompiler.java:157)
>     at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:476)
>     at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:442)
>     at org.apache.phoenix.jdbc.PhoenixStatement.compileQuery(PhoenixStatement.java:1679)
>     at org.apache.phoenix.jdbc.PhoenixStatement.compileQuery(PhoenixStatement.java:1672)
>     at org.apache.phoenix.jdbc.PhoenixStatement.optimizeQuery(PhoenixStatement.java:1666)
>     at org.apache.phoenix.hive.mapreduce.PhoenixInputFormat.getQueryPlan(PhoenixInputFormat.java:260)
>     ... 42 more
> ERROR : FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
> Error: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask (state=08S01,code=1)
> {code}
> Comment: USER is the table name, so why does the error say:
> {code:java}
> Undefined column. columnName=USER
> {code}
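> One way to narrow this down (a hedged suggestion, assuming sqlline access to the same cluster) is to run the equivalent statements directly against Phoenix, bypassing the Hive storage handler, and to check how the table and its columns are registered in SYSTEM.CATALOG:
> {code:sql}
> -- Run in sqlline.py against the same cluster (not through Hive).
> -- Check the column names Phoenix has recorded for the table:
> SELECT TABLE_NAME, COLUMN_NAME FROM SYSTEM.CATALOG WHERE TABLE_NAME = 'USER';
> -- Try the same filtered query directly in Phoenix:
> SELECT * FROM USER WHERE ID = '1';
> {code}
> If the direct Phoenix query succeeds, that would suggest the problem lies in the query built by PhoenixInputFormat.getQueryPlan rather than in the Phoenix table itself.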
>
--
This message was sent by Atlassian JIRA
(v7.6.3#76005)