[
https://issues.apache.org/jira/browse/HIVE-26926?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Denys Kuzmenko updated HIVE-26926:
----------------------------------
Issue Type: Improvement (was: Bug)
> SHOW PARTITIONS for a non-partitioned table should just throw an execution error
> instead of printing the full stack trace.
> ----------------------------------------------------------------------------------------------------------
>
> Key: HIVE-26926
> URL: https://issues.apache.org/jira/browse/HIVE-26926
> Project: Hive
> Issue Type: Improvement
> Reporter: Dharmik Thakkar
> Priority: Critical
>
> SHOW PARTITIONS for a non-partitioned table should just throw an execution error
> instead of printing the full stack trace.
> Steps to reproduce:
> # create table test (id int);
> # show partitions test;
> Actual output:
> {code:java}
> 0: jdbc:hive2://hs2-qe-vw-dwx-hive-nnbm.dw-dw> create table test (id int);
> INFO : Compiling
> command(queryId=hive_20230110210715_637ef126-bb53-4624-9a72-d36f13f98a93):
> create table test (id int)
> INFO : Semantic Analysis Completed (retrial = false)
> INFO : Created Hive schema: Schema(fieldSchemas:null, properties:null)
> INFO : Completed compiling
> command(queryId=hive_20230110210715_637ef126-bb53-4624-9a72-d36f13f98a93);
> Time taken: 0.036 seconds
> INFO : Executing
> command(queryId=hive_20230110210715_637ef126-bb53-4624-9a72-d36f13f98a93):
> create table test (id int)
> INFO : Starting task [Stage-0:DDL] in serial mode
> INFO : Completed executing
> command(queryId=hive_20230110210715_637ef126-bb53-4624-9a72-d36f13f98a93);
> Time taken: 0.507 seconds
> INFO : OK
> No rows affected (0.809 seconds)
> 0: jdbc:hive2://hs2-qe-vw-dwx-hive-nnbm.dw-dw> show partitions test;
> INFO : Compiling
> command(queryId=hive_20230110210721_d1f38a5b-fe4e-4847-a3c2-5a85a95c29eb):
> show partitions test
> INFO : Semantic Analysis Completed (retrial = false)
> INFO : Created Hive schema: Schema(fieldSchemas:[FieldSchema(name:partition,
> type:string, comment:from deserializer)], properties:null)
> INFO : Completed compiling
> command(queryId=hive_20230110210721_d1f38a5b-fe4e-4847-a3c2-5a85a95c29eb);
> Time taken: 0.03 seconds
> INFO : Executing
> command(queryId=hive_20230110210721_d1f38a5b-fe4e-4847-a3c2-5a85a95c29eb):
> show partitions test
> INFO : Starting task [Stage-0:DDL] in serial mode
> ERROR : Failed
> org.apache.hadoop.hive.ql.metadata.HiveException: Table test is not a
> partitioned table
> at
> org.apache.hadoop.hive.ql.ddl.table.partition.show.ShowPartitionsOperation.execute(ShowPartitionsOperation.java:44)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.ddl.DDLTask.execute(DDLTask.java:84)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:213)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:360)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:333)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:250)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:111)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:809)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:547)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:541)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:166)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:232)
> ~[hive-service-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> org.apache.hive.service.cli.operation.SQLOperation.access$700(SQLOperation.java:89)
> ~[hive-service-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:338)
> ~[hive-service-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at java.security.AccessController.doPrivileged(Native Method) ~[?:?]
> at javax.security.auth.Subject.doAs(Subject.java:423) ~[?:?]
> at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)
> ~[hadoop-common-3.1.1.7.2.15.4-6.jar:?]
> at
> org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:358)
> ~[hive-service-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) ~[?:?]
> at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
> at
> java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) ~[?:?]
> at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
> ~[?:?]
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
> ~[?:?]
> at java.lang.Thread.run(Thread.java:829) [?:?]
> ERROR : DDLTask failed, DDL Operation: class
> org.apache.hadoop.hive.ql.ddl.table.partition.show.ShowPartitionsOperation
> org.apache.hadoop.hive.ql.metadata.HiveException: Table test is not a
> partitioned table
> at
> org.apache.hadoop.hive.ql.ddl.table.partition.show.ShowPartitionsOperation.execute(ShowPartitionsOperation.java:44)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.ddl.DDLTask.execute(DDLTask.java:84)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:213)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:360)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:333)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:250)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:111)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:809)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:547)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:541)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:166)
> ~[hive-exec-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:232)
> ~[hive-service-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> org.apache.hive.service.cli.operation.SQLOperation.access$700(SQLOperation.java:89)
> ~[hive-service-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:338)
> ~[hive-service-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at java.security.AccessController.doPrivileged(Native Method) ~[?:?]
> at javax.security.auth.Subject.doAs(Subject.java:423) ~[?:?]
> at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)
> ~[hadoop-common-3.1.1.7.2.15.4-6.jar:?]
> at
> org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:358)
> ~[hive-service-3.1.3000.2022.0.13.0-72.jar:3.1.3000.2022.0.13.0-72]
> at
> java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) ~[?:?]
> at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
> at
> java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) ~[?:?]
> at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
> ~[?:?]
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
> ~[?:?]
> at java.lang.Thread.run(Thread.java:829) [?:?]
> ERROR : FAILED: Execution Error, return code 10241 from
> org.apache.hadoop.hive.ql.ddl.DDLTask. Table test is not a partitioned table
> INFO : Completed executing
> command(queryId=hive_20230110210721_d1f38a5b-fe4e-4847-a3c2-5a85a95c29eb);
> Time taken: 0.016 seconds
> INFO : OK
> Error: Error while compiling statement: FAILED: Execution Error, return code
> 10241 from org.apache.hadoop.hive.ql.ddl.DDLTask. Table test is not a
> partitioned table (state=42000,code=10241) {code}
> Instead of printing the full stack trace, we should show:
> {code:java}
> Error: Error while compiling statement: FAILED: Execution Error, return code
> 10241 from org.apache.hadoop.hive.ql.ddl.DDLTask. Table test is not a
> partitioned table (state=42000,code=10241) {code}
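> One possible direction is sketched below: validate the table during semantic analysis so the statement fails with a single SemanticException line before the DDL task ever runs, keeping the stack trace out of the client output. This is only an illustrative sketch under assumptions, not the actual patch; the helper class and method names are hypothetical, while Table.isPartitioned(), Table.getTableName() and SemanticException are existing Hive APIs.
> {code:java}
> // Illustrative sketch only -- not the committed fix for HIVE-26926.
> // Idea: reject SHOW PARTITIONS on a non-partitioned table at analysis time,
> // so the client sees one error line instead of an execution-time stack trace.
> // The class and method below are hypothetical; Table.isPartitioned(),
> // Table.getTableName() and SemanticException are real Hive APIs.
> import org.apache.hadoop.hive.ql.metadata.Table;
> import org.apache.hadoop.hive.ql.parse.SemanticException;
>
> public class ShowPartitionsValidationSketch {
>
>   /** Fail fast during compilation if the target table has no partition keys. */
>   public static void validatePartitioned(Table table) throws SemanticException {
>     if (!table.isPartitioned()) {
>       // Would surface to the client as a single FAILED: SemanticException line.
>       throw new SemanticException(
>           "Table " + table.getTableName() + " is not a partitioned table");
>     }
>   }
> }
> {code}
> If a check like this were wired into the SHOW PARTITIONS analyzer, beeline would report only the single error line shown above rather than the full operation-log stack trace.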
--
This message was sent by Atlassian Jira
(v8.20.10#820010)