[ https://issues.apache.org/jira/browse/HIVE-17101?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Kavan Suresh updated HIVE-17101:
--------------------------------
Description:
When a drop table call on a managed table fails to delete the table's data (for
example because of permission issues on the warehouse directory), the resulting
exception from the metastore never reaches the client and the drop appears to
succeed. When a table with the same name is created again, it shows the stale
data left behind by the previous table.
{code:java}
0: jdbc:hive2://ctr-e133-1493418528701-190793> drop table source;
No rows affected (0.534 seconds)
0: jdbc:hive2://ctr-e133-1493418528701-190793> create table source (name varchar(50), age tinyint, num_clicks bigint) clustered by (name) into 2 buckets stored as orc tblproperties('transactional'='true');
No rows affected (0.256 seconds)
0: jdbc:hive2://ctr-e133-1493418528701-190793> select * from source;
+--------------------+-------------------+--------------------------+--+
|    source.name     |    source.age     |    source.num_clicks     |
+--------------------+-------------------+--------------------------+--+
| amy                | 35                | 12341234                 |
| bob                | 66                | 123471234871239847       |
| amy                | 35                | 12341234                 |
| bob                | 66                | 123471234871239847       |
| amy                | 35                | 12341234                 |
| bob                | 66                | 123471234871239847       |
| amy                | 35                | 12341234                 |
| bob                | 66                | 123471234871239847       |
| amy                | 35                | 12341234                 |
| bob                | 66                | 123471234871239847       |
| cal                | 21                | 431                      |
| cal                | 21                | 431                      |
| cal                | 21                | 431                      |
| cal                | 21                | 431                      |
| cal                | 21                | 431                      |
+--------------------+-------------------+--------------------------+--+
15 rows selected (0.223 seconds)
{code}
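The HiveServer2 log below shows that the failure happens while moving the table
directory to the HDFS trash. One plausible way to stage such a failure for a
reproduction (this is an assumption about the cluster setup, not something
recorded in this report) is to take write access on the warehouse parent
directory away from the user the drop runs as, so the rename into .Trash fails.
A minimal sketch using the standard Hadoop FileSystem API, with the path taken
from the log and the class name invented for this example:
{code:java}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

// Hypothetical reproduction helper (not part of Hive): make the managed
// table's warehouse directory non-removable for the dropping user, so that
// DROP TABLE can delete the metadata but not the data.
public class StageUndeletableTableDir {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();   // picks up core-site.xml / hdfs-site.xml
    FileSystem fs = FileSystem.get(conf);

    // Table location taken from the HiveServer2 log in this report.
    Path tableDir = new Path("/apps/hive/warehouse/source");

    // Assumption: the dropping user is not the owner of the parent directory,
    // so removing group/other write access makes the trash move fail.
    fs.setPermission(tableDir.getParent(),
        new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.READ_EXECUTE));

    System.out.println("Warehouse dir permissions: "
        + fs.getFileStatus(tableDir.getParent()).getPermission());
  }
}
{code}
With the directory in such a state, the drop above still reports "No rows
affected" even though the files remain on HDFS, as the log below shows.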
HiveServer2 logs:
{noformat}
2017-07-14 00:36:11,539 INFO [HiveServer2-Background-Pool: Thread-1214]: ql.Driver (Driver.java:execute(1411)) - Starting command(queryId=hive_20170714003611_e82a07e3-83aa-451e-b4dd-6beb7382c981): drop table source
2017-07-14 00:36:11,539 INFO [HiveServer2-Background-Pool: Thread-1214]: hooks.ATSHook (ATSHook.java:<init>(114)) - Created ATS Hook
2017-07-14 00:36:11,539 INFO [HiveServer2-Background-Pool: Thread-1214]: log.PerfLogger (PerfLogger.java:PerfLogBegin(149)) - <PERFLOG method=PreHook.org.apache.hadoop.hive.ql.hooks.ATSHook from=org.apache.hadoop.hive.ql.Driver>
2017-07-14 00:36:11,540 INFO [HiveServer2-Background-Pool: Thread-1214]: log.PerfLogger (PerfLogger.java:PerfLogEnd(177)) - </PERFLOG method=PreHook.org.apache.hadoop.hive.ql.hooks.ATSHook start=1499992571539 end=1499992571540 duration=1 from=org.apache.hadoop.hive.ql.Driver>
2017-07-14 00:36:11,540 INFO [HiveServer2-Background-Pool: Thread-1214]: log.PerfLogger (PerfLogger.java:PerfLogBegin(149)) - <PERFLOG method=runTasks from=org.apache.hadoop.hive.ql.Driver>
2017-07-14 00:36:11,541 INFO [HiveServer2-Background-Pool: Thread-1214]: ql.Driver (Driver.java:launchTask(1746)) - Starting task [Stage-0:DDL] in serial mode
2017-07-14 00:36:11,541 INFO [HiveServer2-Background-Pool: Thread-1214]: metastore.HiveMetaStore (HiveMetaStore.java:logInfo(859)) - 42: get_table : db=default tbl=source
2017-07-14 00:36:11,541 INFO [HiveServer2-Background-Pool: Thread-1214]: HiveMetaStore.audit (HiveMetaStore.java:logAuditEvent(410)) - ugi=hrt_qa ip=unknown-ip-addr cmd=get_table : db=default tbl=source
2017-07-14 00:36:11,670 WARN [HiveServer2-Background-Pool: Thread-1214]: conf.HiveConf (HiveConf.java:initialize(3067)) - HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
2017-07-14 00:36:11,670 INFO [HiveServer2-Background-Pool: Thread-1214]: metastore.HiveMetaStore (HiveMetaStore.java:newRawStoreForConf(705)) - 42: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2017-07-14 00:36:11,678 INFO [HiveServer2-Background-Pool: Thread-1214]: metastore.ObjectStore (ObjectStore.java:initializeHelper(377)) - ObjectStore, initialize called
2017-07-14 00:36:11,683 INFO [HiveServer2-Background-Pool: Thread-1214]: metastore.MetaStoreDirectSql (MetaStoreDirectSql.java:<init>(163)) - Using direct SQL, underlying DB is ORACLE
2017-07-14 00:36:11,683 INFO [HiveServer2-Background-Pool: Thread-1214]: metastore.ObjectStore (ObjectStore.java:setConf(291)) - Initialized ObjectStore
2017-07-14 00:36:11,698 INFO [HiveServer2-Background-Pool: Thread-1214]: metadata.HiveUtils (HiveUtils.java:getMetaStoreAuthorizeProviderManagers(355)) - Adding metastore authorization provider: org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider
2017-07-14 00:36:11,700 INFO [HiveServer2-Background-Pool: Thread-1214]: metastore.HiveMetaStore (HiveMetaStore.java:logInfo(859)) - 42: get_table : db=default tbl=source
2017-07-14 00:36:11,701 INFO [HiveServer2-Background-Pool: Thread-1214]: HiveMetaStore.audit (HiveMetaStore.java:logAuditEvent(410)) - ugi=hrt_qa ip=unknown-ip-addr cmd=get_table : db=default tbl=source
2017-07-14 00:36:11,713 INFO [HiveServer2-Background-Pool: Thread-1214]: metastore.HiveMetaStore (HiveMetaStore.java:logInfo(859)) - 42: drop_table : db=default tbl=source
2017-07-14 00:36:11,714 INFO [HiveServer2-Background-Pool: Thread-1214]: HiveMetaStore.audit (HiveMetaStore.java:logAuditEvent(410)) - ugi=hrt_qa ip=unknown-ip-addr cmd=drop_table : db=default tbl=source
2017-07-14 00:36:11,985 INFO [HiveServer2-Background-Pool: Thread-1214]: metastore.hivemetastoressimpl (HiveMetaStoreFsImpl.java:deleteDir(41)) - deleting hdfs://ctr-e133-1493418528701-190793-01-000004.hwx.site:8020/apps/hive/warehouse/source
2017-07-14 00:36:11,994 ERROR [HiveServer2-Background-Pool: Thread-1214]: hive.log (MetaStoreUtils.java:logAndThrowMetaException(1254)) - Got exception: java.io.IOException Failed to move to trash: hdfs://ctr-e133-1493418528701-190793-01-000004.hwx.site:8020/apps/hive/warehouse/source
java.io.IOException: Failed to move to trash: hdfs://ctr-e133-1493418528701-190793-01-000004.hwx.site:8020/apps/hive/warehouse/source
        at org.apache.hadoop.fs.TrashPolicyDefault.moveToTrash(TrashPolicyDefault.java:174)
        at org.apache.hadoop.fs.Trash.moveToTrash(Trash.java:109)
        at org.apache.hadoop.fs.Trash.moveToAppropriateTrash(Trash.java:95)
        at org.apache.hadoop.hive.shims.Hadoop23Shims.moveToAppropriateTrash(Hadoop23Shims.java:262)
        at org.apache.hadoop.hive.metastore.HiveMetaStoreFsImpl.deleteDir(HiveMetaStoreFsImpl.java:47)
        at org.apache.hadoop.hive.metastore.Warehouse.deleteDir(Warehouse.java:236)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.deleteTableData(HiveMetaStore.java:1749)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.drop_table_core(HiveMetaStore.java:1715)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.drop_table_with_environment_context(HiveMetaStore.java:1879)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:147)
        at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105)
        at com.sun.proxy.$Proxy13.drop_table_with_environment_context(Unknown Source)
        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.drop_table_with_environment_context(HiveMetaStoreClient.java:2122)
        at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.drop_table_with_environment_context(SessionHiveMetaStoreClient.java:120)
        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:954)
        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:890)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:178)
        at com.sun.proxy.$Proxy14.dropTable(Unknown Source)
        at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1112)
        at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1049)
        at org.apache.hadoop.hive.ql.exec.DDLTask.dropTable(DDLTask.java:4084)
        at org.apache.hadoop.hive.ql.exec.DDLTask.dropTableOrPartitions(DDLTask.java:3940)
        at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:341)
        at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
        at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:89)
        at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1748)
        at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1494)
        at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1291)
        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1158)
        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1153)
        at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:197)
        at org.apache.hive.service.cli.operation.SQLOperation.access$300(SQLOperation.java:76)
        at org.apache.hive.service.cli.operation.SQLOperation$2$1.run(SQLOperation.java:253)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
        at org.apache.hive.service.cli.operation.SQLOperation$2.run(SQLOperation.java:264)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
{noformat}
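The trace shows the IOException from the trash move being turned into a
MetaException in MetaStoreUtils.logAndThrowMetaException, yet the beeline
session above still reported success, so somewhere between
HMSHandler.deleteTableData and the response sent to the client the failure is
reduced to a server-side log entry. The behaviour this ticket asks for is,
roughly, that a data-deletion failure propagates to the caller. A hypothetical
sketch of that contract follows, with invented class and method names and no
claim to match Hive's actual code:
{code:java}
import java.io.IOException;

// Hypothetical illustration of the expected contract; these names are
// invented for this sketch and are not Hive classes.
final class DropTableContractSketch {

  /** Signals that metadata was dropped but the table data was left behind. */
  static final class DataDeletionException extends IOException {
    DataDeletionException(String path, Throwable cause) {
      super("Table metadata was dropped but its data at " + path
          + " could not be deleted", cause);
    }
  }

  /** Minimal stand-in for the filesystem side of the drop. */
  interface WarehouseOps {
    /** Returns false (or throws) when the directory cannot be removed. */
    boolean deleteDir(String path) throws IOException;
  }

  /**
   * Drops the metadata first, then deletes the data. Any data-deletion
   * failure is propagated to the caller instead of being swallowed into the
   * server log, so the client sees an error rather than "No rows affected".
   */
  static void dropManagedTable(Runnable dropMetadata, WarehouseOps wh,
                               String tableDir) throws IOException {
    dropMetadata.run();

    boolean deleted;
    try {
      deleted = wh.deleteDir(tableDir);
    } catch (IOException e) {
      throw new DataDeletionException(tableDir, e);
    }
    if (!deleted) {
      throw new DataDeletionException(tableDir, null);
    }
  }
}
{code}
A client-side regression check would then simply expect the DROP TABLE
statement to raise an error through JDBC whenever the warehouse directory
cannot be removed.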
was:
When a drop table call on a managed table fails to drop data due to some
permission issues, the relevant exceptions from metastore fail to show up on
the client. When a table with the same name is created again it shows data from
the previous time.
{code:java}
0: jdbc:hive2://ctr-e133-1493418528701-190793> drop table source;
No rows affected (0.534 seconds)
0: jdbc:hive2://ctr-e133-1493418528701-190793> create table source (name
varchar(50), age tinyint, num_clicks bigint) clustered by (name) into 2 buckets
stored as orc tblproperties('transactional'='true');
No rows affected (0.256 seconds)
0: jdbc:hive2://ctr-e133-1493418528701-190793> select * from source;
+--------------------+-------------------+--------------------------+--+
| source.name | source.age | source.num_clicks |
+--------------------+-------------------+--------------------------+--+
| amy | 35 | 12341234 |
| bob | 66 | 123471234871239847 |
| amy | 35 | 12341234 |
| bob | 66 | 123471234871239847 |
| amy | 35 | 12341234 |
| bob | 66 | 123471234871239847 |
| amy | 35 | 12341234 |
| bob | 66 | 123471234871239847 |
| amy | 35 | 12341234 |
| bob | 66 | 123471234871239847 |
| cal | 21 | 431 |
| cal | 21 | 431 |
| cal | 21 | 431 |
| cal | 21 | 431 |
| cal | 21 | 431 |
+--------------------+-------------------+--------------------------+--+
15 rows selected (0.223 seconds)
{code}
Hiveserver2 logs:
{noformat}
2017-07-14 00:36:11,539 INFO [HiveServer2-Background-Pool: Thread-1214]:
ql.Driver (Driver.java:execute(1411)) - Starting
command(queryId=hive_20170714003611_e82a07e3-83aa-451e-b4dd-6beb7382c981): drop
table merge_source
2017-07-14 00:36:11,539 INFO [HiveServer2-Background-Pool: Thread-1214]:
hooks.ATSHook (ATSHook.java:<init>(114)) - Created ATS Hook
2017-07-14 00:36:11,539 INFO [HiveServer2-Background-Pool: Thread-1214]:
log.PerfLogger (PerfLogger.java:PerfLogBegin(149)) - <PERFLOG
method=PreHook.org.apache.hadoop.hive.ql.hooks.ATSHook
from=org.apache.hadoop.hive.ql.Driver>
2017-07-14 00:36:11,540 INFO [HiveServer2-Background-Pool: Thread-1214]:
log.PerfLogger (PerfLogger.java:PerfLogEnd(177)) - </PERFLOG
method=PreHook.org.apache.hadoop.hive.ql.hooks.ATSHook start=1499992571539
end=1499992571540 duration=1 from=org.apache.hadoop.hive.ql.Driver>
2017-07-14 00:36:11,540 INFO [HiveServer2-Background-Pool: Thread-1214]:
log.PerfLogger (PerfLogger.java:PerfLogBegin(149)) - <PERFLOG method=runTasks
from=org.apache.hadoop.hive.ql.Driver>
2017-07-14 00:36:11,541 INFO [HiveServer2-Background-Pool: Thread-1214]:
ql.Driver (Driver.java:launchTask(1746)) - Starting task [Stage-0:DDL] in
serial mode
2017-07-14 00:36:11,541 INFO [HiveServer2-Background-Pool: Thread-1214]:
metastore.HiveMetaStore (HiveMetaStore.java:logInfo(859)) - 42: get_table :
db=default tbl=merge_source
2017-07-14 00:36:11,541 INFO [HiveServer2-Background-Pool: Thread-1214]:
HiveMetaStore.audit (HiveMetaStore.java:logAuditEvent(410)) - ugi=hrt_qa
ip=unknown-ip-addr cmd=get_table : db=default tbl=merge_source
2017-07-14 00:36:11,670 WARN [HiveServer2-Background-Pool: Thread-1214]:
conf.HiveConf (HiveConf.java:initialize(3067)) - HiveConf of name
hive.internal.ss.authz.settings.applied.marker does not exist
2017-07-14 00:36:11,670 INFO [HiveServer2-Background-Pool: Thread-1214]:
metastore.HiveMetaStore (HiveMetaStore.java:newRawStoreForConf(705)) - 42:
Opening raw store with implementation
class:org.apache.hadoop.hive.metastore.ObjectStore
2017-07-14 00:36:11,678 INFO [HiveServer2-Background-Pool: Thread-1214]:
metastore.ObjectStore (ObjectStore.java:initializeHelper(377)) - ObjectStore,
initialize called
2017-07-14 00:36:11,683 INFO [HiveServer2-Background-Pool: Thread-1214]:
metastore.MetaStoreDirectSql (MetaStoreDirectSql.java:<init>(163)) - Using
direct SQL, underlying DB is ORACLE
2017-07-14 00:36:11,683 INFO [HiveServer2-Background-Pool: Thread-1214]:
metastore.ObjectStore (ObjectStore.java:setConf(291)) - Initialized ObjectStore
2017-07-14 00:36:11,698 INFO [HiveServer2-Background-Pool: Thread-1214]:
metadata.HiveUtils (HiveUtils.java:getMetaStoreAuthorizeProviderManagers(355))
- Adding metastore authorization provider:
org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider
2017-07-14 00:36:11,700 INFO [HiveServer2-Background-Pool: Thread-1214]:
metastore.HiveMetaStore (HiveMetaStore.java:logInfo(859)) - 42: get_table :
db=default tbl=merge_source
2017-07-14 00:36:11,701 INFO [HiveServer2-Background-Pool: Thread-1214]:
HiveMetaStore.audit (HiveMetaStore.java:logAuditEvent(410)) - ugi=hrt_qa
ip=unknown-ip-addr cmd=get_table : db=default tbl=merge_source
2017-07-14 00:36:11,713 INFO [HiveServer2-Background-Pool: Thread-1214]:
metastore.HiveMetaStore (HiveMetaStore.java:logInfo(859)) - 42: drop_table :
db=default tbl=merge_source
2017-07-14 00:36:11,714 INFO [HiveServer2-Background-Pool: Thread-1214]:
HiveMetaStore.audit (HiveMetaStore.java:logAuditEvent(410)) - ugi=hrt_qa
ip=unknown-ip-addr cmd=drop_table : db=default tbl=merge_source
2017-07-14 00:36:11,985 INFO [HiveServer2-Background-Pool: Thread-1214]:
metastore.hivemetastoressimpl (HiveMetaStoreFsImpl.java:deleteDir(41)) -
deleting
hdfs://ctr-e133-1493418528701-190793-01-000004.hwx.site:8020/apps/hive/warehouse/merge_source
2017-07-14 00:36:11,994 ERROR [HiveServer2-Background-Pool: Thread-1214]:
hive.log (MetaStoreUtils.java:logAndThrowMetaException(1254)) - Got exception:
java.io.IOException Failed to move to trash:
hdfs://ctr-e133-1493418528701-190793-01-000004.hwx.site:8020/apps/hive/warehouse/merge_source
java.io.IOException: Failed to move to trash:
hdfs://ctr-e133-1493418528701-190793-01-000004.hwx.site:8020/apps/hive/warehouse/merge_source
at
org.apache.hadoop.fs.TrashPolicyDefault.moveToTrash(TrashPolicyDefault.java:174)
at org.apache.hadoop.fs.Trash.moveToTrash(Trash.java:109)
at org.apache.hadoop.fs.Trash.moveToAppropriateTrash(Trash.java:95)
at
org.apache.hadoop.hive.shims.Hadoop23Shims.moveToAppropriateTrash(Hadoop23Shims.java:262)
at
org.apache.hadoop.hive.metastore.HiveMetaStoreFsImpl.deleteDir(HiveMetaStoreFsImpl.java:47)
at
org.apache.hadoop.hive.metastore.Warehouse.deleteDir(Warehouse.java:236)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.deleteTableData(HiveMetaStore.java:1749)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.drop_table_core(HiveMetaStore.java:1715)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.drop_table_with_environment_context(HiveMetaStore.java:1879)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at
org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:147)
at
org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105)
at com.sun.proxy.$Proxy13.drop_table_with_environment_context(Unknown
Source)
at
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.drop_table_with_environment_context(HiveMetaStoreClient.java:2122)
at
org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.drop_table_with_environment_context(SessionHiveMetaStoreClient.java:120)
at
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:954)
at
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:890)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:178)
at com.sun.proxy.$Proxy14.dropTable(Unknown Source)
at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1112)
at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1049)
at org.apache.hadoop.hive.ql.exec.DDLTask.dropTable(DDLTask.java:4084)
at
org.apache.hadoop.hive.ql.exec.DDLTask.dropTableOrPartitions(DDLTask.java:3940)
at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:341)
at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
at
org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:89)
at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1748)
at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1494)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1291)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1158)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1153)
at
org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:197)
at
org.apache.hive.service.cli.operation.SQLOperation.access$300(SQLOperation.java:76)
at
org.apache.hive.service.cli.operation.SQLOperation$2$1.run(SQLOperation.java:253)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
at
org.apache.hive.service.cli.operation.SQLOperation$2.run(SQLOperation.java:264)
at
java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at
java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
{noformat}
> Hive drop table fails to throw exceptions on client when data deletion does not succeed
> ---------------------------------------------------------------------------------------
>
>                Key: HIVE-17101
>                URL: https://issues.apache.org/jira/browse/HIVE-17101
>            Project: Hive
>         Issue Type: Bug
>         Components: Hive
>           Reporter: Kavan Suresh
>
--
This message was sent by Atlassian JIRA
(v6.4.14#64029)