Sai Hemanth Gantasala created HIVE-28350:
--------------------------------------------

             Summary: Drop remote database succeeds but fails while deleting 
the data under it
                 Key: HIVE-28350
                 URL: https://issues.apache.org/jira/browse/HIVE-28350
             Project: Hive
          Issue Type: Sub-task
          Components: Hive, Standalone Metastore
            Reporter: Sai Hemanth Gantasala
            Assignee: Sai Hemanth Gantasala


The drop remote database operation succeeds but fails towards the end while 
clearing the data under the database's location, because the 'locationUri' 
field does not appear to be set when the database object is fetched via JDO.
{code:java}
> drop database pg_hive_tests;
INFO  : Compiling 
command(queryId=hive_20240625161645_bbe11908-8d1c-46d7-9a02-1ef2091e1b86): drop 
database pg_hive_tests
INFO  : Semantic Analysis Completed (retrial = false)
INFO  : Created Hive schema: Schema(fieldSchemas:null, properties:null)
INFO  : Completed compiling 
command(queryId=hive_20240625161645_bbe11908-8d1c-46d7-9a02-1ef2091e1b86); Time 
taken: 0.115 seconds
INFO  : Executing 
command(queryId=hive_20240625161645_bbe11908-8d1c-46d7-9a02-1ef2091e1b86): drop 
database pg_hive_tests
INFO  : Starting task [Stage-0:DDL] in serial mode
ERROR : Failed
org.apache.hadoop.hive.ql.metadata.HiveException: 
MetaException(message:java.lang.IllegalArgumentException: Can not create a Path 
from a null string)
    at org.apache.hadoop.hive.ql.metadata.Hive.dropDatabase(Hive.java:716) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at 
org.apache.hadoop.hive.ql.ddl.database.drop.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
 ~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at org.apache.hadoop.hive.ql.ddl.DDLTask.execute(DDLTask.java:84) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:213) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at 
org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:356) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:329) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:246) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:107) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:813) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:550) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:544) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:190) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at 
org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:235)
 ~[hive-service-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at 
org.apache.hive.service.cli.operation.SQLOperation.access$700(SQLOperation.java:92)
 ~[hive-service-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at 
org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:340)
 ~[hive-service-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_232]
    at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_232]
    at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)
 ~[hadoop-common-3.1.1.7.2.18.0-641.jar:?]
    at 
org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:360)
 ~[hive-service-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) 
~[?:1.8.0_232]
    at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_232]
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) 
~[?:1.8.0_232]
    at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_232]
    at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) 
~[?:1.8.0_232]
    at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) 
~[?:1.8.0_232]
    at java.lang.Thread.run(Thread.java:748) ~[?:1.8.0_232]
Caused by: org.apache.hadoop.hive.metastore.api.MetaException: 
java.lang.IllegalArgumentException: Can not create a Path from a null string
    at 
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$drop_database_result$drop_database_resultStandardScheme.read(ThriftHiveMetastore.java:56524)
 ~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at 
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$drop_database_result$drop_database_resultStandardScheme.read(ThriftHiveMetastore.java:56492)
 ~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at 
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$drop_database_result.read(ThriftHiveMetastore.java:56426)
 ~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:88) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at 
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_drop_database(ThriftHiveMetastore.java:1461)
 ~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at 
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.drop_database(ThriftHiveMetastore.java:1446)
 ~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropDatabase(HiveMetaStoreClient.java:1670)
 ~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
~[?:1.8.0_232]
    at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
~[?:1.8.0_232]
    at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:1.8.0_232]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_232]
    at 
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:216)
 ~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at com.sun.proxy.$Proxy53.dropDatabase(Unknown Source) ~[?:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
~[?:1.8.0_232]
    at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
~[?:1.8.0_232]
    at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:1.8.0_232]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_232]
    at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:4456)
 ~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    at com.sun.proxy.$Proxy53.dropDatabase(Unknown Source) ~[?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.dropDatabase(Hive.java:712) 
~[hive-exec-3.1.3000.7.2.18.0-641.jar:3.1.3000.7.2.18.0-641]
    ... 26 more {code}



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to