Hi,

When we execute a DROP PARTITION command on a Hive external table from
spark-shell, we get the error below. The same command works fine from the
hive shell.

The table has just two records:

Spark version: 1.5.2

scala> hiveCtx.sql("select * from
spark_2_test").collect().foreach(println);
[1210,xcv,2016-10-10]
[1210,xcv,2016-10-11]
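
As a sanity check, the partitions can also be listed from the same session
(in Spark 1.5 this is passed through to Hive as a native command); both
dates should appear if the metastore metadata is intact:

scala> hiveCtx.sql("SHOW PARTITIONS spark_2_test").collect().foreach(println)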

Show create table:

CREATE EXTERNAL TABLE `spark_2_test`(
  `name` string,
  `dept` string)
PARTITIONED BY (
  `server_date` date)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
STORED AS INPUTFORMAT
  'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION
  'hdfs://xxxx/spark/sp'
TBLPROPERTIES (
  'STATS_GENERATED_VIA_STATS_TASK'='true',
  'transient_lastDdlTime'='1485202737')


scala> hiveCtx.sql("ALTER TABLE spark_2_test DROP IF EXISTS PARTITION
(server_date ='2016-10-10')")
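
Since the same DDL succeeds from the hive shell, one possible interim
workaround (just a sketch; it assumes the hive binary is on the PATH of the
host running the driver) is to shell out to the Hive CLI from spark-shell,
though we would prefer to understand why the in-process command fails:

scala> import sys.process._
scala> Seq("hive", "-e",
     |   "ALTER TABLE spark_2_test DROP IF EXISTS PARTITION (server_date='2016-10-10')").!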



Thanks in advance,
Subacini

17/01/23 22:09:04 ERROR Driver: FAILED: SemanticException [Error 10006]: Partition not found (server_date = 2016-10-10)
org.apache.hadoop.hive.ql.parse.SemanticException: Partition not found (server_date = 2016-10-10)
    at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.addTableDropPartsOutputs(DDLSemanticAnalyzer.java:3178)
    at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.analyzeAlterTableDropParts(DDLSemanticAnalyzer.java:2694)
    at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.analyzeInternal(DDLSemanticAnalyzer.java:278)
    at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:227)
    at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:424)
    at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:308)
    at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1122)
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1170)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:451)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:440)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:278)
    at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:233)
    at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:270)
    at org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:440)
    at org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:430)
    at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:561)
    at org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:33)
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:57)
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:57)
    at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:69)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:140)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:138)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:138)
    at org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:933)
    at org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:933)
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:144)
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:129)
    at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:51)
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:725)
    at $line105.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:24)
    at $line105.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:29)
    at $line105.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:31)
    at $line105.$read$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:33)
    at $line105.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:35)
    at $line105.$read$$iwC$$iwC$$iwC.<init>(<console>:37)
    at $line105.$read$$iwC$$iwC.<init>(<console>:39)
    at $line105.$read$$iwC.<init>(<console>:41)
    at $line105.$read.<init>(<console>:43)
    at $line105.$read$.<init>(<console>:47)
    at $line105.$read$.<clinit>(<console>)
    at $line105.$eval$.<init>(<console>:7)
    at $line105.$eval$.<clinit>(<console>)
    at $line105.$eval.$print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
    at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
    at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:685)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: MetaException(message:Unable to find class: 㐀org.apache.hadoop.hive.ql.udf.generic.G
Serialization trace:
typeInfo (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc))
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_partitions_by_expr_result$get_partitions_by_expr_resultStandardScheme.read(ThriftHiveMetastore.java)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_partitions_by_expr_result$get_partitions_by_expr_resultStandardScheme.read(ThriftHiveMetastore.java)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_partitions_by_expr_result.read(ThriftHiveMetastore.java)
    at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:78)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_partitions_by_expr(ThriftHiveMetastore.java:2277)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_partitions_by_expr(ThriftHiveMetastore.java:2264)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.listPartitionsByExpr(HiveMetaStoreClient.java:1130)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156)
    at com.sun.proxy.$Proxy52.listPartitionsByExpr(Unknown Source)
    at org.apache.hadoop.hive.ql.metadata.Hive.getPartitionsByExpr(Hive.java:2289)
    at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.addTableDropPartsOutputs(DDLSemanticAnalyzer.java:3176)
    ... 77 more
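
The garbled class name in the MetaException above (shown verbatim; the
"Serialization trace:" format is Kryo's) suggests the partition filter
expression (ExprNodeGenericFuncDesc) that the client serializes for
get_partitions_by_expr is not being deserialized correctly by the
metastore, possibly a Hive/Kryo version mismatch between the Spark 1.5.2
built-in Hive client and our metastore. If so, dropping the partition by
its literal values should bypass the expression path entirely. A minimal
sketch (the database name "default" is a placeholder; deleteData = false
keeps the files in place, matching EXTERNAL table semantics):

import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient
import scala.collection.JavaConverters._

val client = new HiveMetaStoreClient(new HiveConf())
// Drop by literal partition values rather than by a filter expression,
// so get_partitions_by_expr (and its serialized filter) is never called.
client.dropPartition("default", "spark_2_test", Seq("2016-10-10").asJava, false)
client.close()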

17/01/23 22:09:04 ERROR ClientWrapper:
======================
HIVE FAILURE OUTPUT
======================
SET hive.support.sql11.reserved.keywords=false
FAILED: SemanticException java.lang.IllegalArgumentException: java.net.UnknownHostException: user
OK
FAILED: SemanticException java.lang.IllegalArgumentException: java.net.UnknownHostException: user
FAILED: SemanticException java.lang.IllegalArgumentException: java.net.UnknownHostException: user
OK
FAILED: SemanticException Column server_date not found
FAILED: SemanticException Column server_date not found
OK
FAILED: SemanticException Column server_date not found
OK
FAILED: SemanticException Column server_date not found
FAILED: SemanticException Column server_date not found
OK
OK
OK
FAILED: SemanticException [Error 10006]: Partition not found (server_date = 2016-10-10)
OK
FAILED: SemanticException Column server_date not found
OK
FAILED: SemanticException [Error 10006]: Partition not found (server_date = 2016-10-10)

======================
END HIVE FAILURE OUTPUT
======================
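
(Separately: the repeated java.net.UnknownHostException: user entries above
are probably unrelated to the partition problem. They usually mean some
statement referenced a location like hdfs://user/..., where URI parsing
treats "user" as the NameNode host. A quick illustration, with a
hypothetical path:

scala> import org.apache.hadoop.fs.Path
scala> new Path("hdfs://user/hive/warehouse").toUri.getHost  // parsed host is "user", not a directory

Worth checking those statements independently.)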

org.apache.spark.sql.execution.QueryExecutionException: FAILED: SemanticException [Error 10006]: Partition not found (server_date = 2016-10-10)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:455)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:440)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:278)
    at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:233)
    at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:270)
    at org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:440)
    at org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:430)
    at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:561)
    at org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:33)
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:57)
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:57)
    at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:69)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:140)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:138)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:138)
    at org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:933)
    at org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:933)
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:144)
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:129)
    at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:51)
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:725)
    at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:24)
    at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:29)
    at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:31)
    at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:33)
    at $iwC$$iwC$$iwC$$iwC.<init>(<console>:35)
    at $iwC$$iwC$$iwC.<init>(<console>:37)
    at $iwC$$iwC.<init>(<console>:39)
    at $iwC.<init>(<console>:41)
    at <init>(<console>:43)
    at .<init>(<console>:47)
    at .<clinit>(<console>)
    at .<init>(<console>:7)
    at .<clinit>(<console>)
    at $print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
    at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
    at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:685)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
