You need to have the DataNucleus jars on your classpath.  Merging them into an
uber jar does not work: DataNucleus relies on plugin metadata in each jar that
gets clobbered when the jars are merged.
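
For example, if you build with sbt-assembly, one way to do this (a rough
sketch; the jar paths and versions below are placeholders for whatever ships
under lib_managed in your Spark build, so adjust them) is to exclude the
DataNucleus jars from the assembly:

  // build.sbt: keep the datanucleus-* jars out of the uber jar
  excludedJars in assembly := {
    val cp = (fullClasspath in assembly).value
    cp.filter(_.data.getName.startsWith("datanucleus"))
  }

and then hand those jars (plus your hive-site.xml) to spark-submit so they
still end up on the classpath, e.g.:

  spark-submit --class HqlTest \
    --jars lib_managed/jars/datanucleus-core-3.2.2.jar,lib_managed/jars/datanucleus-api-jdo-3.2.1.jar,lib_managed/jars/datanucleus-rdbms-3.2.1.jar \
    --files conf/hive-site.xml \
    your-assembly.jar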


On Wed, Aug 27, 2014 at 1:44 AM, centerqi hu <cente...@gmail.com> wrote:

> Hi all,
>
> When I run a simple SQL statement, I encounter the following error.
>
> Hive: 0.12 (metastore in MySQL)
> Hadoop: 2.4.1
> Spark: 1.0.2, built with Hive
>
> My HQL code:
>
> import org.apache.spark.{SparkConf, SparkContext}
> import org.apache.spark.sql._
> import org.apache.spark.sql.hive.LocalHiveContext
>
> object HqlTest {
>   case class Record(key: Int, value: String)
>
>   def main(args: Array[String]) {
>     val sparkConf = new SparkConf().setAppName("HiveFromSpark")
>     val sc = new SparkContext(sparkConf)
>     val hiveContext = new org.apache.spark.sql.hive.HiveContext(sc)
>     import hiveContext._
>     hql("CREATE TABLE IF NOT EXISTS src (key INT, value STRING)")
>   }
> }
>
>
> 14/08/27 16:07:08 INFO HiveMetaStore: 0: Opening raw store with
> implemenation class:org.apache.hadoop.hive.metastore.ObjectStore
> 14/08/27 16:07:08 INFO ObjectStore: ObjectStore, initialize called
> org.apache.hadoop.hive.ql.metadata.HiveException: Unable to fetch table src
> at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:958)
> at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:905)
> at
> org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeCreateTable(SemanticAnalyzer.java:8999)
> at
> org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:8313)
> at
> org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:284)
> at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:441)
> at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:342)
> at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:977)
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:888)
> at org.apache.spark.sql.hive.HiveContext.runHive(HiveContext.scala:189)
> at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:163)
> at
> org.apache.spark.sql.hive.execution.NativeCommand.sideEffectResult$lzycompute(NativeCommand.scala:35)
> at
> org.apache.spark.sql.hive.execution.NativeCommand.sideEffectResult(NativeCommand.scala:35)
> at
> org.apache.spark.sql.hive.execution.NativeCommand.execute(NativeCommand.scala:38)
> at
> org.apache.spark.sql.hive.HiveContext$QueryExecution.toRdd$lzycompute(HiveContext.scala:250)
> at
> org.apache.spark.sql.hive.HiveContext$QueryExecution.toRdd(HiveContext.scala:250)
> at org.apache.spark.sql.SchemaRDDLike$class.$init$(SchemaRDDLike.scala:58)
> at org.apache.spark.sql.SchemaRDD.<init>(SchemaRDD.scala:104)
> at org.apache.spark.sql.hive.HiveContext.hiveql(HiveContext.scala:75)
> at org.apache.spark.sql.hive.HiveContext.hql(HiveContext.scala:78)
> at HqlTest$.main(HqlTest.scala:15)
> at HqlTest.main(HqlTest.scala)
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.lang.reflect.Method.invoke(Method.java:606)
> at
> org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:186)
> Caused by: java.lang.RuntimeException: Unable to instantiate
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient
> at
> org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1212)
> at
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:62)
> at
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:72)
> at
> org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2372)
> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2383)
> at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:950)
> ... 26 more
> Caused by: java.lang.reflect.InvocationTargetException
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> at
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
> at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
> at
> org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1210)
> ... 31 more
> Caused by: javax.jdo.JDOFatalUserException: Class
> org.datanucleus.api.jdo.JDOPersistenceManagerFactory was not found.
> NestedThrowables:
> java.lang.ClassNotFoundException:
> org.datanucleus.api.jdo.JDOPersistenceManagerFactory
> at
> javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1175)
> at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
> at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
> at
> org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:275)
> at
> org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:304)
> at
> org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:234)
> at
> org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:209)
> at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)
> at
> org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
> at
> org.apache.hadoop.hive.metastore.RetryingRawStore.<init>(RetryingRawStore.java:64)
> at
> org.apache.hadoop.hive.metastore.RetryingRawStore.getProxy(RetryingRawStore.java:73)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:415)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:402)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:441)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:326)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.<init>(HiveMetaStore.java:286)
> at
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:54)
> at
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:59)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore.newHMSHandler(HiveMetaStore.java:4060)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:121)
> ... 36 more
> Caused by: java.lang.ClassNotFoundException:
> org.datanucleus.api.jdo.JDOPersistenceManagerFactory
> at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
> at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
> at java.security.AccessController.doPrivileged(Native Method)
> at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> at java.lang.Class.forName0(Native Method)
> at java.lang.Class.forName(Class.java:270)
> at javax.jdo.JDOHelper$18.run(JDOHelper.java:2018)
> at javax.jdo.JDOHelper$18.run(JDOHelper.java:2016)
> at java.security.AccessController.doPrivileged(Native Method)
> at javax.jdo.JDOHelper.forName(JDOHelper.java:2015)
> at
> javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1162)
> ... 55 more
> 14/08/27 16:07:08 INFO Driver: Semantic Analysis Completed
> 14/08/27 16:07:08 INFO Driver: </PERFLOG method=semanticAnalyze
> start=1409126828417 end=1409126828624 duration=207>
> 14/08/27 16:07:08 INFO Driver: Returning Hive schema:
> Schema(fieldSchemas:null, properties:null)
> 14/08/27 16:07:08 INFO Driver: </PERFLOG method=compile
> start=1409126828395 end=1409126828631 duration=236>
> 14/08/27 16:07:08 INFO Driver: <PERFLOG method=Driver.execute>
> 14/08/27 16:07:08 INFO deprecation: mapred.job.name is deprecated.
> Instead, use mapreduce.job.name
> 14/08/27 16:07:08 INFO Driver: Starting command: CREATE TABLE IF NOT
> EXISTS src (key INT, value STRING)
> 14/08/27 16:07:08 INFO Driver: </PERFLOG method=TimeToSubmit
> start=1409126828394 end=1409126828634 duration=240>
> 14/08/27 16:07:08 INFO Driver: <PERFLOG method=runTasks>
> 14/08/27 16:07:08 INFO Driver: <PERFLOG method=task.DDL.Stage-0>
> 14/08/27 16:07:08 INFO DDLTask: Default to LazySimpleSerDe for table src
> 14/08/27 16:07:08 INFO HiveMetaStore: 0: Opening raw store with
> implemenation class:org.apache.hadoop.hive.metastore.ObjectStore
> 14/08/27 16:07:08 INFO ObjectStore: ObjectStore, initialize called
> 14/08/27 16:07:08 ERROR DDLTask:
> org.apache.hadoop.hive.ql.metadata.HiveException:
> java.lang.RuntimeException: Unable to instantiate
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient
> at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:602)
> at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:3661)
> at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:252)
> at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:151)
> at
> org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:65)
> at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1414)
> at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1192)
> at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1020)
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:888)
> at org.apache.spark.sql.hive.HiveContext.runHive(HiveContext.scala:189)
> at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:163)
> at
> org.apache.spark.sql.hive.execution.NativeCommand.sideEffectResult$lzycompute(NativeCommand.scala:35)
> at
> org.apache.spark.sql.hive.execution.NativeCommand.sideEffectResult(NativeCommand.scala:35)
> at
> org.apache.spark.sql.hive.execution.NativeCommand.execute(NativeCommand.scala:38)
> at
> org.apache.spark.sql.hive.HiveContext$QueryExecution.toRdd$lzycompute(HiveContext.scala:250)
> at
> org.apache.spark.sql.hive.HiveContext$QueryExecution.toRdd(HiveContext.scala:250)
> at org.apache.spark.sql.SchemaRDDLike$class.$init$(SchemaRDDLike.scala:58)
> at org.apache.spark.sql.SchemaRDD.<init>(SchemaRDD.scala:104)
> at org.apache.spark.sql.hive.HiveContext.hiveql(HiveContext.scala:75)
> at org.apache.spark.sql.hive.HiveContext.hql(HiveContext.scala:78)
> at HqlTest$.main(HqlTest.scala:15)
> at HqlTest.main(HqlTest.scala)
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.lang.reflect.Method.invoke(Method.java:606)
> at
> org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:186)
> Caused by: java.lang.RuntimeException: Unable to instantiate
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient
> at
> org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1212)
> at
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:62)
> at
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:72)
> at
> org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2372)
> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2383)
> at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:596)
> ... 26 more
> Caused by: java.lang.reflect.InvocationTargetException
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> at
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
> at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
> at
> org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1210)
> ... 31 more
> Caused by: javax.jdo.JDOFatalUserException: Class
> org.datanucleus.api.jdo.JDOPersistenceManagerFactory was not found.
> NestedThrowables:
> java.lang.ClassNotFoundException:
> org.datanucleus.api.jdo.JDOPersistenceManagerFactory
> at
> javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1175)
> at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
> at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
> at
> org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:275)
> at
> org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:304)
> at
> org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:234)
> at
> org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:209)
> at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)
> at
> org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
> at
> org.apache.hadoop.hive.metastore.RetryingRawStore.<init>(RetryingRawStore.java:64)
> at
> org.apache.hadoop.hive.metastore.RetryingRawStore.getProxy(RetryingRawStore.java:73)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:415)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:402)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:441)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:326)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.<init>(HiveMetaStore.java:286)
> at
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:54)
> at
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:59)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStore.newHMSHandler(HiveMetaStore.java:4060)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:121)
> ... 36 more
> Caused by: java.lang.ClassNotFoundException:
> org.datanucleus.api.jdo.JDOPersistenceManagerFactory
> at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
> at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
> at java.security.AccessController.doPrivileged(Native Method)
> at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> at java.lang.Class.forName0(Native Method)
> at java.lang.Class.forName(Class.java:270)
> at javax.jdo.JDOHelper$18.run(JDOHelper.java:2018)
> at javax.jdo.JDOHelper$18.run(JDOHelper.java:2016)
> at java.security.AccessController.doPrivileged(Native Method)
> at javax.jdo.JDOHelper.forName(JDOHelper.java:2015)
> at
> javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1162)
> ... 55 more
>
> 14/08/27 16:07:08 INFO Driver: </PERFLOG method=task.DDL.Stage-0
> start=1409126828634 end=1409126828682 duration=48>
> 14/08/27 16:07:08 ERROR Driver: FAILED: Execution Error, return code 1
> from org.apache.hadoop.hive.ql.exec.DDLTask.
> java.lang.RuntimeException: Unable to instantiate
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient
> 14/08/27 16:07:08 INFO Driver: </PERFLOG method=Driver.execute
> start=1409126828631 end=1409126828686 duration=55>
> 14/08/27 16:07:08 INFO Driver: <PERFLOG method=releaseLocks>
> 14/08/27 16:07:08 INFO Driver: </PERFLOG method=releaseLocks
> start=1409126828686 end=1409126828686 duration=0>
> 14/08/27 16:07:08 INFO Driver: <PERFLOG method=releaseLocks>
> 14/08/27 16:07:08 INFO Driver: </PERFLOG method=releaseLocks
> start=1409126828687 end=1409126828687 duration=0>
> 14/08/27 16:07:08 ERROR HiveContext:
> ======================
> HIVE FAILURE OUTPUT
> ======================
> FAILED: Execution Error, return code 1 from
> org.apache.hadoop.hive.ql.exec.DDLTask. java.lang.RuntimeException:
> Unable to instantiate
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient
>
> ======================
> END HIVE FAILURE OUTPUT
> ======================
>
> 14/08/27 16:07:08 INFO ApplicationMaster: finishApplicationMaster with
> FAILED
> 14/08/27 16:07:08 INFO AMRMClientImpl: Waiting for application to be
> successfully unregistered.
> Exception in thread "Thread-4" java.lang.reflect.InvocationTargetException
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.lang.reflect.Method.invoke(Method.java:606)
> at
> org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:186)
> Caused by: org.apache.spark.sql.execution.QueryExecutionException:
> FAILED: Execution Error, return code 1 from
> org.apache.hadoop.hive.ql.exec.DDLTask. java.lang.RuntimeException:
> Unable to instantiate
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient
> at org.apache.spark.sql.hive.HiveContext.runHive(HiveContext.scala:193)
> at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:163)
> at
> org.apache.spark.sql.hive.execution.NativeCommand.sideEffectResult$lzycompute(NativeCommand.scala:35)
> at
> org.apache.spark.sql.hive.execution.NativeCommand.sideEffectResult(NativeCommand.scala:35)
> at
> org.apache.spark.sql.hive.execution.NativeCommand.execute(NativeCommand.scala:38)
> at
> org.apache.spark.sql.hive.HiveContext$QueryExecution.toRdd$lzycompute(HiveContext.scala:250)
> at
> org.apache.spark.sql.hive.HiveContext$QueryExecution.toRdd(HiveContext.scala:250)
> at org.apache.spark.sql.SchemaRDDLike$class.$init$(SchemaRDDLike.scala:58)
> at org.apache.spark.sql.SchemaRDD.<init>(SchemaRDD.scala:104)
> at org.apache.spark.sql.hive.HiveContext.hiveql(HiveContext.scala:75)
> at org.apache.spark.sql.hive.HiveContext.hql(HiveContext.scala:78)
> at HqlTest$.main(HqlTest.scala:15)
> at HqlTest.main(HqlTest.scala)
> ... 5 more
>
>
> --
> cente...@gmail.com|齐忠
>
