Spark didn't find the MySQL connector JAR on the classpath; check:
https://stackoverflow.com/questions/33192886/com-mysql-jdbc-driver-not-found-on-classpath-while-starting-spark-sql-and-thrift

You can add additional Spark JARs in kylin.properties, e.g.:

kylin.engine.spark.additional-jars=/path/to/mysql-connector-java-5.1.38-bin.jar


2017-03-10 11:37 GMT+08:00 仇同心 <qiutong...@jd.com>:

> Hi all,
>
>    When building the cube with Spark, I met some errors. They seem to be
> linked to Hive; can you help me?
>
>
>
>
>
>
>
> javax.jdo.JDOFatalInternalException: Error creating transactional
> connection factory
>
>         at org.datanucleus.api.jdo.NucleusJDOHelper.
> getJDOExceptionForNucleusException(NucleusJDOHelper.java:587)
>
>         at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.
> freezeConfiguration(JDOPersistenceManagerFactory.java:788)
>
>         at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.
> getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke(
> NativeMethodAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(
> DelegatingMethodAccessorImpl.java:43)
>
>         at java.lang.reflect.Method.invoke(Method.java:606)
>
>         at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
>
>         at java.security.AccessController.doPrivileged(Native Method)
>
>         at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
>
>         at javax.jdo.JDOHelper.invokeGetPersistenceManagerFac
> toryOnImplementation(JDOHelper.java:1166)
>
>         at javax.jdo.JDOHelper.getPersistenceManagerFactory(
> JDOHelper.java:808)
>
>         at javax.jdo.JDOHelper.getPersistenceManagerFactory(
> JDOHelper.java:701)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(
> ObjectStore.java:365)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.
> getPersistenceManager(ObjectStore.java:394)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.
> initialize(ObjectStore.java:291)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.setConf(
> ObjectStore.java:258)
>
>         at org.apache.hadoop.util.ReflectionUtils.setConf(
> ReflectionUtils.java:73)
>
>         at org.apache.hadoop.util.ReflectionUtils.newInstance(
> ReflectionUtils.java:133)
>
>         at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>
> (RawStoreProxy.java:57)
>
>         at org.apache.hadoop.hive.metastore.RawStoreProxy.
> getProxy(RawStoreProxy.java:66)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.newRawStore(HiveMetaStore.java:593)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.getMS(HiveMetaStore.java:571)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.createDefaultDB(HiveMetaStore.java:620)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.init(HiveMetaStore.java:461)
>
>         at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<
> init>(RetryingHMSHandler.java:66)
>
>         at org.apache.hadoop.hive.metastore.RetryingHMSHandler.
> getProxy(RetryingHMSHandler.java:72)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore.
> newRetryingHMSHandler(HiveMetaStore.java:5762)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.
> <init>(HiveMetaStoreClient.java:199)
>
>         at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<
> init>(SessionHiveMetaStoreClient.java:74)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(
> NativeConstructorAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(
> DelegatingConstructorAccessorImpl.java:45)
>
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>
>         at org.apache.hadoop.hive.metastore.MetaStoreUtils.
> newInstance(MetaStoreUtils.java:1521)
>
>         at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>
> (RetryingMetaStoreClient.java:86)
>
>         at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.
> getProxy(RetryingMetaStoreClient.java:132)
>
>         at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.
> getProxy(RetryingMetaStoreClient.java:104)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.
> createMetaStoreClient(Hive.java:3005)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(
> Hive.java:1234)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(
> Hive.java:174)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
>
>         at org.apache.hadoop.hive.ql.session.SessionState.start(
> SessionState.java:503)
>
>         at org.apache.spark.sql.hive.client.ClientWrapper.<init>(
> ClientWrapper.scala:204)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(
> NativeConstructorAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(
> DelegatingConstructorAccessorImpl.java:45)
>
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>
>         at org.apache.spark.sql.hive.client.IsolatedClientLoader.
> createClient(IsolatedClientLoader.scala:249)
>
>         at org.apache.spark.sql.hive.HiveContext.metadataHive$
> lzycompute(HiveContext.scala:327)
>
>         at org.apache.spark.sql.hive.HiveContext.metadataHive(
> HiveContext.scala:237)
>
>         at org.apache.spark.sql.hive.HiveContext.setConf(
> HiveContext.scala:441)
>
>         at org.apache.spark.sql.hive.HiveContext.defaultOverrides(
> HiveContext.scala:226)
>
>         at org.apache.spark.sql.hive.HiveContext.<init>(
> HiveContext.scala:229)
>
>         at org.apache.spark.sql.hive.HiveContext.<init>(
> HiveContext.scala:101)
>
>         at org.apache.kylin.engine.spark.SparkCubingByLayer.execute(
> SparkCubingByLayer.java:156)
>
>         at org.apache.kylin.common.util.AbstractApplication.execute(
> AbstractApplication.java:37)
>
>         at org.apache.kylin.common.util.SparkEntry.main(SparkEntry.
> java:44)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke(
> NativeMethodAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(
> DelegatingMethodAccessorImpl.java:43)
>
>         at java.lang.reflect.Method.invoke(Method.java:606)
>
>         at org.apache.spark.deploy.SparkSubmit$.submit(
> SparkSubmit.scala:206)
>
>         at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.
> scala:121)
>
>         at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
>
> NestedThrowablesStackTrace:
>
> java.lang.reflect.InvocationTargetException
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(
> NativeConstructorAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(
> DelegatingConstructorAccessorImpl.java:45)
>
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>
>         at org.datanucleus.plugin.NonManagedPluginRegistry.
> createExecutableExtension(NonManagedPluginRegistry.java:631)
>
>         at org.datanucleus.plugin.PluginManager.createExecutableExtension(
> PluginManager.java:325)
>
>         at org.datanucleus.store.AbstractStoreManager.
> registerConnectionFactory(AbstractStoreManager.java:282)
>
>         at org.datanucleus.store.AbstractStoreManager.<init>(
> AbstractStoreManager.java:240)
>
>         at org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(
> RDBMSStoreManager.java:286)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(
> NativeConstructorAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(
> DelegatingConstructorAccessorImpl.java:45)
>
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>
>         at org.datanucleus.plugin.NonManagedPluginRegistry.
> createExecutableExtension(NonManagedPluginRegistry.java:631)
>
>         at org.datanucleus.plugin.PluginManager.createExecutableExtension(
> PluginManager.java:301)
>
>         at org.datanucleus.NucleusContext.createStoreManagerForPropertie
> s(NucleusContext.java:1187)
>
>         at org.datanucleus.NucleusContext.initialise(
> NucleusContext.java:356)
>
>         at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.
> freezeConfiguration(JDOPersistenceManagerFactory.java:775)
>
>         at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.
> getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke(
> NativeMethodAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(
> DelegatingMethodAccessorImpl.java:43)
>
>         at java.lang.reflect.Method.invoke(Method.java:606)
>
>         at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
>
>         at java.security.AccessController.doPrivileged(Native Method)
>
>         at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
>
>         at javax.jdo.JDOHelper.invokeGetPersistenceManagerFac
> toryOnImplementation(JDOHelper.java:1166)
>
>         at javax.jdo.JDOHelper.getPersistenceManagerFactory(
> JDOHelper.java:808)
>
>         at javax.jdo.JDOHelper.getPersistenceManagerFactory(
> JDOHelper.java:701)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(
> ObjectStore.java:365)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.
> getPersistenceManager(ObjectStore.java:394)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.
> initialize(ObjectStore.java:291)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.setConf(
> ObjectStore.java:258)
>
>         at org.apache.hadoop.util.ReflectionUtils.setConf(
> ReflectionUtils.java:73)
>
>         at org.apache.hadoop.util.ReflectionUtils.newInstance(
> ReflectionUtils.java:133)
>
>         at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>
> (RawStoreProxy.java:57)
>
>         at org.apache.hadoop.hive.metastore.RawStoreProxy.
> getProxy(RawStoreProxy.java:66)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.newRawStore(HiveMetaStore.java:593)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.getMS(HiveMetaStore.java:571)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.createDefaultDB(HiveMetaStore.java:620)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.init(HiveMetaStore.java:461)
>
>         at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<
> init>(RetryingHMSHandler.java:66)
>
>         at org.apache.hadoop.hive.metastore.RetryingHMSHandler.
> getProxy(RetryingHMSHandler.java:72)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore.
> newRetryingHMSHandler(HiveMetaStore.java:5762)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.
> <init>(HiveMetaStoreClient.java:199)
>
>         at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<
> init>(SessionHiveMetaStoreClient.java:74)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(
> NativeConstructorAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(
> DelegatingConstructorAccessorImpl.java:45)
>
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>
>         at org.apache.hadoop.hive.metastore.MetaStoreUtils.
> newInstance(MetaStoreUtils.java:1521)
>
>         at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>
> (RetryingMetaStoreClient.java:86)
>
>         at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.
> getProxy(RetryingMetaStoreClient.java:132)
>
>         at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.
> getProxy(RetryingMetaStoreClient.java:104)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.
> createMetaStoreClient(Hive.java:3005)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(
> Hive.java:1234)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(
> Hive.java:174)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(
> NativeConstructorAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(
> DelegatingConstructorAccessorImpl.java:45)
>
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>
>         at org.apache.spark.sql.hive.client.IsolatedClientLoader.
> createClient(IsolatedClientLoader.scala:249)
>
>         at org.apache.spark.sql.hive.HiveContext.setConf(
> HiveContext.scala:441)
>
>         at org.apache.spark.sql.hive.HiveContext.<init>(
> HiveContext.scala:101)
>
>         at org.apache.kylin.engine.spark.SparkCubingByLayer.execute(
> SparkCubingByLayer.java:156)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke(
> NativeMethodAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(
> DelegatingMethodAccessorImpl.java:43)
>
>         at java.lang.reflect.Method.invoke(Method.java:606)
>
>         at org.apache.spark.deploy.SparkSubmit$.submit(
> SparkSubmit.scala:206)
>
>         at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.
> scala:121)
>
>         at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
>
>         at org.datanucleus.store.rdbms.ConnectionFactoryImpl.
> generateDataSources(ConnectionFactoryImpl.java:259)
>
>         at org.datanucleus.store.rdbms.ConnectionFactoryImpl.
> initialiseDataSources(ConnectionFactoryImpl.java:131)
>
>         at org.datanucleus.store.rdbms.ConnectionFactoryImpl.<init>(
> ConnectionFactoryImpl.java:85)
>
>         ... 85 more
>
>         at org.datanucleus.store.rdbms.connectionpool.
> AbstractConnectionPoolFactory.loadDriver(AbstractConnectionPoolFactory.
> java:58)
>
>         at org.datanucleus.store.rdbms.ConnectionFactoryImpl.
> generateDataSources(ConnectionFactoryImpl.java:238)
>
>         ... 87 more
>
> 17/03/10 11:17:43 INFO metastore.ObjectStore: ObjectStore, initialize
> called
>
> 17/03/10 11:17:43 INFO DataNucleus.Persistence: Property
> hive.metastore.integral.jdo.pushdown unknown - will be ignored
>
> 17/03/10 11:17:43 INFO DataNucleus.Persistence: Property
> datanucleus.cache.level2 unknown - will be ignored
>
> 17/03/10 11:17:44 WARN metadata.Hive: Failed to access metastore. This
> class should not accessed in runtime.
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(
> Hive.java:1236)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(
> Hive.java:174)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
>
>         at org.apache.hadoop.hive.ql.session.SessionState.start(
> SessionState.java:503)
>
>         at org.apache.spark.sql.hive.client.ClientWrapper.<init>(
> ClientWrapper.scala:204)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(
> NativeConstructorAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(
> DelegatingConstructorAccessorImpl.java:45)
>
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>
>         at org.apache.spark.sql.hive.client.IsolatedClientLoader.
> createClient(IsolatedClientLoader.scala:249)
>
>         at org.apache.spark.sql.hive.HiveContext.metadataHive$
> lzycompute(HiveContext.scala:327)
>
>         at org.apache.spark.sql.hive.HiveContext.metadataHive(
> HiveContext.scala:237)
>
>         at org.apache.spark.sql.hive.HiveContext.setConf(
> HiveContext.scala:441)
>
>         at org.apache.spark.sql.hive.HiveContext.defaultOverrides(
> HiveContext.scala:226)
>
>         at org.apache.spark.sql.hive.HiveContext.<init>(
> HiveContext.scala:229)
>
>         at org.apache.spark.sql.hive.HiveContext.<init>(
> HiveContext.scala:101)
>
>         at org.apache.kylin.engine.spark.SparkCubingByLayer.execute(
> SparkCubingByLayer.java:156)
>
>         at org.apache.kylin.common.util.AbstractApplication.execute(
> AbstractApplication.java:37)
>
>         at org.apache.kylin.common.util.SparkEntry.main(SparkEntry.
> java:44)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke(
> NativeMethodAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(
> DelegatingMethodAccessorImpl.java:43)
>
>         at java.lang.reflect.Method.invoke(Method.java:606)
>
>         at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$
> deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
>
>         at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.
> scala:121)
>
>         at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
>
> Caused by: java.lang.RuntimeException: Unable to instantiate
> org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
>
>         at org.apache.hadoop.hive.metastore.MetaStoreUtils.
> newInstance(MetaStoreUtils.java:1523)
>
>         at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>
> (RetryingMetaStoreClient.java:86)
>
>         at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.
> getProxy(RetryingMetaStoreClient.java:132)
>
>         at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.
> getProxy(RetryingMetaStoreClient.java:104)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.
> createMetaStoreClient(Hive.java:3005)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
>
>         at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(
> Hive.java:1234)
>
>         ... 27 more
>
> Caused by: java.lang.reflect.InvocationTargetException
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(
> NativeConstructorAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(
> DelegatingConstructorAccessorImpl.java:45)
>
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>
>         at org.apache.hadoop.hive.metastore.MetaStoreUtils.
> newInstance(MetaStoreUtils.java:1521)
>
>         ... 33 more
>
> Caused by: javax.jdo.JDOFatalInternalException: Error creating
> transactional connection factory
>
> NestedThrowables:
>
> java.lang.reflect.InvocationTargetException
>
>         at org.datanucleus.api.jdo.NucleusJDOHelper.
> getJDOExceptionForNucleusException(NucleusJDOHelper.java:587)
>
>         at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.
> freezeConfiguration(JDOPersistenceManagerFactory.java:788)
>
>         at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.
> getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>
>         at sun.reflect.NativeMethodAccessorImpl.invoke(
> NativeMethodAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(
> DelegatingMethodAccessorImpl.java:43)
>
>         at java.lang.reflect.Method.invoke(Method.java:606)
>
>         at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
>
>         at java.security.AccessController.doPrivileged(Native Method)
>
>         at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
>
>         at javax.jdo.JDOHelper.invokeGetPersistenceManagerFac
> toryOnImplementation(JDOHelper.java:1166)
>
>         at javax.jdo.JDOHelper.getPersistenceManagerFactory(
> JDOHelper.java:808)
>
>         at javax.jdo.JDOHelper.getPersistenceManagerFactory(
> JDOHelper.java:701)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(
> ObjectStore.java:365)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.
> getPersistenceManager(ObjectStore.java:394)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.
> initialize(ObjectStore.java:291)
>
>         at org.apache.hadoop.hive.metastore.ObjectStore.setConf(
> ObjectStore.java:258)
>
>         at org.apache.hadoop.util.ReflectionUtils.setConf(
> ReflectionUtils.java:73)
>
>         at org.apache.hadoop.util.ReflectionUtils.newInstance(
> ReflectionUtils.java:133)
>
>         at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>
> (RawStoreProxy.java:57)
>
>         at org.apache.hadoop.hive.metastore.RawStoreProxy.
> getProxy(RawStoreProxy.java:66)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.newRawStore(HiveMetaStore.java:593)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.getMS(HiveMetaStore.java:571)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.createDefaultDB(HiveMetaStore.java:624)
>
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$
> HMSHandler.init(HiveMetaStore.java:461)
>
>         at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<
> init>(RetryingHMSHandler.java:66)
>
>         at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<
> init>(SessionHiveMetaStoreClient.java:74)
>
>         ... 38 more
>
> Caused by: java.lang.reflect.InvocationTargetException
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
>
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>
>         at org.datanucleus.store.AbstractStoreManager.
> registerConnectionFactory(AbstractStoreManager.java:282)
>
>         at org.datanucleus.store.AbstractStoreManager.<init>(
> AbstractStoreManager.java:240)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(
> NativeConstructorAccessorImpl.java:57)
>
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(
> DelegatingConstructorAccessorImpl.java:45)
>
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>
>         at org.datanucleus.NucleusContext.createStoreManagerForPropertie
> s(NucleusContext.java:1187)
>
>         at org.datanucleus.NucleusContext.initialise(
> NucleusContext.java:356)
>
>         at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.
> freezeConfiguration(JDOPersistenceManagerFactory.java:775)
>
>         at org.datanucleus.store.rdbms.ConnectionFactoryImpl.
> generateDataSources(ConnectionFactoryImpl.java:259)
>
>         at org.datanucleus.store.rdbms.ConnectionFactoryImpl.
> initialiseDataSources(ConnectionFactoryImpl.java:131)
>
>         at org.datanucleus.store.rdbms.ConnectionFactoryImpl.<init>(
> ConnectionFactoryImpl.java:85)
>
>         ... 85 more
>
>
>
> Caused by: org.datanucleus.exceptions.NucleusException: Attempt to invoke
> the "BONECP" plugin to create a ConnectionPool gave an erro
>
> r : The specified datastore driver ("com.mysql.jdbc.Driver") was not found
> in the CLASSPATH. Please check your CLASSPATH specificatio
>
> n, and the name of the driver.
>
>         at org.datanucleus.store.rdbms.ConnectionFactoryImpl.
> generateDataSources(ConnectionFactoryImpl.java:259)
>
>         at org.datanucleus.store.rdbms.ConnectionFactoryImpl.
> initialiseDataSources(ConnectionFactoryImpl.java:131)
>
>         at org.datanucleus.store.rdbms.ConnectionFactoryImpl.<init>(
> ConnectionFactoryImpl.java:85)
>
>         ... 85 more
>
> Caused by: org.datanucleus.store.rdbms.connectionpool.
> DatastoreDriverNotFoundException: The specified datastore driver
> ("com.mysql.jd
>
> bc.Driver") was not found in the CLASSPATH. Please check your CLASSPATH
> specification, and the name of the driver.
>
>         at org.datanucleus.store.rdbms.connectionpool.
> AbstractConnectionPoolFactory.loadDriver(AbstractConnectionPoolFactory.
> java:58)
>
>         at org.datanucleus.store.rdbms.connectionpool.
> BoneCPConnectionPoolFactory.createConnectionPool(
> BoneCPConnectionPoolFactory.ja
>
> va:54)
>
>         at org.datanucleus.store.rdbms.ConnectionFactoryImpl.
> generateDataSources(ConnectionFactoryImpl.java:238)
>
>         ... 87 more
>



-- 
Best regards,

Shaofeng Shi 史少锋

Reply via email to