On Sat, Jul 9, 2011 at 10:24 PM, Steven Wong <sw...@netflix.com> wrote:

> Has anyone encountered the following exception? It is causing our SELECT
> queries to return incorrect results infrequently.****
>
> ** **
>
> 2011-07-06 13:46:40,225 WARN  DataNucleus.Query
> (Log4JLogger.java:warn(106)) - Query for candidates of
> org.apache.hadoop.hive.metastore.model.MPartition and subclasses resulted in
> no possible candidates****
>
> Exception thrown obtaining schema column information from datastore****
>
> org.datanucleus.exceptions.NucleusDataStoreException: Exception thrown
> obtaining schema column information from datastore****
>
>     at
> org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.refreshTableData(RDBMSSchemaHandler.java:991)
> ****
>
>     at
> org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getRDBMSTableInfoForTable(RDBMSSchemaHandler.java:823)
> ****
>
>     at
> org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getRDBMSTableInfoForTable(RDBMSSchemaHandler.java:772)
> ****
>
>     at
> org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getSchemaData(RDBMSSchemaHandler.java:207)
> ****
>
>     at
> org.datanucleus.store.rdbms.RDBMSStoreManager.getColumnInfoForTable(RDBMSStoreManager.java:1742)
> ****
>
>     at
> org.datanucleus.store.rdbms.table.TableImpl.initializeColumnInfoFromDatastore(TableImpl.java:330)
> ****
>
>     at
> org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.performTablesValidation(RDBMSStoreManager.java:2768)
> ****
>
>     at
> org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.addClassTablesAndValidate(RDBMSStoreManager.java:2546)
> ****
>
>     at
> org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.run(RDBMSStoreManager.java:2191)
> ****
>
>     at
> org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:113)
> ****
>
>     at
> org.datanucleus.store.rdbms.RDBMSStoreManager.addClasses(RDBMSStoreManager.java:966)
> ****
>
>     at
> org.datanucleus.store.AbstractStoreManager.addClass(AbstractStoreManager.java:914)
> ****
>
>     at
> org.datanucleus.store.mapped.MappedStoreManager.getDatastoreClass(MappedStoreManager.java:356)
> ****
>
>     at
> org.datanucleus.store.rdbms.query.RDBMSQueryUtils.getStatementForCandidates(RDBMSQueryUtils.java:298)
> ****
>
>     at
> org.datanucleus.store.rdbms.query.JDOQLQuery.compileQueryFull(JDOQLQuery.java:797)
> ****
>
>     at
> org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:296)
> ****
>
>     at org.datanucleus.store.query.Query.executeQuery(Query.java:1643)****
>
>     at org.datanucleus.store.query.Query.executeWithArray(Query.java:1514)
> ****
>
>     at org.datanucleus.jdo.JDOQuery.execute(JDOQuery.java:266)****
>
>     at
> org.apache.hadoop.hive.metastore.ObjectStore.listPartitionNames(ObjectStore.java:921)
> ****
>
>     at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler$22.run(HiveMetaStore.java:1363)
> ****
>
>     at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler$22.run(HiveMetaStore.java:1360)
> ****
>
>     at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.executeWithRetry(HiveMetaStore.java:234)
> ****
>
>     at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_partition_names(HiveMetaStore.java:1360)
> ****
>
>     at
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.listPartitionNames(HiveMetaStoreClient.java:653)
> ****
>
>     at
> org.apache.hadoop.hive.ql.metadata.Hive.getPartitionNames(Hive.java:1244)*
> ***
>
>     at
> org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner.prune(PartitionPruner.java:178)
> ****
>
>     at
> org.apache.hadoop.hive.ql.optimizer.pcr.PcrOpProcFactory$FilterPCR.process(PcrOpProcFactory.java:112)
> ****
>
>     at
> org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher.dispatch(DefaultRuleDispatcher.java:89)
> ****
>
>     at
> org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatch(DefaultGraphWalker.java:88)
> ****
>
>     at
> org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.walk(DefaultGraphWalker.java:128)
> ****
>
>     at
> org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.startWalking(DefaultGraphWalker.java:102)
> ****
>
>     at
> org.apache.hadoop.hive.ql.optimizer.pcr.PartitionConditionRemover.transform(PartitionConditionRemover.java:78)
> ****
>
>     at
> org.apache.hadoop.hive.ql.optimizer.Optimizer.optimize(Optimizer.java:85)*
> ***
>
>     at
> org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:6609)
> ****
>
>     at
> org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:238)
> ****
>
>     at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:332)****
>
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:686)****
>
>     at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:149)
> ****
>
>     at
> org.apache.hadoop.hive.cli.CliDriver.processLineInternal(CliDriver.java:228)
> ****
>
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:209)
> ****
>
>     at
> org.apache.hadoop.hive.cli.CliDriver.processReader(CliDriver.java:256)****
>
>     at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:368)****
>
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)****
>
>     at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> ****
>
>     at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> ****
>
>     at java.lang.reflect.Method.invoke(Method.java:597)****
>
>     at org.apache.hadoop.util.RunJar.main(RunJar.java:156)****
>
> Caused by: com.mysql.jdbc.exceptions.MySQLSyntaxErrorException: Table
> 'dataoven_prod_hadoop.DELETEME1309959999747' doesn't exist****
>
>     at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:1026)****
>
>     at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:956)****
>
>     at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3491)****
>
>     at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3423)****
>
>     at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:1936)****
>
>     at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2060)****
>
>    at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2536)****
>
>     at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2465)****
>
>     at com.mysql.jdbc.StatementImpl.executeQuery(StatementImpl.java:1386)*
> ***
>
>     at
> com.mysql.jdbc.DatabaseMetaData$2.forEach(DatabaseMetaData.java:2471)****
>
>     at com.mysql.jdbc.IterateBlock.doForAll(IterateBlock.java:50)****
>
>     at
> com.mysql.jdbc.DatabaseMetaData.getColumns(DatabaseMetaData.java:2589)****
>
>     at
> org.apache.commons.dbcp.DelegatingDatabaseMetaData.getColumns(DelegatingDatabaseMetaData.java:218)
> ****
>
>     at
> org.datanucleus.store.rdbms.adapter.DatabaseAdapter.getColumns(DatabaseAdapter.java:1461)
> ****
>
>     at
> org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.refreshTableData(RDBMSSchemaHandler.java:924)
> ****
>
>     ... 47 more****
>
> Nested Throwables StackTrace:****
>
> com.mysql.jdbc.exceptions.MySQLSyntaxErrorException: Table
> 'dataoven_prod_hadoop.DELETEME1309959999747' doesn't exist****
>
>     at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:1026)****
>
>     at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:956)****
>
>     at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3491)****
>
>     at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3423)****
>
>     at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:1936)****
>
>     at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2060)****
>
>     at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2536)****
>
>     at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2465)****
>
>     at com.mysql.jdbc.StatementImpl.executeQuery(StatementImpl.java:1386)*
> ***
>
>     at
> com.mysql.jdbc.DatabaseMetaData$2.forEach(DatabaseMetaData.java:2471)****
>
>     at com.mysql.jdbc.IterateBlock.doForAll(IterateBlock.java:50)****
>
>     at
> com.mysql.jdbc.DatabaseMetaData.getColumns(DatabaseMetaData.java:2589)****
>
>     at
> org.apache.commons.dbcp.DelegatingDatabaseMetaData.getColumns(DelegatingDatabaseMetaData.java:218)
> ****
>
>     at
> org.datanucleus.store.rdbms.adapter.DatabaseAdapter.getColumns(DatabaseAdapter.java:1461)
> ****
>
>     at
> org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.refreshTableData(RDBMSSchemaHandler.java:924)
> ****
>
>     at
> org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getRDBMSTableInfoForTable(RDBMSSchemaHandler.java:823)
> ****
>
>     at
> org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getRDBMSTableInfoForTable(RDBMSSchemaHandler.java:772)
> ****
>
>     at
> org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getSchemaData(RDBMSSchemaHandler.java:207)
> ****
>
>     at
> org.datanucleus.store.rdbms.RDBMSStoreManager.getColumnInfoForTable(RDBMSStoreManager.java:1742)
> ****
>
>     at
> org.datanucleus.store.rdbms.table.TableImpl.initializeColumnInfoFromDatastore(TableImpl.java:330)
> ****
>
>     at
> org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.performTablesValidation(RDBMSStoreManager.java:2768)
> ****
>
>     at
> org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.addClassTablesAndValidate(RDBMSStoreManager.java:2546)
> ****
>
>     at
> org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.run(RDBMSStoreManager.java:2191)
> ****
>
>     at
> org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:113)
> ****
>
>     at
> org.datanucleus.store.rdbms.RDBMSStoreManager.addClasses(RDBMSStoreManager.java:966)
> ****
>
>     at
> org.datanucleus.store.AbstractStoreManager.addClass(AbstractStoreManager.java:914)
> ****
>
>     at
> org.datanucleus.store.mapped.MappedStoreManager.getDatastoreClass(MappedStoreManager.java:356)
> ****
>
>     at
> org.datanucleus.store.rdbms.query.RDBMSQueryUtils.getStatementForCandidates(RDBMSQueryUtils.java:298)
> ****
>
>     at
> org.datanucleus.store.rdbms.query.JDOQLQuery.compileQueryFull(JDOQLQuery.java:797)
> ****
>
>     at
> org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:296)
> ****
>
>     at org.datanucleus.store.query.Query.executeQuery(Query.java:1643)****
>
>     at org.datanucleus.store.query.Query.executeWithArray(Query.java:1514)
> ****
>
>     at org.datanucleus.jdo.JDOQuery.execute(JDOQuery.java:266)****
>
>     at
> org.apache.hadoop.hive.metastore.ObjectStore.listPartitionNames(ObjectStore.java:921)
> ****
>
>     at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler$22.run(HiveMetaStore.java:1363)
> ****
>
>     at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler$22.run(HiveMetaStore.java:1360)
> ****
>
>     at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.executeWithRetry(HiveMetaStore.java:234)
> ****
>
>    at
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_partition_names(HiveMetaStore.java:1360)
> ****
>
>     at
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.listPartitionNames(HiveMetaStoreClient.java:653)
> ****
>
>     at
> org.apache.hadoop.hive.ql.metadata.Hive.getPartitionNames(Hive.java:1244)*
> ***
>
>     at
> org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner.prune(PartitionPruner.java:178)
> ****
>
>     at
> org.apache.hadoop.hive.ql.optimizer.pcr.PcrOpProcFactory$FilterPCR.process(PcrOpProcFactory.java:112)
> ****
>
>     at
> org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher.dispatch(DefaultRuleDispatcher.java:89)
> ****
>
>     at
> org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatch(DefaultGraphWalker.java:88)
> ****
>
>     at
> org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.walk(DefaultGraphWalker.java:128)
> ****
>
>     at
> org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.startWalking(DefaultGraphWalker.java:102)
> ****
>
>     at
> org.apache.hadoop.hive.ql.optimizer.pcr.PartitionConditionRemover.transform(PartitionConditionRemover.java:78)
> ****
>
>     at
> org.apache.hadoop.hive.ql.optimizer.Optimizer.optimize(Optimizer.java:85)*
> ***
>
>     at
> org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:6609)
> ****
>
>     at
> org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:238)
> ****
>
>     at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:332)****
>
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:686)****
>
>     at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:149)
> ****
>
>     at
> org.apache.hadoop.hive.cli.CliDriver.processLineInternal(CliDriver.java:228)
> ****
>
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:209)
> ****
>
>     at
> org.apache.hadoop.hive.cli.CliDriver.processReader(CliDriver.java:256)****
>
>     at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:368)****
>
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)****
>
>     at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> ****
>
>     at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> ****
>
>     at java.lang.reflect.Method.invoke(Method.java:597)****
>
>     at org.apache.hadoop.util.RunJar.main(RunJar.java:156)****
>
> ** **
>

DataNucleus creates these DELETEME tables to probe the datastore's
capabilities: during the session it creates a temporary table and then drops
it. The exception suggests that either your DB user does not have the proper
permissions (e.g. CREATE/DROP), or transactional support (InnoDB) is not
enabled on the MySQL server.

Reply via email to