/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.metastore.tools;

import java.net.URI;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.ObjectStore;

/**
 * This class provides Hive admins a tool to
 * - execute JDOQL against the metastore using DataNucleus
 * - perform HA name node upgrade (rewrite FS root locations stored in the metastore)
 */
public class HiveMetaTool {

  private static final Logger LOG = LoggerFactory.getLogger(HiveMetaTool.class.getName());

  private final Options cmdLineOptions = new Options();
  private ObjectStore objStore;            // created lazily by initObjectStore()
  private boolean isObjStoreInitialized;

  public HiveMetaTool() {
    this.isObjStoreInitialized = false;
  }

  /** Registers all supported command-line options on {@link #cmdLineOptions}. */
  @SuppressWarnings("static-access")
  private void init() {

    System.out.println("Initializing HiveMetaTool..");

    Option help = new Option("help", "print this message");
    Option listFSRoot = new Option("listFSRoot", "print the current FS root locations");
    Option executeJDOQL =
        OptionBuilder.withArgName("query-string")
            .hasArgs()
            .withDescription("execute the given JDOQL query")
            .create("executeJDOQL");

    /* Ideally we want to specify the different arguments to updateLocation as separate argNames.
     * However if we did that, HelpFormatter swallows all but the last argument. Note that this is
     * a known issue with the HelpFormatter class that has not been fixed. We specify all arguments
     * with a single argName to work around this HelpFormatter bug.
     */
    Option updateFSRootLoc =
        OptionBuilder
            .withArgName("new-loc> " + "<old-loc")
            .hasArgs(2)
            .withDescription(
                "Update FS root location in the metastore to new location. Both new-loc and " +
                "old-loc should be valid URIs with valid host names and schemes. " +
                "When run with the dryRun option changes are displayed but are not " +
                "persisted. When run with the serdepropKey/tablePropKey option " +
                "updateLocation looks for the serde-prop-key/table-prop-key that is " +
                "specified and updates its value if found.")
            .create("updateLocation");

    Option dryRun = new Option("dryRun", "Perform a dry run of updateLocation changes. When " +
        "run with the dryRun option updateLocation changes are displayed but not persisted. " +
        "dryRun is valid only with the updateLocation option.");

    // Both property-key options take a single key value; .hasArg() (singular) is used for
    // consistency — the previous .hasArgs() on serdePropKey was an inconsistency, since only
    // getOptionValue() (first value) is ever read.
    Option serdePropKey =
        OptionBuilder.withArgName("serde-prop-key")
            .hasArg()
            .withValueSeparator()
            .withDescription("Specify the key for serde property to be updated. serdePropKey option " +
                "is valid only with updateLocation option.")
            .create("serdePropKey");
    Option tablePropKey =
        OptionBuilder.withArgName("table-prop-key")
            .hasArg()
            .withValueSeparator()
            .withDescription("Specify the key for table property to be updated. tablePropKey option " +
                "is valid only with updateLocation option.")
            .create("tablePropKey");

    cmdLineOptions.addOption(help);
    cmdLineOptions.addOption(listFSRoot);
    cmdLineOptions.addOption(executeJDOQL);
    cmdLineOptions.addOption(updateFSRootLoc);
    cmdLineOptions.addOption(dryRun);
    cmdLineOptions.addOption(serdePropKey);
    cmdLineOptions.addOption(tablePropKey);
  }

  /** Lazily creates and configures the ObjectStore backing all metastore operations. */
  private void initObjectStore(HiveConf hiveConf) {
    if (!isObjStoreInitialized) {
      objStore = new ObjectStore();
      objStore.setConf(hiveConf);
      isObjStoreInitialized = true;
    }
  }

  /** Releases the ObjectStore if one was created; safe to call repeatedly. */
  private void shutdownObjectStore() {
    if (isObjStoreInitialized) {
      objStore.shutdown();
      isObjStoreInitialized = false;
    }
  }

  /** Prints every FS root location currently recorded in the metastore. */
  private void listFSRoot() {
    HiveConf hiveConf = new HiveConf(HiveMetaTool.class);
    initObjectStore(hiveConf);

    Set<String> hdfsRoots = objStore.listFSRoots();
    if (hdfsRoots != null) {
      System.out.println("Listing FS Roots..");
      for (String s : hdfsRoots) {
        System.out.println(s);
      }
    } else {
      // listFSRoots() returns null when the JDO transaction could not be committed.
      System.err.println("Encountered error during listFSRoot - " +
          "commit of JDO transaction failed");
    }
  }

  /** Runs a JDOQL SELECT and prints each result row; closes the query wrapper afterwards. */
  private void executeJDOQLSelect(String query) {
    HiveConf hiveConf = new HiveConf(HiveMetaTool.class);
    initObjectStore(hiveConf);

    System.out.println("Executing query: " + query);
    ObjectStore.QueryWrapper queryWrapper = new ObjectStore.QueryWrapper();
    try {
      Collection<?> result = objStore.executeJDOQLSelect(query, queryWrapper);
      if (result != null) {
        Iterator<?> iter = result.iterator();
        while (iter.hasNext()) {
          Object o = iter.next();
          System.out.println(o.toString());
        }
      } else {
        System.err.println("Encountered error during executeJDOQLSelect - " +
            "commit of JDO transaction failed.");
      }
    } finally {
      // Always release the DataNucleus query resources, even on error.
      queryWrapper.close();
    }
  }

  /**
   * Runs a JDOQL UPDATE statement.
   *
   * @return number of records updated, or a negative value on failure
   */
  private long executeJDOQLUpdate(String query) {
    HiveConf hiveConf = new HiveConf(HiveMetaTool.class);
    initObjectStore(hiveConf);

    System.out.println("Executing query: " + query);
    long numUpdated = objStore.executeJDOQLUpdate(query);
    if (numUpdated >= 0) {
      System.out.println("Number of records updated: " + numUpdated);
    } else {
      System.err.println("Encountered error during executeJDOQL - " +
          "commit of JDO transaction failed.");
    }
    return numUpdated;
  }

  /** Prints each old-location/new-location pair and returns how many were printed. */
  private int printUpdateLocations(Map<String, String> updateLocations) {
    int count = 0;
    for (Map.Entry<String, String> entry : updateLocations.entrySet()) {
      System.out.println("old location: " + entry.getKey() +
          " new location: " + entry.getValue());
      count++;
    }
    return count;
  }

  /**
   * Summary output shared by every update path: prints the updated (or, on a dry run,
   * to-be-updated) locations, the record count, and any bad records found.
   *
   * @param badLabel text identifying the bad field/key in the warning message
   */
  private void printSummary(Map<String, String> updateLocations, List<String> badRecords,
      String badLabel, String tblName, boolean isDryRun) {
    if (isDryRun) {
      System.out.println("Dry Run of updateLocation on table " + tblName + "..");
    } else {
      System.out.println("Successfully updated the following locations..");
    }
    int count = printUpdateLocations(updateLocations);
    if (isDryRun) {
      System.out.println("Found " + count + " records in " + tblName + " table to update");
    } else {
      System.out.println("Updated " + count + " records in " + tblName + " table");
    }
    if (badRecords.size() > 0) {
      System.err.println("Warning: Found records with bad " + badLabel + " in " +
          tblName + " table.. ");
      for (String badRecord : badRecords) {
        System.err.println("bad location URI: " + badRecord);
      }
    }
  }

  /** Reports the outcome of updating the LOCATION column of the SDS table. */
  private void printTblURIUpdateSummary(ObjectStore.UpdateMStorageDescriptorTblURIRetVal retVal,
      boolean isDryRun) {
    String tblName = "SDS";
    String fieldName = "LOCATION";

    if (retVal == null) {
      System.err.println("Encountered error while executing updateMStorageDescriptorTblURI - " +
          "commit of JDO transaction failed. Failed to update FSRoot locations in " +
          fieldName + " field in " + tblName + " table.");
    } else {
      printSummary(retVal.getUpdateLocations(), retVal.getBadRecords(), fieldName, tblName,
          isDryRun);
      int numNullRecords = retVal.getNumNullRecords();
      if (numNullRecords != 0) {
        // A NULL location is legal for a View or an Index, so debug rather than warn.
        LOG.debug("Number of NULL location URI: {}. This can happen for View or Index.",
            numNullRecords);
      }
    }
  }

  /** Reports the outcome of updating the LOCATION_URI column of the DBS table. */
  private void printDatabaseURIUpdateSummary(ObjectStore.UpdateMDatabaseURIRetVal retVal,
      boolean isDryRun) {
    String tblName = "DBS";
    String fieldName = "LOCATION_URI";

    if (retVal == null) {
      System.err.println("Encountered error while executing updateMDatabaseURI - " +
          "commit of JDO transaction failed. Failed to update FSRoot locations in " +
          fieldName + " field in " + tblName + " table.");
    } else {
      printSummary(retVal.getUpdateLocations(), retVal.getBadRecords(), fieldName, tblName,
          isDryRun);
    }
  }

  /** Reports the outcome of updating a property value in TABLE_PARAMS or SD_PARAMS. */
  private void printPropURIUpdateSummary(ObjectStore.UpdatePropURIRetVal retVal, String
      tablePropKey, boolean isDryRun, String tblName, String methodName) {
    if (retVal == null) {
      System.err.println("Encountered error while executing " + methodName + " - " +
          "commit of JDO transaction failed. Failed to update FSRoot locations in " +
          "value field corresponding to " + tablePropKey + " in " + tblName + " table.");
    } else {
      printSummary(retVal.getUpdateLocations(), retVal.getBadRecords(),
          tablePropKey + " key", tblName, isDryRun);
    }
  }

  /** Reports the outcome of updating a serde property value in SERDE_PARAMS. */
  private void printSerdePropURIUpdateSummary(ObjectStore.UpdateSerdeURIRetVal retVal,
      String serdePropKey, boolean isDryRun) {
    String tblName = "SERDE_PARAMS";

    if (retVal == null) {
      System.err.println("Encountered error while executing updateSerdeURI - " +
          "commit of JDO transaction failed. Failed to update FSRoot locations in " +
          "value field corresponding to " + serdePropKey + " in " + tblName + " table.");
    } else {
      printSummary(retVal.getUpdateLocations(), retVal.getBadRecords(),
          serdePropKey + " key", tblName, isDryRun);
    }
  }

  /**
   * Rewrites every stored FS root location from oldURI to newURI across the DBS, SDS and,
   * when the corresponding keys are given, TABLE_PARAMS/SD_PARAMS/SERDE_PARAMS tables.
   *
   * @param isDryRun when true, changes are only displayed, never persisted
   */
  public void updateFSRootLocation(URI oldURI, URI newURI, String serdePropKey, String
      tablePropKey, boolean isDryRun) {
    HiveConf hiveConf = new HiveConf(HiveMetaTool.class);
    initObjectStore(hiveConf);

    System.out.println("Looking for LOCATION_URI field in DBS table to update..");
    ObjectStore.UpdateMDatabaseURIRetVal updateMDBURIRetVal = objStore.updateMDatabaseURI(oldURI,
        newURI, isDryRun);
    printDatabaseURIUpdateSummary(updateMDBURIRetVal, isDryRun);

    System.out.println("Looking for LOCATION field in SDS table to update..");
    ObjectStore.UpdateMStorageDescriptorTblURIRetVal updateTblURIRetVal =
        objStore.updateMStorageDescriptorTblURI(oldURI, newURI, isDryRun);
    printTblURIUpdateSummary(updateTblURIRetVal, isDryRun);

    if (tablePropKey != null) {
      System.out.println("Looking for value of " + tablePropKey + " key in TABLE_PARAMS table " +
          "to update..");
      ObjectStore.UpdatePropURIRetVal updateTblPropURIRetVal =
          objStore.updateTblPropURI(oldURI, newURI,
              tablePropKey, isDryRun);
      printPropURIUpdateSummary(updateTblPropURIRetVal, tablePropKey, isDryRun, "TABLE_PARAMS",
          "updateTblPropURI");

      System.out.println("Looking for value of " + tablePropKey + " key in SD_PARAMS table " +
          "to update..");
      ObjectStore.UpdatePropURIRetVal updatePropURIRetVal = objStore
          .updateMStorageDescriptorTblPropURI(oldURI, newURI, tablePropKey, isDryRun);
      printPropURIUpdateSummary(updatePropURIRetVal, tablePropKey, isDryRun, "SD_PARAMS",
          "updateMStorageDescriptorTblPropURI");
    }

    if (serdePropKey != null) {
      System.out.println("Looking for value of " + serdePropKey + " key in SERDE_PARAMS table " +
          "to update..");
      ObjectStore.UpdateSerdeURIRetVal updateSerdeURIretVal = objStore.updateSerdeURI(oldURI,
          newURI, serdePropKey, isDryRun);
      printSerdePropURIUpdateSummary(updateSerdeURIretVal, serdePropKey, isDryRun);
    }
  }

  /** Prints usage and terminates the JVM with exit status 1. */
  private static void printAndExit(HiveMetaTool metaTool) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("metatool", metaTool.cmdLineOptions);
    System.exit(1);
  }

  public static void main(String[] args) {
    HiveMetaTool metaTool = new HiveMetaTool();
    metaTool.init();
    CommandLineParser parser = new GnuParser();
    CommandLine line = null;

    try {
      try {
        line = parser.parse(metaTool.cmdLineOptions, args);
      } catch (ParseException e) {
        System.err.println("HiveMetaTool:Parsing failed. Reason: " + e.getLocalizedMessage());
        printAndExit(metaTool);     // exits; 'line' is non-null beyond this point
      }

      if (line.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("metatool", metaTool.cmdLineOptions);
      } else if (line.hasOption("listFSRoot")) {
        if (line.hasOption("dryRun")) {
          System.err.println("HiveMetaTool: dryRun is not valid with listFSRoot");
          printAndExit(metaTool);
        } else if (line.hasOption("serdePropKey")) {
          System.err.println("HiveMetaTool: serdePropKey is not valid with listFSRoot");
          printAndExit(metaTool);
        } else if (line.hasOption("tablePropKey")) {
          System.err.println("HiveMetaTool: tablePropKey is not valid with listFSRoot");
          printAndExit(metaTool);
        }
        metaTool.listFSRoot();
      } else if (line.hasOption("executeJDOQL")) {
        String query = line.getOptionValue("executeJDOQL");
        if (line.hasOption("dryRun")) {
          System.err.println("HiveMetaTool: dryRun is not valid with executeJDOQL");
          printAndExit(metaTool);
        } else if (line.hasOption("serdePropKey")) {
          System.err.println("HiveMetaTool: serdePropKey is not valid with executeJDOQL");
          printAndExit(metaTool);
        } else if (line.hasOption("tablePropKey")) {
          System.err.println("HiveMetaTool: tablePropKey is not valid with executeJDOQL");
          printAndExit(metaTool);
        }
        // Locale.ROOT keeps keyword detection locale-independent (e.g. Turkish dotless i).
        if (query.toLowerCase(Locale.ROOT).trim().startsWith("select")) {
          metaTool.executeJDOQLSelect(query);
        } else if (query.toLowerCase(Locale.ROOT).trim().startsWith("update")) {
          metaTool.executeJDOQLUpdate(query);
        } else {
          System.err.println("HiveMetaTool:Unsupported statement type");
          printAndExit(metaTool);
        }
      } else if (line.hasOption("updateLocation")) {
        String[] loc = line.getOptionValues("updateLocation");
        boolean isDryRun = false;
        String serdepropKey = null;
        String tablePropKey = null;

        if (loc.length != 2 && loc.length != 3) {
          System.err.println("HiveMetaTool:updateLocation takes in 2 required and 1 " +
              "optional arguments but " +
              "was passed " + loc.length + " arguments");
          printAndExit(metaTool);
        }

        // NOTE: the first CLI argument is the NEW location, the second the OLD one.
        Path newPath = new Path(loc[0]);
        Path oldPath = new Path(loc[1]);

        URI oldURI = oldPath.toUri();
        URI newURI = newPath.toUri();

        if (line.hasOption("dryRun")) {
          isDryRun = true;
        }

        if (line.hasOption("serdePropKey")) {
          serdepropKey = line.getOptionValue("serdePropKey");
        }

        if (line.hasOption("tablePropKey")) {
          tablePropKey = line.getOptionValue("tablePropKey");
        }

        /*
         * validate input - Both new and old URI should contain valid host names and valid schemes.
         * port is optional in both the URIs since HDFS HA NN URI doesn't have a port.
         */
        if (oldURI.getHost() == null || newURI.getHost() == null) {
          System.err.println("HiveMetaTool:A valid host is required in both old-loc and new-loc");
        } else if (oldURI.getScheme() == null || newURI.getScheme() == null) {
          System.err.println("HiveMetaTool:A valid scheme is required in both old-loc and new-loc");
        } else {
          metaTool.updateFSRootLocation(oldURI, newURI, serdepropKey, tablePropKey, isDryRun);
        }
      } else {
        if (line.hasOption("dryRun")) {
          System.err.println("HiveMetaTool: dryRun is not a valid standalone option");
        } else if (line.hasOption("serdePropKey")) {
          System.err.println("HiveMetaTool: serdePropKey is not a valid standalone option");
        } else if (line.hasOption("tablePropKey")) {
          System.err.println("HiveMetaTool: tablePropKey is not a valid standalone option");
          printAndExit(metaTool);
        } else {
          System.err.print("HiveMetaTool:Parsing failed. Reason: Invalid arguments: " );
          for (String s : line.getArgs()) {
            System.err.print(s + " ");
          }
          System.err.println();
        }
        printAndExit(metaTool);
      }
    } finally {
      metaTool.shutdownObjectStore();
    }
  }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/metastore/src/test/org/apache/hadoop/hive/metastore/AlternateFailurePreListener.java ---------------------------------------------------------------------- diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/AlternateFailurePreListener.java b/metastore/src/test/org/apache/hadoop/hive/metastore/AlternateFailurePreListener.java deleted file mode 100644 index 22146ba..0000000 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/AlternateFailurePreListener.java +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.metastore; - -import javax.jdo.JDOException; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.metastore.api.InvalidOperationException; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; -import org.apache.hadoop.hive.metastore.events.PreEventContext; - -/** - * - * AlternateFailurePreListener. - * - * An implementation of MetaStorePreEventListener which fails every other time it's invoked, - * starting with the first time. 
- * - * It also records and makes available the number of times it's been invoked. - */ -public class AlternateFailurePreListener extends MetaStorePreEventListener { - - private static int callCount = 0; - private static boolean throwException = true; - - public AlternateFailurePreListener(Configuration config) { - super(config); - } - - @Override - public void onEvent(PreEventContext context) throws MetaException, NoSuchObjectException, - InvalidOperationException { - - callCount++; - if (throwException) { - throwException = false; - throw new JDOException(); - } - - throwException = true; - } - - public static int getCallCount() { - return callCount; - } -} http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/metastore/src/test/org/apache/hadoop/hive/metastore/DummyEndFunctionListener.java ---------------------------------------------------------------------- diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyEndFunctionListener.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyEndFunctionListener.java deleted file mode 100644 index ac62cd5..0000000 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyEndFunctionListener.java +++ /dev/null @@ -1,47 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.metastore;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;

/**
 * A dummy implementation of
 * {@link org.apache.hadoop.hive.metastore.MetaStoreEndFunctionListener}
 * for testing purposes: it simply records every end-function notification.
 */
public class DummyEndFunctionListener extends MetaStoreEndFunctionListener{

  // Test hooks: names and contexts are appended in lock-step, one entry per callback.
  public static final List<String> funcNameList = new ArrayList<String>();
  public static final List<MetaStoreEndFunctionContext> contextList =
      new ArrayList<MetaStoreEndFunctionContext>();

  public DummyEndFunctionListener(Configuration config) {
    super(config);
  }

  @Override
  public void onEndFunction(String function, MetaStoreEndFunctionContext ctx) {
    // Record the function name first, then its context, so list indices correspond.
    funcNameList.add(function);
    contextList.add(ctx);
  }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.metastore;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.events.AddIndexEvent;
import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
import org.apache.hadoop.hive.metastore.events.AlterIndexEvent;
import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
import org.apache.hadoop.hive.metastore.events.ConfigChangeEvent;
import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
import org.apache.hadoop.hive.metastore.events.CreateFunctionEvent;
import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
import org.apache.hadoop.hive.metastore.events.DropFunctionEvent;
import org.apache.hadoop.hive.metastore.events.DropIndexEvent;
import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
import org.apache.hadoop.hive.metastore.events.DropTableEvent;
import org.apache.hadoop.hive.metastore.events.ListenerEvent;
import org.apache.hadoop.hive.metastore.events.LoadPartitionDoneEvent;

/**
 * A dummy implementation of
 * {@link org.apache.hadoop.hive.metastore.MetaStoreEventListener}
 * for testing purposes: every callback just appends its event to a static list.
 */
public class DummyListener extends MetaStoreEventListener{

  // Static test hook: all events received, in arrival order, across all instances.
  public static final List<ListenerEvent> notifyList = new ArrayList<ListenerEvent>();

  /**
   * @return The last event received, or null if no event was received.
   */
  public static ListenerEvent getLastEvent() {
    return notifyList.isEmpty() ? null : notifyList.get(notifyList.size() - 1);
  }

  public DummyListener(Configuration config) {
    super(config);
  }

  @Override
  public void onConfigChange(ConfigChangeEvent event) {
    addEvent(event);
  }

  @Override
  public void onAddPartition(AddPartitionEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onCreateDatabase(CreateDatabaseEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onCreateTable(CreateTableEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onDropDatabase(DropDatabaseEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onDropPartition(DropPartitionEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onDropTable(DropTableEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onAlterTable(AlterTableEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onAlterPartition(AlterPartitionEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onLoadPartitionDone(LoadPartitionDoneEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onAddIndex(AddIndexEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onDropIndex(DropIndexEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onAlterIndex(AlterIndexEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onCreateFunction(CreateFunctionEvent event) throws MetaException {
    addEvent(event);
  }

  @Override
  public void onDropFunction(DropFunctionEvent event) throws MetaException {
    addEvent(event);
  }

  /** Appends one received event to the shared notification list. */
  private void addEvent(ListenerEvent event) {
    notifyList.add(event);
  }
}
- */ - -package org.apache.hadoop.hive.metastore; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.MetaStoreInitContext; -import org.apache.hadoop.hive.metastore.MetaStoreInitListener; -import org.apache.hadoop.hive.metastore.api.MetaException; - -/* - * An implementation of MetaStoreInitListener to verify onInit is called when - * HMSHandler is initialized - */ -public class DummyMetaStoreInitListener extends MetaStoreInitListener{ - - public static boolean wasCalled = false; - public DummyMetaStoreInitListener(Configuration config) { - super(config); - } - - @Override - public void onInit(MetaStoreInitContext context) throws MetaException { - wasCalled = true; - } -} http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/metastore/src/test/org/apache/hadoop/hive/metastore/DummyPreListener.java ---------------------------------------------------------------------- diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyPreListener.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyPreListener.java deleted file mode 100644 index 7ff6f92..0000000 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyPreListener.java +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.metastore;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.events.PreEventContext;

/**
 * DummyPreListener.
 *
 * A {@link MetaStorePreEventListener} test double that accumulates every
 * pre-event context it observes in the shared static {@link #notifyList},
 * letting tests assert on the sequence of pre-events a metastore operation
 * produced. Not thread-safe; for single-threaded test use.
 */
public class DummyPreListener extends MetaStorePreEventListener {

  /** Pre-event contexts received so far, in arrival order. */
  public static final List<PreEventContext> notifyList = new ArrayList<PreEventContext>();

  public DummyPreListener(Configuration config) {
    super(config);
  }

  /** Records the context; never rejects the event. */
  @Override
  public void onEvent(PreEventContext context)
      throws MetaException, NoSuchObjectException, InvalidOperationException {
    notifyList.add(context);
  }
}
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.metastore; - -import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan; - -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.metastore.api.AggrStats; -import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; -import org.apache.hadoop.hive.metastore.api.ColumnStatistics; -import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; -import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId; -import org.apache.hadoop.hive.metastore.api.Database; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.api.FileMetadataExprType; -import org.apache.hadoop.hive.metastore.api.Function; -import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; -import org.apache.hadoop.hive.metastore.api.Index; -import org.apache.hadoop.hive.metastore.api.InvalidInputException; -import org.apache.hadoop.hive.metastore.api.InvalidObjectException; -import 
org.apache.hadoop.hive.metastore.api.InvalidOperationException; -import org.apache.hadoop.hive.metastore.api.InvalidPartitionException; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; -import org.apache.hadoop.hive.metastore.api.NotificationEvent; -import org.apache.hadoop.hive.metastore.api.NotificationEventRequest; -import org.apache.hadoop.hive.metastore.api.NotificationEventResponse; -import org.apache.hadoop.hive.metastore.api.NotificationEventsCountRequest; -import org.apache.hadoop.hive.metastore.api.NotificationEventsCountResponse; -import org.apache.hadoop.hive.metastore.api.Partition; -import org.apache.hadoop.hive.metastore.api.PartitionEventType; -import org.apache.hadoop.hive.metastore.api.PartitionValuesResponse; -import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; -import org.apache.hadoop.hive.metastore.api.PrincipalType; -import org.apache.hadoop.hive.metastore.api.PrivilegeBag; -import org.apache.hadoop.hive.metastore.api.WMResourcePlan; -import org.apache.hadoop.hive.metastore.api.WMTrigger; -import org.apache.hadoop.hive.metastore.api.Role; -import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant; -import org.apache.hadoop.hive.metastore.api.SQLForeignKey; -import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint; -import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; -import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; -import org.apache.hadoop.hive.metastore.api.Table; -import org.apache.hadoop.hive.metastore.api.TableMeta; -import org.apache.hadoop.hive.metastore.api.Type; -import org.apache.hadoop.hive.metastore.api.UnknownDBException; -import org.apache.hadoop.hive.metastore.api.UnknownPartitionException; -import org.apache.hadoop.hive.metastore.api.UnknownTableException; -import org.apache.hadoop.hive.metastore.api.WMMapping; -import org.apache.hadoop.hive.metastore.api.WMPool; -import 
org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; -import org.apache.thrift.TException; - -/** - * A wrapper around {@link org.apache.hadoop.hive.metastore.ObjectStore} - * with the ability to control the result of commitTransaction(). - * All other functions simply delegate to an embedded ObjectStore object. - * Ideally, we should have just extended ObjectStore instead of using - * delegation. However, since HiveMetaStore uses a Proxy, this class must - * not inherit from any other class. - */ -public class DummyRawStoreControlledCommit implements RawStore, Configurable { - - private final ObjectStore objectStore; - public DummyRawStoreControlledCommit() { - objectStore = new ObjectStore(); - } - - /** - * If true, shouldCommit() will simply call delegate commitTransaction() to the - * underlying ObjectStore. - * If false, shouldCommit() immediately returns false. - */ - private static boolean shouldCommitSucceed = true; - public static void setCommitSucceed(boolean flag) { - shouldCommitSucceed = flag; - } - - @Override - public boolean commitTransaction() { - if (shouldCommitSucceed) { - return objectStore.commitTransaction(); - } else { - return false; - } - } - - @Override - public boolean isActiveTransaction() { - return false; - } - - // All remaining functions simply delegate to objectStore - - @Override - public Configuration getConf() { - return objectStore.getConf(); - } - - @Override - public void setConf(Configuration conf) { - objectStore.setConf(conf); - } - - @Override - public void shutdown() { - objectStore.shutdown(); - } - - @Override - public boolean openTransaction() { - return objectStore.openTransaction(); - } - - @Override - public void rollbackTransaction() { - objectStore.rollbackTransaction(); - } - - @Override - public void createDatabase(Database db) throws InvalidObjectException, MetaException { - objectStore.createDatabase(db); - } - - @Override - public Database getDatabase(String dbName) throws 
NoSuchObjectException { - return objectStore.getDatabase(dbName); - } - - @Override - public boolean dropDatabase(String dbName) - throws NoSuchObjectException, MetaException { - return objectStore.dropDatabase(dbName); - } - - @Override - public boolean alterDatabase(String dbName, Database db) - throws NoSuchObjectException, MetaException { - - return objectStore.alterDatabase(dbName, db); - } - - @Override - public List<String> getDatabases(String pattern) throws MetaException { - return objectStore.getDatabases(pattern); - } - - @Override - public List<String> getAllDatabases() throws MetaException { - return objectStore.getAllDatabases(); - } - - @Override - public boolean createType(Type type) { - return objectStore.createType(type); - } - - @Override - public Type getType(String typeName) { - return objectStore.getType(typeName); - } - - @Override - public boolean dropType(String typeName) { - return objectStore.dropType(typeName); - } - - @Override - public void createTable(Table tbl) throws InvalidObjectException, MetaException { - objectStore.createTable(tbl); - } - - @Override - public boolean dropTable(String dbName, String tableName) - throws MetaException, NoSuchObjectException, - InvalidObjectException, InvalidInputException { - return objectStore.dropTable(dbName, tableName); - } - - @Override - public Table getTable(String dbName, String tableName) throws MetaException { - return objectStore.getTable(dbName, tableName); - } - - @Override - public boolean addPartition(Partition part) - throws InvalidObjectException, MetaException { - return objectStore.addPartition(part); - } - - @Override - public Partition getPartition(String dbName, String tableName, List<String> partVals) - throws MetaException, NoSuchObjectException { - return objectStore.getPartition(dbName, tableName, partVals); - } - - @Override - public boolean dropPartition(String dbName, String tableName, List<String> partVals) - throws MetaException, NoSuchObjectException, - 
InvalidObjectException, InvalidInputException { - return objectStore.dropPartition(dbName, tableName, partVals); - } - - @Override - public List<Partition> getPartitions(String dbName, String tableName, int max) - throws MetaException, NoSuchObjectException { - return objectStore.getPartitions(dbName, tableName, max); - } - - @Override - public void alterTable(String dbName, String name, Table newTable) - throws InvalidObjectException, MetaException { - objectStore.alterTable(dbName, name, newTable); - } - - @Override - public List<String> getTables(String dbName, String pattern) throws MetaException { - return objectStore.getTables(dbName, pattern); - } - - @Override - public List<String> getTables(String dbName, String pattern, TableType tableType) throws MetaException { - return objectStore.getTables(dbName, pattern, tableType); - } - - @Override - public List<TableMeta> getTableMeta(String dbNames, String tableNames, List<String> tableTypes) - throws MetaException { - return objectStore.getTableMeta(dbNames, tableNames, tableTypes); - } - - @Override - public List<Table> getTableObjectsByName(String dbName, List<String> tableNames) - throws MetaException, UnknownDBException { - return objectStore.getTableObjectsByName(dbName, tableNames); - } - - @Override - public List<String> getAllTables(String dbName) throws MetaException { - return objectStore.getAllTables(dbName); - } - - @Override - public List<String> listTableNamesByFilter(String dbName, String filter, - short maxTables) throws MetaException, UnknownDBException { - return objectStore.listTableNamesByFilter(dbName, filter, maxTables); - } - - @Override - public List<String> listPartitionNames(String dbName, String tblName, short maxParts) - throws MetaException { - return objectStore.listPartitionNames(dbName, tblName, maxParts); - } - - @Override - public PartitionValuesResponse listPartitionValues(String db_name, String tbl_name, List<FieldSchema> cols, boolean applyDistinct, String filter, boolean 
ascending, List<FieldSchema> order, long maxParts) throws MetaException { - return null; - } - - @Override - public List<String> listPartitionNamesByFilter(String dbName, String tblName, - String filter, short maxParts) throws MetaException { - return objectStore.listPartitionNamesByFilter(dbName, tblName, filter, maxParts); - } - - @Override - public void alterPartition(String dbName, String tblName, List<String> partVals, - Partition newPart) throws InvalidObjectException, MetaException { - objectStore.alterPartition(dbName, tblName, partVals, newPart); - } - - @Override - public void alterPartitions(String dbName, String tblName, - List<List<String>> partValsList, List<Partition> newParts) - throws InvalidObjectException, MetaException { - objectStore.alterPartitions(dbName, tblName, partValsList, newParts); - } - - @Override - public boolean addIndex(Index index) throws InvalidObjectException, MetaException { - return objectStore.addIndex(index); - } - - @Override - public Index getIndex(String dbName, String origTableName, String indexName) - throws MetaException { - return objectStore.getIndex(dbName, origTableName, indexName); - } - - @Override - public boolean dropIndex(String dbName, String origTableName, String indexName) - throws MetaException { - return objectStore.dropIndex(dbName, origTableName, indexName); - } - - @Override - public List<Index> getIndexes(String dbName, String origTableName, int max) - throws MetaException { - return objectStore.getIndexes(dbName, origTableName, max); - } - - @Override - public List<String> listIndexNames(String dbName, String origTableName, short max) - throws MetaException { - return objectStore.listIndexNames(dbName, origTableName, max); - } - - @Override - public void alterIndex(String dbName, String baseTblName, String name, Index newIndex) - throws InvalidObjectException, MetaException { - objectStore.alterIndex(dbName, baseTblName, name, newIndex); - } - - @Override - public List<Partition> 
getPartitionsByFilter(String dbName, String tblName, - String filter, short maxParts) throws MetaException, NoSuchObjectException { - return objectStore.getPartitionsByFilter(dbName, tblName, filter, maxParts); - } - - @Override - public int getNumPartitionsByFilter(String dbName, String tblName, - String filter) throws MetaException, NoSuchObjectException { - return objectStore.getNumPartitionsByFilter(dbName, tblName, filter); - } - - @Override - public int getNumPartitionsByExpr(String dbName, String tblName, - byte[] expr) throws MetaException, NoSuchObjectException { - return objectStore.getNumPartitionsByExpr(dbName, tblName, expr); - } - - @Override - public List<Partition> getPartitionsByNames(String dbName, String tblName, - List<String> partNames) throws MetaException, NoSuchObjectException { - return objectStore.getPartitionsByNames(dbName, tblName, partNames); - } - - @Override - public boolean getPartitionsByExpr(String dbName, String tblName, byte[] expr, - String defaultPartitionName, short maxParts, List<Partition> result) throws TException { - return objectStore.getPartitionsByExpr( - dbName, tblName, expr, defaultPartitionName, maxParts, result); - } - - @Override - public Table markPartitionForEvent(String dbName, String tblName, - Map<String, String> partVals, PartitionEventType evtType) - throws MetaException, UnknownTableException, InvalidPartitionException, - UnknownPartitionException { - return objectStore.markPartitionForEvent(dbName, tblName, partVals, evtType); - } - - @Override - public boolean isPartitionMarkedForEvent(String dbName, String tblName, - Map<String, String> partName, PartitionEventType evtType) - throws MetaException, UnknownTableException, InvalidPartitionException, - UnknownPartitionException { - return objectStore.isPartitionMarkedForEvent(dbName, tblName, partName, evtType); - } - - @Override - public boolean addRole(String rowName, String ownerName) throws InvalidObjectException, - MetaException, NoSuchObjectException 
{ - return objectStore.addRole(rowName, ownerName); - } - - @Override - public boolean removeRole(String roleName) - throws MetaException, NoSuchObjectException { - return objectStore.removeRole(roleName); - } - - @Override - public boolean grantRole(Role role, String userName, PrincipalType principalType, - String grantor, PrincipalType grantorType, boolean grantOption) - throws MetaException, NoSuchObjectException, InvalidObjectException { - return objectStore.grantRole(role, userName, principalType, grantor, grantorType, - grantOption); - } - - @Override - public boolean revokeRole(Role role, String userName, PrincipalType principalType, boolean grantOption) - throws MetaException, NoSuchObjectException { - return objectStore.revokeRole(role, userName, principalType, grantOption); - } - - @Override - public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, - List<String> groupNames) throws InvalidObjectException, MetaException { - return objectStore.getUserPrivilegeSet(userName, groupNames); - } - - @Override - public PrincipalPrivilegeSet getDBPrivilegeSet(String dbName, String userName, - List<String> groupNames) throws InvalidObjectException, MetaException { - return objectStore.getDBPrivilegeSet(dbName, userName, groupNames); - } - - @Override - public PrincipalPrivilegeSet getTablePrivilegeSet(String dbName, String tableName, - String userName, List<String> groupNames) - throws InvalidObjectException, MetaException { - return objectStore.getTablePrivilegeSet(dbName, tableName, userName, groupNames); - } - - @Override - public PrincipalPrivilegeSet getPartitionPrivilegeSet(String dbName, String tableName, - String partition, String userName, List<String> groupNames) - throws InvalidObjectException, MetaException { - return objectStore.getPartitionPrivilegeSet(dbName, tableName, partition, - userName, groupNames); - } - - @Override - public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, String tableName, - String partitionName, String 
columnName, String userName, List<String> groupNames) - throws InvalidObjectException, MetaException { - return objectStore.getColumnPrivilegeSet(dbName, tableName, partitionName, - columnName, userName, groupNames); - } - - @Override - public List<HiveObjectPrivilege> listPrincipalGlobalGrants(String principalName, - PrincipalType principalType) { - return objectStore.listPrincipalGlobalGrants(principalName, principalType); - } - - @Override - public List<HiveObjectPrivilege> listPrincipalDBGrants(String principalName, - PrincipalType principalType, String dbName) { - return objectStore.listPrincipalDBGrants(principalName, principalType, dbName); - } - - @Override - public List<HiveObjectPrivilege> listAllTableGrants(String principalName, - PrincipalType principalType, String dbName, String tableName) { - return objectStore.listAllTableGrants(principalName, principalType, - dbName, tableName); - } - - @Override - public List<HiveObjectPrivilege> listPrincipalPartitionGrants(String principalName, - PrincipalType principalType, String dbName, String tableName, List<String> partValues, - String partName) { - return objectStore.listPrincipalPartitionGrants(principalName, principalType, - dbName, tableName, partValues, partName); - } - - @Override - public List<HiveObjectPrivilege> listPrincipalTableColumnGrants(String principalName, - PrincipalType principalType, String dbName, String tableName, String columnName) { - return objectStore.listPrincipalTableColumnGrants(principalName, principalType, - dbName, tableName, columnName); - } - - @Override - public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrants( - String principalName, PrincipalType principalType, String dbName, String tableName, - List<String> partVals, String partName, String columnName) { - return objectStore.listPrincipalPartitionColumnGrants(principalName, principalType, - dbName, tableName, partVals, partName, columnName); - } - - @Override - public boolean grantPrivileges(PrivilegeBag 
privileges) throws InvalidObjectException, - MetaException, NoSuchObjectException { - return objectStore.grantPrivileges(privileges); - } - - @Override - public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) - throws InvalidObjectException, MetaException, NoSuchObjectException { - return objectStore.revokePrivileges(privileges, grantOption); - } - - @Override - public Role getRole(String roleName) throws NoSuchObjectException { - return objectStore.getRole(roleName); - } - - @Override - public List<String> listRoleNames() { - return objectStore.listRoleNames(); - } - - @Override - public List<Role> listRoles(String principalName, PrincipalType principalType) { - return objectStore.listRoles(principalName, principalType); - } - - @Override - public List<RolePrincipalGrant> listRolesWithGrants(String principalName, - PrincipalType principalType) { - return objectStore.listRolesWithGrants(principalName, principalType); - } - - @Override - public List<RolePrincipalGrant> listRoleMembers(String roleName) { - return objectStore.listRoleMembers(roleName); - } - - @Override - public Partition getPartitionWithAuth(String dbName, String tblName, - List<String> partVals, String userName, List<String> groupNames) - throws MetaException, NoSuchObjectException, InvalidObjectException { - return objectStore.getPartitionWithAuth(dbName, tblName, partVals, userName, - groupNames); - } - - @Override - public List<Partition> getPartitionsWithAuth(String dbName, String tblName, - short maxParts, String userName, List<String> groupNames) - throws MetaException, NoSuchObjectException, InvalidObjectException { - return objectStore.getPartitionsWithAuth(dbName, tblName, maxParts, userName, - groupNames); - } - - @Override - public List<String> listPartitionNamesPs(String dbName, String tblName, - List<String> partVals, short maxParts) - throws MetaException, NoSuchObjectException { - return objectStore.listPartitionNamesPs(dbName, tblName, partVals, maxParts); - } 
- - @Override - public List<Partition> listPartitionsPsWithAuth(String dbName, String tblName, - List<String> partVals, short maxParts, String userName, List<String> groupNames) - throws MetaException, InvalidObjectException, NoSuchObjectException { - return objectStore.listPartitionsPsWithAuth(dbName, tblName, partVals, maxParts, - userName, groupNames); - } - - @Override - public long cleanupEvents() { - return objectStore.cleanupEvents(); - } - - @Override - public List<HiveObjectPrivilege> listPrincipalDBGrantsAll( - String principalName, PrincipalType principalType) { - return objectStore.listPrincipalDBGrantsAll(principalName, principalType); - } - - @Override - public List<HiveObjectPrivilege> listPrincipalTableGrantsAll( - String principalName, PrincipalType principalType) { - return objectStore.listPrincipalTableGrantsAll(principalName, principalType); - } - - @Override - public List<HiveObjectPrivilege> listPrincipalPartitionGrantsAll( - String principalName, PrincipalType principalType) { - return objectStore.listPrincipalPartitionGrantsAll(principalName, principalType); - } - - @Override - public List<HiveObjectPrivilege> listPrincipalTableColumnGrantsAll( - String principalName, PrincipalType principalType) { - return objectStore.listPrincipalTableColumnGrantsAll(principalName, principalType); - } - - @Override - public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrantsAll( - String principalName, PrincipalType principalType) { - return objectStore.listPrincipalPartitionColumnGrantsAll(principalName, principalType); - } - - @Override - public List<HiveObjectPrivilege> listGlobalGrantsAll() { - return objectStore.listGlobalGrantsAll(); - } - - @Override - public List<HiveObjectPrivilege> listDBGrantsAll(String dbName) { - return objectStore.listDBGrantsAll(dbName); - } - - @Override - public List<HiveObjectPrivilege> listPartitionColumnGrantsAll(String dbName, String tableName, - String partitionName, String columnName) { - return 
objectStore.listPartitionColumnGrantsAll(dbName, tableName, partitionName, columnName); - } - - @Override - public List<HiveObjectPrivilege> listTableGrantsAll(String dbName, String tableName) { - return objectStore.listTableGrantsAll(dbName, tableName); - } - - @Override - public List<HiveObjectPrivilege> listPartitionGrantsAll(String dbName, String tableName, - String partitionName) { - return objectStore.listPartitionGrantsAll(dbName, tableName, partitionName); - } - - @Override - public List<HiveObjectPrivilege> listTableColumnGrantsAll(String dbName, String tableName, - String columnName) { - return objectStore.listTableColumnGrantsAll(dbName, tableName, columnName); - } - - @Override - public ColumnStatistics getTableColumnStatistics(String dbName, String tableName, - List<String> colNames) throws MetaException, NoSuchObjectException { - return objectStore.getTableColumnStatistics(dbName, tableName, colNames); - } - - @Override - public boolean deleteTableColumnStatistics(String dbName, String tableName, - String colName) - throws NoSuchObjectException, MetaException, InvalidObjectException, - InvalidInputException { - return objectStore.deleteTableColumnStatistics(dbName, tableName, colName); - } - - @Override - public boolean deletePartitionColumnStatistics(String dbName, String tableName, - String partName, List<String> partVals, String colName) - throws NoSuchObjectException, MetaException, InvalidObjectException, - InvalidInputException { - return objectStore.deletePartitionColumnStatistics(dbName, tableName, partName, - partVals, colName); - } - - @Override - public boolean updateTableColumnStatistics(ColumnStatistics statsObj) - throws NoSuchObjectException, MetaException, InvalidObjectException, - InvalidInputException { - return objectStore.updateTableColumnStatistics(statsObj); - } - - @Override - public boolean updatePartitionColumnStatistics(ColumnStatistics statsObj, - List<String> partVals) - throws NoSuchObjectException, MetaException, 
InvalidObjectException, - InvalidInputException { - return objectStore.updatePartitionColumnStatistics(statsObj, partVals); - } - - @Override - public boolean addToken(String tokenIdentifier, String delegationToken) { - return false; - } - - @Override - public boolean removeToken(String tokenIdentifier) { - return false; - } - - @Override - public String getToken(String tokenIdentifier) { - return ""; - } - - @Override - public List<String> getAllTokenIdentifiers() { - return new ArrayList<String>(); - } - - @Override - public int addMasterKey(String key) throws MetaException { - return -1; - } - - @Override - public void updateMasterKey(Integer seqNo, String key) - throws NoSuchObjectException, MetaException {} - - @Override - public boolean removeMasterKey(Integer keySeq) { - return false; - } - - @Override - public String[] getMasterKeys() { - return new String[0]; - } - - @Override - public void verifySchema() throws MetaException { - } - - @Override - public String getMetaStoreSchemaVersion() throws MetaException { - return objectStore.getMetaStoreSchemaVersion(); - } - - @Override - public void setMetaStoreSchemaVersion(String schemaVersion, String comment) throws MetaException { - objectStore.setMetaStoreSchemaVersion(schemaVersion, comment); - - } - - @Override - public List<ColumnStatistics> getPartitionColumnStatistics(String dbName, - String tblName, List<String> colNames, List<String> partNames) - throws MetaException, NoSuchObjectException { - return objectStore.getPartitionColumnStatistics(dbName, tblName , colNames, partNames); - } - - @Override - public boolean doesPartitionExist(String dbName, String tableName, - List<String> partVals) throws MetaException, NoSuchObjectException { - return objectStore.doesPartitionExist(dbName, tableName, partVals); - } - - @Override - public boolean addPartitions(String dbName, String tblName, List<Partition> parts) - throws InvalidObjectException, MetaException { - return objectStore.addPartitions(dbName, 
tblName, parts); - } - - @Override - public boolean addPartitions(String dbName, String tblName, PartitionSpecProxy partitionSpec, boolean ifNotExists) throws InvalidObjectException, MetaException { - return false; - } - - @Override - public void dropPartitions(String dbName, String tblName, List<String> partNames) - throws MetaException, NoSuchObjectException { - objectStore.dropPartitions(dbName, tblName, partNames); - } - - @Override - public void createFunction(Function func) throws InvalidObjectException, - MetaException { - objectStore.createFunction(func); - } - - @Override - public void alterFunction(String dbName, String funcName, Function newFunction) - throws InvalidObjectException, MetaException { - objectStore.alterFunction(dbName, funcName, newFunction); - } - - @Override - public void dropFunction(String dbName, String funcName) - throws MetaException, NoSuchObjectException, InvalidObjectException, - InvalidInputException { - objectStore.dropFunction(dbName, funcName); - } - - @Override - public Function getFunction(String dbName, String funcName) - throws MetaException { - return objectStore.getFunction(dbName, funcName); - } - - @Override - public List<Function> getAllFunctions() - throws MetaException { - return Collections.emptyList(); - } - - @Override - public List<String> getFunctions(String dbName, String pattern) - throws MetaException { - return objectStore.getFunctions(dbName, pattern); - } - - @Override - public AggrStats get_aggr_stats_for(String dbName, - String tblName, List<String> partNames, List<String> colNames) - throws MetaException { - return null; - } - - @Override - public NotificationEventResponse getNextNotification(NotificationEventRequest rqst) { - return objectStore.getNextNotification(rqst); - } - - @Override - public void addNotificationEvent(NotificationEvent event) { - objectStore.addNotificationEvent(event); - } - - @Override - public void cleanNotificationEvents(int olderThan) { - 
objectStore.cleanNotificationEvents(olderThan); - } - - @Override - public CurrentNotificationEventId getCurrentNotificationEventId() { - return objectStore.getCurrentNotificationEventId(); - } - - @Override - public NotificationEventsCountResponse getNotificationEventsCount(NotificationEventsCountRequest rqst) { - return objectStore.getNotificationEventsCount(rqst); - } - - @Override - public void flushCache() { - objectStore.flushCache(); - } - - @Override - public ByteBuffer[] getFileMetadata(List<Long> fileIds) { - return null; - } - - @Override - public void putFileMetadata( - List<Long> fileIds, List<ByteBuffer> metadata, FileMetadataExprType type) { - } - - @Override - public boolean isFileMetadataSupported() { - return false; - } - - - @Override - public void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr, - ByteBuffer[] metadatas, ByteBuffer[] stripeBitsets, boolean[] eliminated) { - } - - @Override - public int getTableCount() throws MetaException { - return objectStore.getTableCount(); - } - - @Override - public int getPartitionCount() throws MetaException { - return objectStore.getPartitionCount(); - } - - @Override - public int getDatabaseCount() throws MetaException { - return objectStore.getDatabaseCount(); - } - - @Override - public FileMetadataHandler getFileMetadataHandler(FileMetadataExprType type) { - return null; - } - - @Override - public List<SQLPrimaryKey> getPrimaryKeys(String db_name, String tbl_name) - throws MetaException { - // TODO Auto-generated method stub - return null; - } - - @Override - public List<SQLForeignKey> getForeignKeys(String parent_db_name, - String parent_tbl_name, String foreign_db_name, String foreign_tbl_name) - throws MetaException { - // TODO Auto-generated method stub - return null; - } - - @Override - public List<SQLUniqueConstraint> getUniqueConstraints(String db_name, String tbl_name) - throws MetaException { - // TODO Auto-generated method stub - return null; - } - - 
@Override - public List<SQLNotNullConstraint> getNotNullConstraints(String db_name, String tbl_name) - throws MetaException { - // TODO Auto-generated method stub - return null; - } - - @Override - public List<String> createTableWithConstraints(Table tbl, - List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, - List<SQLUniqueConstraint> uniqueConstraints, - List<SQLNotNullConstraint> notNullConstraints) - throws InvalidObjectException, MetaException { - // TODO Auto-generated method stub - return null; - } - - @Override - public void dropConstraint(String dbName, String tableName, - String constraintName) throws NoSuchObjectException { - // TODO Auto-generated method stub - } - - @Override - public List<String> addPrimaryKeys(List<SQLPrimaryKey> pks) - throws InvalidObjectException, MetaException { - return null; - } - - @Override - public List<String> addForeignKeys(List<SQLForeignKey> fks) - throws InvalidObjectException, MetaException { - return null; - } - - @Override - public List<String> addUniqueConstraints(List<SQLUniqueConstraint> uks) - throws InvalidObjectException, MetaException { - // TODO Auto-generated method stub - return null; - } - - @Override - public List<String> addNotNullConstraints(List<SQLNotNullConstraint> nns) - throws InvalidObjectException, MetaException { - // TODO Auto-generated method stub - return null; - } - - @Override - public Map<String, List<ColumnStatisticsObj>> getColStatsForTablePartitions(String dbName, - String tableName) throws MetaException, NoSuchObjectException { - // TODO Auto-generated method stub - return null; - } - - @Override - public String getMetastoreDbUuid() throws MetaException { - throw new MetaException("Get metastore uuid is not implemented"); - } - - @Override - public void createResourcePlan(WMResourcePlan resourcePlan, int defaultPoolSize) - throws AlreadyExistsException, InvalidObjectException, MetaException { - objectStore.createResourcePlan(resourcePlan, defaultPoolSize); - } - - 
@Override - public WMResourcePlan getResourcePlan(String name) throws NoSuchObjectException { - return objectStore.getResourcePlan(name); - } - - @Override - public List<WMResourcePlan> getAllResourcePlans() throws MetaException { - return objectStore.getAllResourcePlans(); - } - - @Override - public WMFullResourcePlan alterResourcePlan(String name, WMResourcePlan resourcePlan, - boolean canActivateDisabled) - throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException, - MetaException { - return objectStore.alterResourcePlan(name, resourcePlan, canActivateDisabled); - } - - @Override - public WMFullResourcePlan getActiveResourcePlan() throws MetaException { - return objectStore.getActiveResourcePlan(); - } - - @Override - public List<String> validateResourcePlan(String name) - throws NoSuchObjectException, InvalidObjectException, MetaException { - return objectStore.validateResourcePlan(name); - } - - @Override - public void dropResourcePlan(String name) throws NoSuchObjectException, MetaException { - objectStore.dropResourcePlan(name); - } - - @Override - public void createWMTrigger(WMTrigger trigger) - throws AlreadyExistsException, MetaException, NoSuchObjectException, - InvalidOperationException { - objectStore.createWMTrigger(trigger); - } - - @Override - public void alterWMTrigger(WMTrigger trigger) - throws NoSuchObjectException, InvalidOperationException, MetaException { - objectStore.alterWMTrigger(trigger); - } - - @Override - public void dropWMTrigger(String resourcePlanName, String triggerName) - throws NoSuchObjectException, InvalidOperationException, MetaException { - objectStore.dropWMTrigger(resourcePlanName, triggerName); - } - - @Override - public List<WMTrigger> getTriggersForResourcePlan(String resourcePlanName) - throws NoSuchObjectException, MetaException { - return objectStore.getTriggersForResourcePlan(resourcePlanName); - } - - @Override - public void createPool(WMPool pool) throws AlreadyExistsException, 
NoSuchObjectException, - InvalidOperationException, MetaException { - objectStore.createPool(pool); - } - - @Override - public void alterPool(WMPool pool, String poolPath) throws AlreadyExistsException, - NoSuchObjectException, InvalidOperationException, MetaException { - objectStore.alterPool(pool, poolPath); - } - - @Override - public void dropWMPool(String resourcePlanName, String poolPath) - throws NoSuchObjectException, InvalidOperationException, MetaException { - objectStore.dropWMPool(resourcePlanName, poolPath); - } - - @Override - public void createOrUpdateWMMapping(WMMapping mapping, boolean update) - throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException, - MetaException { - objectStore.createOrUpdateWMMapping(mapping, update); - } - - @Override - public void dropWMMapping(WMMapping mapping) - throws NoSuchObjectException, InvalidOperationException, MetaException { - objectStore.dropWMMapping(mapping); - } - - @Override - public void createWMTriggerToPoolMapping(String resourcePlanName, String triggerName, - String poolPath) throws AlreadyExistsException, NoSuchObjectException, - InvalidOperationException, MetaException { - objectStore.createWMTriggerToPoolMapping(resourcePlanName, triggerName, poolPath); - } - - @Override - public void dropWMTriggerToPoolMapping(String resourcePlanName, String triggerName, - String poolPath) throws NoSuchObjectException, InvalidOperationException, MetaException { - objectStore.dropWMTriggerToPoolMapping(resourcePlanName, triggerName, poolPath); - } -} http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/metastore/src/test/org/apache/hadoop/hive/metastore/InjectableBehaviourObjectStore.java ---------------------------------------------------------------------- diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/InjectableBehaviourObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/InjectableBehaviourObjectStore.java deleted file mode 100644 index 
d89c54c..0000000 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/InjectableBehaviourObjectStore.java +++ /dev/null @@ -1,104 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.metastore; - -import java.util.List; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.NotificationEventRequest; -import org.apache.hadoop.hive.metastore.api.NotificationEventResponse; -import org.apache.hadoop.hive.metastore.api.Table; - -import static org.junit.Assert.assertEquals; - - -/** - * A wrapper around {@link ObjectStore} that allows us to inject custom behaviour - * on to some of the methods for testing. - */ -public class InjectableBehaviourObjectStore extends ObjectStore { - public InjectableBehaviourObjectStore() { - super(); - } - - /** - * A utility class that allows people injecting behaviour to determine if their injections occurred. 
- */ - public static abstract class BehaviourInjection<T,F> - implements com.google.common.base.Function<T,F>{ - protected boolean injectionPathCalled = false; - protected boolean nonInjectedPathCalled = false; - - public void assertInjectionsPerformed( - boolean expectedInjectionCalled, boolean expectedNonInjectedPathCalled){ - assertEquals(expectedInjectionCalled, injectionPathCalled); - assertEquals(expectedNonInjectedPathCalled, nonInjectedPathCalled); - } - }; - - private static com.google.common.base.Function<Table,Table> getTableModifier = - com.google.common.base.Functions.identity(); - private static com.google.common.base.Function<List<String>, List<String>> listPartitionNamesModifier = - com.google.common.base.Functions.identity(); - private static com.google.common.base.Function<NotificationEventResponse, NotificationEventResponse> - getNextNotificationModifier = com.google.common.base.Functions.identity(); - - // Methods to set/reset getTable modifier - public static void setGetTableBehaviour(com.google.common.base.Function<Table,Table> modifier){ - getTableModifier = (modifier == null)? com.google.common.base.Functions.identity() : modifier; - } - - public static void resetGetTableBehaviour(){ - setGetTableBehaviour(null); - } - - // Methods to set/reset listPartitionNames modifier - public static void setListPartitionNamesBehaviour(com.google.common.base.Function<List<String>, List<String>> modifier){ - listPartitionNamesModifier = (modifier == null)? com.google.common.base.Functions.identity() : modifier; - } - - public static void resetListPartitionNamesBehaviour(){ - setListPartitionNamesBehaviour(null); - } - - // Methods to set/reset getNextNotification modifier - public static void setGetNextNotificationBehaviour( - com.google.common.base.Function<NotificationEventResponse,NotificationEventResponse> modifier){ - getNextNotificationModifier = (modifier == null)? 
com.google.common.base.Functions.identity() : modifier; - } - - public static void resetGetNextNotificationBehaviour(){ - setGetNextNotificationBehaviour(null); - } - - // ObjectStore methods to be overridden with injected behavior - @Override - public Table getTable(String dbName, String tableName) throws MetaException { - return getTableModifier.apply(super.getTable(dbName, tableName)); - } - - @Override - public List<String> listPartitionNames(String dbName, String tableName, short max) throws MetaException { - return listPartitionNamesModifier.apply(super.listPartitionNames(dbName, tableName, max)); - } - - @Override - public NotificationEventResponse getNextNotification(NotificationEventRequest rqst) { - return getNextNotificationModifier.apply(super.getNextNotification(rqst)); - } -} http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/metastore/src/test/org/apache/hadoop/hive/metastore/IpAddressListener.java ---------------------------------------------------------------------- diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/IpAddressListener.java b/metastore/src/test/org/apache/hadoop/hive/metastore/IpAddressListener.java deleted file mode 100644 index e40edca..0000000 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/IpAddressListener.java +++ /dev/null @@ -1,103 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.metastore; - -import java.net.InetAddress; -import java.net.UnknownHostException; - -import junit.framework.Assert; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.events.AddPartitionEvent; -import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent; -import org.apache.hadoop.hive.metastore.events.AlterTableEvent; -import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent; -import org.apache.hadoop.hive.metastore.events.CreateTableEvent; -import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent; -import org.apache.hadoop.hive.metastore.events.DropPartitionEvent; -import org.apache.hadoop.hive.metastore.events.DropTableEvent; -import org.apache.hadoop.hive.metastore.events.LoadPartitionDoneEvent; - -/** An implementation for MetaStoreEventListener which checks that the IP Address stored in - * HMSHandler matches that of local host, for testing purposes. 
- */ -public class IpAddressListener extends MetaStoreEventListener{ - - private static final String LOCAL_HOST = "localhost"; - - public IpAddressListener(Configuration config) { - super(config); - } - - private void checkIpAddress() { - try { - String localhostIp = InetAddress.getByName(LOCAL_HOST).getHostAddress(); - Assert.assertEquals(localhostIp, HMSHandler.getThreadLocalIpAddress()); - } catch (UnknownHostException e) { - Assert.assertTrue("InetAddress.getLocalHost threw an exception: " + e.getMessage(), false); - } - } - - @Override - public void onAddPartition(AddPartitionEvent partition) throws MetaException { - checkIpAddress(); - } - - @Override - public void onCreateDatabase(CreateDatabaseEvent db) throws MetaException { - checkIpAddress(); - } - - @Override - public void onCreateTable(CreateTableEvent table) throws MetaException { - checkIpAddress(); - } - - @Override - public void onDropDatabase(DropDatabaseEvent db) throws MetaException { - checkIpAddress(); - } - - @Override - public void onDropPartition(DropPartitionEvent partition) throws MetaException { - checkIpAddress(); - } - - @Override - public void onDropTable(DropTableEvent table) throws MetaException { - checkIpAddress(); - } - - @Override - public void onAlterTable(AlterTableEvent event) throws MetaException { - checkIpAddress(); - } - - @Override - public void onAlterPartition(AlterPartitionEvent event) throws MetaException { - checkIpAddress(); - } - - @Override - public void onLoadPartitionDone(LoadPartitionDoneEvent partEvent) throws MetaException { - checkIpAddress(); - } -}