http://git-wip-us.apache.org/repos/asf/hive/blob/081fa368/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java
deleted file mode 100644
index 953c5fd..0000000
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java
+++ /dev/null
@@ -1,474 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore.security;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.framework.api.ACLProvider;
-import org.apache.curator.framework.imps.CuratorFrameworkState;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
-import org.apache.hadoop.security.token.delegation.MetastoreDelegationTokenSupport;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.ZooDefs;
-import org.apache.zookeeper.ZooDefs.Ids;
-import org.apache.zookeeper.ZooDefs.Perms;
-import org.apache.zookeeper.data.ACL;
-import org.apache.zookeeper.data.Id;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * ZooKeeper token store implementation.
- */
-public class ZooKeeperTokenStore implements DelegationTokenStore {
-
-  private static final Logger LOGGER =
-      LoggerFactory.getLogger(ZooKeeperTokenStore.class.getName());
-
-  protected static final String ZK_SEQ_FORMAT = "%010d";
-  private static final String NODE_KEYS = "/keys";
-  private static final String NODE_TOKENS = "/tokens";
-
-  private String rootNode = "";
-  private volatile CuratorFramework zkSession;
-  private String zkConnectString;
-  private int connectTimeoutMillis;
-  private List<ACL> newNodeAcl = Arrays.asList(new ACL(Perms.ALL, Ids.AUTH_IDS));
-
-  /**
-   * ACLProvider permissions will be used in case parent dirs need to be created
-   */
-  private final ACLProvider aclDefaultProvider =  new ACLProvider() {
-
-    @Override
-    public List<ACL> getDefaultAcl() {
-      return newNodeAcl;
-    }
-
-    @Override
-    public List<ACL> getAclForPath(String path) {
-      return getDefaultAcl();
-    }
-  };
-
-
-  private final String WHEN_ZK_DSTORE_MSG = "when zookeeper based delegation token storage is enabled"
-      + "(hive.cluster.delegation.token.store.class=" + ZooKeeperTokenStore.class.getName() + ")";
-
-  private Configuration conf;
-
-  private HadoopThriftAuthBridge.Server.ServerMode serverMode;
-
-  /**
-   * Default constructor for dynamic instantiation w/ Configurable
-   * (ReflectionUtils does not support Configuration constructor injection).
-   */
-  protected ZooKeeperTokenStore() {
-  }
-
-  private CuratorFramework getSession() {
-    if (zkSession == null || zkSession.getState() == CuratorFrameworkState.STOPPED) {
-      synchronized (this) {
-        if (zkSession == null || zkSession.getState() == CuratorFrameworkState.STOPPED) {
-          zkSession =
-              CuratorFrameworkFactory.builder().connectString(zkConnectString)
-                  .connectionTimeoutMs(connectTimeoutMillis).aclProvider(aclDefaultProvider)
-                  .retryPolicy(new ExponentialBackoffRetry(1000, 3)).build();
-          zkSession.start();
-        }
-      }
-    }
-    return zkSession;
-  }
-
-  private void setupJAASConfig(Configuration conf) throws IOException {
-    if (!UserGroupInformation.getLoginUser().isFromKeytab()) {
-      // The process has not logged in using keytab
-      // this should be a test mode, can't use keytab to authenticate
-      // with zookeeper.
-      LOGGER.warn("Login is not from keytab");
-      return;
-    }
-
-    String principal;
-    String keytab;
-    switch (serverMode) {
-    case METASTORE:
-      principal = getNonEmptyConfVar(conf, "hive.metastore.kerberos.principal");
-      keytab = getNonEmptyConfVar(conf, "hive.metastore.kerberos.keytab.file");
-      break;
-    case HIVESERVER2:
-      principal = getNonEmptyConfVar(conf, "hive.server2.authentication.kerberos.principal");
-      keytab = getNonEmptyConfVar(conf, "hive.server2.authentication.kerberos.keytab");
-      break;
-    default:
-      throw new AssertionError("Unexpected server mode " + serverMode);
-    }
-    SecurityUtils.setZookeeperClientKerberosJaasConfig(principal, keytab);
-  }
-
-  private String getNonEmptyConfVar(Configuration conf, String param) throws IOException {
-    String val = conf.get(param);
-    if (val == null || val.trim().isEmpty()) {
-      throw new IOException("Configuration parameter " + param + " should be set, "
-          + WHEN_ZK_DSTORE_MSG);
-    }
-    return val;
-  }
-
-  /**
-   * Create a path if it does not already exist ("mkdir -p")
-   * @param path string with '/' separator
-   * @param acl list of ACL entries
-   * @throws TokenStoreException
-   */
-  public void ensurePath(String path, List<ACL> acl)
-      throws TokenStoreException {
-    try {
-      CuratorFramework zk = getSession();
-      String node = zk.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT)
-          .withACL(acl).forPath(path);
-      LOGGER.info("Created path: {} ", node);
-    } catch (KeeperException.NodeExistsException e) {
-      // node already exists
-    } catch (Exception e) {
-      throw new TokenStoreException("Error creating path " + path, e);
-    }
-  }
-
-  /**
-   * Parse ACL permission string, from ZooKeeperMain private method
-   * @param permString
-   * @return
-   */
-  public static int getPermFromString(String permString) {
-      int perm = 0;
-      for (int i = 0; i < permString.length(); i++) {
-          switch (permString.charAt(i)) {
-          case 'r':
-              perm |= ZooDefs.Perms.READ;
-              break;
-          case 'w':
-              perm |= ZooDefs.Perms.WRITE;
-              break;
-          case 'c':
-              perm |= ZooDefs.Perms.CREATE;
-              break;
-          case 'd':
-              perm |= ZooDefs.Perms.DELETE;
-              break;
-          case 'a':
-              perm |= ZooDefs.Perms.ADMIN;
-              break;
-          default:
-              LOGGER.error("Unknown perm type: " + permString.charAt(i));
-          }
-      }
-      return perm;
-  }
-
-  /**
-   * Parse comma separated list of ACL entries to secure generated nodes, e.g.
-   * <code>sasl:hive/host1@MY.DOMAIN:cdrwa,sasl:hive/host2@MY.DOMAIN:cdrwa</code>
-   * @param aclString
-   * @return ACL list
-   */
-  public static List<ACL> parseACLs(String aclString) {
-    String[] aclComps = StringUtils.splitByWholeSeparator(aclString, ",");
-    List<ACL> acl = new ArrayList<ACL>(aclComps.length);
-    for (String a : aclComps) {
-      if (StringUtils.isBlank(a)) {
-         continue;
-      }
-      a = a.trim();
-      // from ZooKeeperMain private method
-      int firstColon = a.indexOf(':');
-      int lastColon = a.lastIndexOf(':');
-      if (firstColon == -1 || lastColon == -1 || firstColon == lastColon) {
-         LOGGER.error(a + " does not have the form scheme:id:perm");
-         continue;
-      }
-      ACL newAcl = new ACL();
-      newAcl.setId(new Id(a.substring(0, firstColon), a.substring(
-          firstColon + 1, lastColon)));
-      newAcl.setPerms(getPermFromString(a.substring(lastColon + 1)));
-      acl.add(newAcl);
-    }
-    return acl;
-  }
-
-  private void initClientAndPaths() {
-    if (this.zkSession != null) {
-      this.zkSession.close();
-    }
-    try {
-      ensurePath(rootNode + NODE_KEYS, newNodeAcl);
-      ensurePath(rootNode + NODE_TOKENS, newNodeAcl);
-    } catch (TokenStoreException e) {
-      throw e;
-    }
-  }
-
-  @Override
-  public void setConf(Configuration conf) {
-    if (conf == null) {
-      throw new IllegalArgumentException("conf is null");
-    }
-    this.conf = conf;
-  }
-
-  @Override
-  public Configuration getConf() {
-    return null; // not required
-  }
-
-  private Map<Integer, byte[]> getAllKeys() throws KeeperException, InterruptedException {
-
-    String masterKeyNode = rootNode + NODE_KEYS;
-
-    // get children of key node
-    List<String> nodes = zkGetChildren(masterKeyNode);
-
-    // read each child node, add to results
-    Map<Integer, byte[]> result = new HashMap<Integer, byte[]>();
-    for (String node : nodes) {
-      String nodePath = masterKeyNode + "/" + node;
-      byte[] data = zkGetData(nodePath);
-      if (data != null) {
-        result.put(getSeq(node), data);
-      }
-    }
-    return result;
-  }
-
-  private List<String> zkGetChildren(String path) {
-    CuratorFramework zk = getSession();
-    try {
-      return zk.getChildren().forPath(path);
-    } catch (Exception e) {
-      throw new TokenStoreException("Error getting children for " + path, e);
-    }
-  }
-
-  private byte[] zkGetData(String nodePath) {
-    CuratorFramework zk = getSession();
-    try {
-      return zk.getData().forPath(nodePath);
-    } catch (KeeperException.NoNodeException ex) {
-      return null;
-    } catch (Exception e) {
-      throw new TokenStoreException("Error reading " + nodePath, e);
-    }
-  }
-
-  private int getSeq(String path) {
-    String[] pathComps = path.split("/");
-    return Integer.parseInt(pathComps[pathComps.length-1]);
-  }
-
-  @Override
-  public int addMasterKey(String s) {
-    String keysPath = rootNode + NODE_KEYS + "/";
-    CuratorFramework zk = getSession();
-    String newNode;
-    try {
-      newNode = zk.create().withMode(CreateMode.PERSISTENT_SEQUENTIAL).withACL(newNodeAcl)
-          .forPath(keysPath, s.getBytes());
-    } catch (Exception e) {
-      throw new TokenStoreException("Error creating new node with path " + keysPath, e);
-    }
-    LOGGER.info("Added key {}", newNode);
-    return getSeq(newNode);
-  }
-
-  @Override
-  public void updateMasterKey(int keySeq, String s) {
-    CuratorFramework zk = getSession();
-    String keyPath = rootNode + NODE_KEYS + "/" + String.format(ZK_SEQ_FORMAT, keySeq);
-    try {
-      zk.setData().forPath(keyPath, s.getBytes());
-    } catch (Exception e) {
-      throw new TokenStoreException("Error setting data in " + keyPath, e);
-    }
-  }
-
-  @Override
-  public boolean removeMasterKey(int keySeq) {
-    String keyPath = rootNode + NODE_KEYS + "/" + String.format(ZK_SEQ_FORMAT, keySeq);
-    zkDelete(keyPath);
-    return true;
-  }
-
-  private void zkDelete(String path) {
-    CuratorFramework zk = getSession();
-    try {
-      zk.delete().forPath(path);
-    } catch (KeeperException.NoNodeException ex) {
-      // already deleted
-    } catch (Exception e) {
-      throw new TokenStoreException("Error deleting " + path, e);
-    }
-  }
-
-  @Override
-  public String[] getMasterKeys() {
-    try {
-      Map<Integer, byte[]> allKeys = getAllKeys();
-      String[] result = new String[allKeys.size()];
-      int resultIdx = 0;
-      for (byte[] keyBytes : allKeys.values()) {
-          result[resultIdx++] = new String(keyBytes);
-      }
-      return result;
-    } catch (KeeperException ex) {
-      throw new TokenStoreException(ex);
-    } catch (InterruptedException ex) {
-      throw new TokenStoreException(ex);
-    }
-  }
-
-
-  private String getTokenPath(DelegationTokenIdentifier tokenIdentifier) {
-    try {
-      return rootNode + NODE_TOKENS + "/"
-          + TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier);
-    } catch (IOException ex) {
-      throw new TokenStoreException("Failed to encode token identifier", ex);
-    }
-  }
-
-  @Override
-  public boolean addToken(DelegationTokenIdentifier tokenIdentifier,
-      DelegationTokenInformation token) {
-    byte[] tokenBytes = MetastoreDelegationTokenSupport.encodeDelegationTokenInformation(token);
-    String tokenPath = getTokenPath(tokenIdentifier);
-    CuratorFramework zk = getSession();
-    String newNode;
-    try {
-      newNode = zk.create().withMode(CreateMode.PERSISTENT).withACL(newNodeAcl)
-          .forPath(tokenPath, tokenBytes);
-    } catch (Exception e) {
-      throw new TokenStoreException("Error creating new node with path " + tokenPath, e);
-    }
-
-    LOGGER.info("Added token: {}", newNode);
-    return true;
-  }
-
-  @Override
-  public boolean removeToken(DelegationTokenIdentifier tokenIdentifier) {
-    String tokenPath = getTokenPath(tokenIdentifier);
-    zkDelete(tokenPath);
-    return true;
-  }
-
-  @Override
-  public DelegationTokenInformation getToken(DelegationTokenIdentifier tokenIdentifier) {
-    byte[] tokenBytes = zkGetData(getTokenPath(tokenIdentifier));
-    if(tokenBytes == null) {
-      // The token is already removed.
-      return null;
-    }
-    try {
-      return MetastoreDelegationTokenSupport.decodeDelegationTokenInformation(tokenBytes);
-    } catch (Exception ex) {
-      throw new TokenStoreException("Failed to decode token", ex);
-    }
-  }
-
-  @Override
-  public List<DelegationTokenIdentifier> getAllDelegationTokenIdentifiers() {
-    String containerNode = rootNode + NODE_TOKENS;
-    final List<String> nodes = zkGetChildren(containerNode);
-    List<DelegationTokenIdentifier> result = new java.util.ArrayList<DelegationTokenIdentifier>(
-        nodes.size());
-    for (String node : nodes) {
-      DelegationTokenIdentifier id = new DelegationTokenIdentifier();
-      try {
-        TokenStoreDelegationTokenSecretManager.decodeWritable(id, node);
-        result.add(id);
-      } catch (Exception e) {
-        LOGGER.warn("Failed to decode token '{}'", node);
-      }
-    }
-    return result;
-  }
-
-  @Override
-  public void close() throws IOException {
-    if (this.zkSession != null) {
-      this.zkSession.close();
-    }
-  }
-
-  @Override
-  public void init(Object hmsHandler, HadoopThriftAuthBridge.Server.ServerMode sMode) {
-    this.serverMode = sMode;
-    zkConnectString =
-        conf.get(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR, null);
-    if (zkConnectString == null || zkConnectString.trim().isEmpty()) {
-      // try alternate config param
-      zkConnectString =
-          conf.get(
-              MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR_ALTERNATE,
-              null);
-      if (zkConnectString == null || zkConnectString.trim().isEmpty()) {
-        throw new IllegalArgumentException("Zookeeper connect string has to be specified through "
-            + "either " + MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR
-            + " or "
-            + MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR_ALTERNATE
-            + WHEN_ZK_DSTORE_MSG);
-      }
-    }
-    connectTimeoutMillis =
-        conf.getInt(
-            MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_CONNECT_TIMEOUTMILLIS,
-            CuratorFrameworkFactory.builder().getConnectionTimeoutMs());
-    String aclStr = conf.get(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ACL, null);
-    if (StringUtils.isNotBlank(aclStr)) {
-      this.newNodeAcl = parseACLs(aclStr);
-    }
-    rootNode =
-        conf.get(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ZNODE,
-            MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ZNODE_DEFAULT) + serverMode;
-
-    try {
-      // Install the JAAS Configuration for the runtime
-      setupJAASConfig(conf);
-    } catch (IOException e) {
-      throw new TokenStoreException("Error setting up JAAS configuration for zookeeper client "
-          + e.getMessage(), e);
-    }
-    initClientAndPaths();
-  }
-
-}

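The removed parseACLs()/getPermFromString() pair implements ZooKeeper's standard scheme:id:perms ACL string notation. A minimal self-contained sketch of that parsing approach, assuming only the ZooKeeper client library on the classpath (the class name DemoAclParse and its main() are illustrative, not part of this patch):

    import org.apache.zookeeper.ZooDefs;
    import org.apache.zookeeper.data.ACL;
    import org.apache.zookeeper.data.Id;

    public class DemoAclParse {
      // Accepts one entry in the same scheme:id:perms form, e.g. "sasl:hive/host1@MY.DOMAIN:cdrwa".
      static ACL parseOne(String a) {
        int first = a.indexOf(':');
        int last = a.lastIndexOf(':');
        if (first == -1 || last == -1 || first == last) {
          throw new IllegalArgumentException(a + " does not have the form scheme:id:perm");
        }
        int perm = 0;
        for (char c : a.substring(last + 1).toCharArray()) {
          switch (c) {
            case 'r': perm |= ZooDefs.Perms.READ; break;
            case 'w': perm |= ZooDefs.Perms.WRITE; break;
            case 'c': perm |= ZooDefs.Perms.CREATE; break;
            case 'd': perm |= ZooDefs.Perms.DELETE; break;
            case 'a': perm |= ZooDefs.Perms.ADMIN; break;
            default: throw new IllegalArgumentException("Unknown perm type: " + c);
          }
        }
        return new ACL(perm, new Id(a.substring(0, first), a.substring(first + 1, last)));
      }

      public static void main(String[] args) {
        // "cdrwa" maps to CREATE|DELETE|READ|WRITE|ADMIN, i.e. a perms bitmask of 31.
        System.out.println(parseOne("sasl:hive/host1@MY.DOMAIN:cdrwa"));
      }
    }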
http://git-wip-us.apache.org/repos/asf/hive/blob/081fa368/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
deleted file mode 100644
index a50c0a3..0000000
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
+++ /dev/null
@@ -1,490 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore.tools;
-
-import java.net.URI;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.ObjectStore;
-
-/**
- * This class provides Hive admins a tool to
- * - execute JDOQL against the metastore using DataNucleus
- * - perform HA name node upgrade
- */
-
-public class HiveMetaTool {
-
-  private static final Logger LOG = LoggerFactory.getLogger(HiveMetaTool.class.getName());
-  private final Options cmdLineOptions = new Options();
-  private ObjectStore objStore;
-  private boolean isObjStoreInitialized;
-
-  public HiveMetaTool() {
-    this.isObjStoreInitialized = false;
-  }
-
-  @SuppressWarnings("static-access")
-  private void init() {
-
-    System.out.println("Initializing HiveMetaTool..");
-
-    Option help = new Option("help", "print this message");
-    Option listFSRoot = new Option("listFSRoot", "print the current FS root locations");
-    Option executeJDOQL =
-        OptionBuilder.withArgName("query-string")
-            .hasArgs()
-            .withDescription("execute the given JDOQL query")
-            .create("executeJDOQL");
-
-    /* Ideally we want to specify the different arguments to updateLocation as separate argNames.
-     * However if we did that, HelpFormatter swallows all but the last argument. Note that this is
-     * a known issue with the HelpFormatter class that has not been fixed. We specify all arguments
-     * with a single argName to workaround this HelpFormatter bug.
-     */
-    Option updateFSRootLoc =
-        OptionBuilder
-            .withArgName("new-loc> " + "<old-loc")
-            .hasArgs(2)
-            .withDescription(
-                "Update FS root location in the metastore to new location.Both 
new-loc and " +
-                    "old-loc should be valid URIs with valid host names and 
schemes." +
-                    "When run with the dryRun option changes are displayed but 
are not " +
-                    "persisted. When run with the serdepropKey/tablePropKey 
option " +
-                    "updateLocation looks for the 
serde-prop-key/table-prop-key that is " +
-                    "specified and updates its value if found.")
-                    .create("updateLocation");
-    Option dryRun = new Option("dryRun" , "Perform a dry run of updateLocation changes.When " +
-      "run with the dryRun option updateLocation changes are displayed but not persisted. " +
-      "dryRun is valid only with the updateLocation option.");
-    Option serdePropKey =
-        OptionBuilder.withArgName("serde-prop-key")
-        .hasArgs()
-        .withValueSeparator()
-        .withDescription("Specify the key for serde property to be updated. 
serdePropKey option " +
-           "is valid only with updateLocation option.")
-        .create("serdePropKey");
-    Option tablePropKey =
-        OptionBuilder.withArgName("table-prop-key")
-        .hasArg()
-        .withValueSeparator()
-        .withDescription("Specify the key for table property to be updated. 
tablePropKey option " +
-          "is valid only with updateLocation option.")
-        .create("tablePropKey");
-    Option prepareAcidUpgrade =
-        OptionBuilder.withArgName("find-compactions")
-            .hasOptionalArg() //directory to output results to
-            .withDescription("Generates a set Compaction commands to run to 
prepare for Hive 2.x" +
-                " to 3.0 upgrade")
-            .create("prepareAcidUpgrade");
-
-    cmdLineOptions.addOption(help);
-    cmdLineOptions.addOption(listFSRoot);
-    cmdLineOptions.addOption(executeJDOQL);
-    cmdLineOptions.addOption(updateFSRootLoc);
-    cmdLineOptions.addOption(dryRun);
-    cmdLineOptions.addOption(serdePropKey);
-    cmdLineOptions.addOption(tablePropKey);
-    cmdLineOptions.addOption(prepareAcidUpgrade);
-  }
-
-  private void initObjectStore(Configuration conf) {
-    if (!isObjStoreInitialized) {
-      objStore = new ObjectStore();
-      objStore.setConf(conf);
-      isObjStoreInitialized = true;
-    }
-  }
-
-  private void shutdownObjectStore() {
-    if (isObjStoreInitialized) {
-      objStore.shutdown();
-      isObjStoreInitialized = false;
-    }
-  }
-
-  private void listFSRoot() {
-    Configuration conf = MetastoreConf.newMetastoreConf();
-    initObjectStore(conf);
-
-    Set<String> hdfsRoots = objStore.listFSRoots();
-    if (hdfsRoots != null) {
-      System.out.println("Listing FS Roots..");
-      for (String s : hdfsRoots) {
-        System.out.println(s);
-      }
-    } else {
-      System.err.println("Encountered error during listFSRoot - " +
-        "commit of JDO transaction failed");
-    }
-  }
-
-  private void executeJDOQLSelect(String query) {
-    Configuration conf = MetastoreConf.newMetastoreConf();
-    initObjectStore(conf);
-
-    System.out.println("Executing query: " + query);
-    try (ObjectStore.QueryWrapper queryWrapper = new ObjectStore.QueryWrapper()) {
-      Collection<?> result = objStore.executeJDOQLSelect(query, queryWrapper);
-      if (result != null) {
-        Iterator<?> iter = result.iterator();
-        while (iter.hasNext()) {
-          Object o = iter.next();
-          System.out.println(o.toString());
-        }
-      } else {
-        System.err.println("Encountered error during executeJDOQLSelect -" +
-            "commit of JDO transaction failed.");
-      }
-    }
-  }
-
-  private void executeJDOQLUpdate(String query) {
-    Configuration conf = MetastoreConf.newMetastoreConf();
-    initObjectStore(conf);
-
-    System.out.println("Executing query: " + query);
-    long numUpdated = objStore.executeJDOQLUpdate(query);
-    if (numUpdated >= 0) {
-      System.out.println("Number of records updated: " + numUpdated);
-    } else {
-      System.err.println("Encountered error during executeJDOQL -" +
-        "commit of JDO transaction failed.");
-    }
-  }
-
-  private int printUpdateLocations(Map<String, String> updateLocations) {
-    int count = 0;
-    for (String key: updateLocations.keySet()) {
-      String value = updateLocations.get(key);
-      System.out.println("old location: " + key + " new location: " + value);
-      count++;
-    }
-    return count;
-  }
-
-  private void printTblURIUpdateSummary(ObjectStore.UpdateMStorageDescriptorTblURIRetVal retVal,
-    boolean isDryRun) {
-    String tblName = "SDS";
-    String fieldName = "LOCATION";
-
-    if (retVal == null) {
-      System.err.println("Encountered error while executing 
updateMStorageDescriptorTblURI - " +
-          "commit of JDO transaction failed. Failed to update FSRoot locations 
in " +
-          fieldName + "field in " + tblName + " table.");
-    } else {
-      Map<String, String> updateLocations = retVal.getUpdateLocations();
-      if (isDryRun) {
-        System.out.println("Dry Run of updateLocation on table " + tblName + 
"..");
-      } else {
-        System.out.println("Successfully updated the following locations..");
-      }
-      int count = printUpdateLocations(updateLocations);
-      if (isDryRun) {
-        System.out.println("Found " + count + " records in " + tblName + " 
table to update");
-      } else {
-        System.out.println("Updated " + count + " records in " + tblName + " 
table");
-      }
-      List<String> badRecords = retVal.getBadRecords();
-      if (badRecords.size() > 0) {
-        System.err.println("Warning: Found records with bad " + fieldName +  " 
in " +
-          tblName + " table.. ");
-        for (String badRecord:badRecords) {
-          System.err.println("bad location URI: " + badRecord);
-        }
-      }
-      int numNullRecords = retVal.getNumNullRecords();
-      if (numNullRecords != 0) {
-        LOG.debug("Number of NULL location URI: " + numNullRecords +
-            ". This can happen for View or Index.");
-      }
-    }
-  }
-
-  private void printDatabaseURIUpdateSummary(ObjectStore.UpdateMDatabaseURIRetVal retVal,
-    boolean isDryRun) {
-    String tblName = "DBS";
-    String fieldName = "LOCATION_URI";
-
-    if (retVal == null) {
-      System.err.println("Encountered error while executing updateMDatabaseURI 
- " +
-          "commit of JDO transaction failed. Failed to update FSRoot locations 
in " +
-          fieldName + "field in " + tblName + " table.");
-    } else {
-      Map<String, String> updateLocations = retVal.getUpdateLocations();
-      if (isDryRun) {
-        System.out.println("Dry Run of updateLocation on table " + tblName + 
"..");
-      } else {
-        System.out.println("Successfully updated the following locations..");
-      }
-      int count = printUpdateLocations(updateLocations);
-      if (isDryRun) {
-        System.out.println("Found " + count + " records in " + tblName + " 
table to update");
-      } else {
-        System.out.println("Updated " + count + " records in " + tblName + " 
table");
-      }
-      List<String> badRecords = retVal.getBadRecords();
-      if (badRecords.size() > 0) {
-        System.err.println("Warning: Found records with bad " + fieldName +  " 
in " +
-          tblName + " table.. ");
-        for (String badRecord:badRecords) {
-          System.err.println("bad location URI: " + badRecord);
-        }
-      }
-    }
-  }
-
-  private void printPropURIUpdateSummary(ObjectStore.UpdatePropURIRetVal retVal, String
-      tablePropKey, boolean isDryRun, String tblName, String methodName) {
-    if (retVal == null) {
-      System.err.println("Encountered error while executing " + methodName + " 
- " +
-          "commit of JDO transaction failed. Failed to update FSRoot locations 
in " +
-          "value field corresponding to" + tablePropKey + " in " + tblName + " 
table.");
-    } else {
-      Map<String, String> updateLocations = retVal.getUpdateLocations();
-      if (isDryRun) {
-        System.out.println("Dry Run of updateLocation on table " + tblName + 
"..");
-      } else {
-        System.out.println("Successfully updated the following locations..");
-      }
-      int count = printUpdateLocations(updateLocations);
-      if (isDryRun) {
-        System.out.println("Found " + count + " records in " + tblName + " 
table to update");
-      } else {
-        System.out.println("Updated " + count + " records in " + tblName + " 
table");
-      }
-      List<String> badRecords = retVal.getBadRecords();
-      if (badRecords.size() > 0) {
-        System.err.println("Warning: Found records with bad " + tablePropKey + 
 " key in " +
-            tblName + " table.. ");
-        for (String badRecord:badRecords) {
-          System.err.println("bad location URI: " + badRecord);
-        }
-      }
-    }
-  }
-
-  private void printSerdePropURIUpdateSummary(ObjectStore.UpdateSerdeURIRetVal retVal,
-    String serdePropKey, boolean isDryRun) {
-    String tblName = "SERDE_PARAMS";
-
-    if (retVal == null) {
-      System.err.println("Encountered error while executing updateSerdeURI - " 
+
-        "commit of JDO transaction failed. Failed to update FSRoot locations 
in " +
-        "value field corresponding to " + serdePropKey + " in " + tblName + " 
table.");
-    } else {
-      Map<String, String> updateLocations = retVal.getUpdateLocations();
-      if (isDryRun) {
-        System.out.println("Dry Run of updateLocation on table " + tblName + 
"..");
-      } else {
-        System.out.println("Successfully updated the following locations..");
-      }
-      int count = printUpdateLocations(updateLocations);
-      if (isDryRun) {
-        System.out.println("Found " + count + " records in " + tblName + " 
table to update");
-      } else {
-        System.out.println("Updated " + count + " records in " + tblName + " 
table");
-      }
-      List<String> badRecords = retVal.getBadRecords();
-      if (badRecords.size() > 0) {
-        System.err.println("Warning: Found records with bad " + serdePropKey + 
 " key in " +
-        tblName + " table.. ");
-        for (String badRecord:badRecords) {
-          System.err.println("bad location URI: " + badRecord);
-        }
-      }
-    }
-  }
-
-  public void updateFSRootLocation(URI oldURI, URI newURI, String serdePropKey, String
-      tablePropKey, boolean isDryRun) {
-    Configuration conf = MetastoreConf.newMetastoreConf();
-    initObjectStore(conf);
-
-    System.out.println("Looking for LOCATION_URI field in DBS table to 
update..");
-    ObjectStore.UpdateMDatabaseURIRetVal updateMDBURIRetVal = objStore.updateMDatabaseURI(oldURI,
-                                                                 newURI, isDryRun);
-    printDatabaseURIUpdateSummary(updateMDBURIRetVal, isDryRun);
-
-    System.out.println("Looking for LOCATION field in SDS table to update..");
-    ObjectStore.UpdateMStorageDescriptorTblURIRetVal updateTblURIRetVal =
-                         objStore.updateMStorageDescriptorTblURI(oldURI, newURI, isDryRun);
-    printTblURIUpdateSummary(updateTblURIRetVal, isDryRun);
-
-    if (tablePropKey != null) {
-      System.out.println("Looking for value of " + tablePropKey + " key in 
TABLE_PARAMS table " +
-          "to update..");
-      ObjectStore.UpdatePropURIRetVal updateTblPropURIRetVal =
-          objStore.updateTblPropURI(oldURI, newURI,
-              tablePropKey, isDryRun);
-      printPropURIUpdateSummary(updateTblPropURIRetVal, tablePropKey, isDryRun, "TABLE_PARAMS",
-          "updateTblPropURI");
-
-      System.out.println("Looking for value of " + tablePropKey + " key in 
SD_PARAMS table " +
-        "to update..");
-      ObjectStore.UpdatePropURIRetVal updatePropURIRetVal = objStore
-          .updateMStorageDescriptorTblPropURI(oldURI, newURI, tablePropKey, isDryRun);
-      printPropURIUpdateSummary(updatePropURIRetVal, tablePropKey, isDryRun, "SD_PARAMS",
-          "updateMStorageDescriptorTblPropURI");
-    }
-
-    if (serdePropKey != null) {
-      System.out.println("Looking for value of " + serdePropKey + " key in 
SERDE_PARAMS table " +
-        "to update..");
-      ObjectStore.UpdateSerdeURIRetVal updateSerdeURIretVal = objStore.updateSerdeURI(oldURI,
-                                                                newURI, serdePropKey, isDryRun);
-      printSerdePropURIUpdateSummary(updateSerdeURIretVal, serdePropKey, isDryRun);
-    }
-  }
-  private static void printAndExit(HiveMetaTool metaTool) {
-    HelpFormatter formatter = new HelpFormatter();
-    formatter.printHelp("metatool", metaTool.cmdLineOptions);
-    System.exit(1);
-  }
-
-  public static void main(String[] args) {
-    HiveMetaTool metaTool = new HiveMetaTool();
-    metaTool.init();
-    CommandLineParser parser = new GnuParser();
-    CommandLine line = null;
-
-    try {
-      try {
-        line = parser.parse(metaTool.cmdLineOptions, args);
-      } catch (ParseException e) {
-        System.err.println("HiveMetaTool:Parsing failed.  Reason: " + 
e.getLocalizedMessage());
-        printAndExit(metaTool);
-      }
-
-      if (line.hasOption("help")) {
-        HelpFormatter formatter = new HelpFormatter();
-        formatter.printHelp("metatool", metaTool.cmdLineOptions);
-      } else if (line.hasOption("listFSRoot")) {
-        if (line.hasOption("dryRun")) {
-          System.err.println("HiveMetaTool: dryRun is not valid with 
listFSRoot");
-          printAndExit(metaTool);
-        } else if (line.hasOption("serdePropKey")) {
-          System.err.println("HiveMetaTool: serdePropKey is not valid with 
listFSRoot");
-          printAndExit(metaTool);
-        } else if (line.hasOption("tablePropKey")) {
-          System.err.println("HiveMetaTool: tablePropKey is not valid with 
listFSRoot");
-          printAndExit(metaTool);
-        }
-        metaTool.listFSRoot();
-      } else if (line.hasOption("executeJDOQL")) {
-        String query = line.getOptionValue("executeJDOQL");
-        if (line.hasOption("dryRun")) {
-          System.err.println("HiveMetaTool: dryRun is not valid with 
executeJDOQL");
-          printAndExit(metaTool);
-        } else if (line.hasOption("serdePropKey")) {
-          System.err.println("HiveMetaTool: serdePropKey is not valid with 
executeJDOQL");
-          printAndExit(metaTool);
-        } else if (line.hasOption("tablePropKey")) {
-          System.err.println("HiveMetaTool: tablePropKey is not valid with 
executeJDOQL");
-          printAndExit(metaTool);
-        }
-        if (query.toLowerCase().trim().startsWith("select")) {
-          metaTool.executeJDOQLSelect(query);
-        } else if (query.toLowerCase().trim().startsWith("update")) {
-          metaTool.executeJDOQLUpdate(query);
-        } else {
-          System.err.println("HiveMetaTool:Unsupported statement type");
-          printAndExit(metaTool);
-        }
-      } else if (line.hasOption("updateLocation")) {
-        String[] loc = line.getOptionValues("updateLocation");
-        boolean isDryRun = false;
-        String serdepropKey = null;
-        String tablePropKey = null;
-
-        if (loc.length != 2 && loc.length != 3) {
-          System.err.println("HiveMetaTool:updateLocation takes in 2 required 
and 1 " +
-              "optional arguments but " +
-              "was passed " + loc.length + " arguments");
-          printAndExit(metaTool);
-        }
-
-        Path newPath = new Path(loc[0]);
-        Path oldPath = new Path(loc[1]);
-
-        URI oldURI = oldPath.toUri();
-        URI newURI = newPath.toUri();
-
-        if (line.hasOption("dryRun")) {
-          isDryRun = true;
-        }
-
-        if (line.hasOption("serdePropKey")) {
-          serdepropKey = line.getOptionValue("serdePropKey");
-        }
-
-        if (line.hasOption("tablePropKey")) {
-          tablePropKey = line.getOptionValue("tablePropKey");
-        }
-
-        /*
-         * validate input - Both new and old URI should contain valid host names and valid schemes.
-         * port is optional in both the URIs since HDFS HA NN URI doesn't have a port.
-         */
-          if (oldURI.getHost() == null || newURI.getHost() == null) {
-            System.err.println("HiveMetaTool:A valid host is required in both 
old-loc and new-loc");
-          } else if (oldURI.getScheme() == null || newURI.getScheme() == null) 
{
-            System.err.println("HiveMetaTool:A valid scheme is required in 
both old-loc and new-loc");
-          } else {
-            metaTool.updateFSRootLocation(oldURI, newURI, serdepropKey, 
tablePropKey, isDryRun);
-          }
-      } else {
-          if (line.hasOption("dryRun")) {
-            System.err.println("HiveMetaTool: dryRun is not a valid standalone 
option");
-          } else if (line.hasOption("serdePropKey")) {
-            System.err.println("HiveMetaTool: serdePropKey is not a valid 
standalone option");
-          } else if (line.hasOption("tablePropKey")) {
-            System.err.println("HiveMetaTool: tablePropKey is not a valid 
standalone option");
-            printAndExit(metaTool);
-          } else {
-            System.err.print("HiveMetaTool:Parsing failed.  Reason: Invalid 
arguments: " );
-            for (String s : line.getArgs()) {
-              System.err.print(s + " ");
-            }
-            System.err.println();
-          }
-          printAndExit(metaTool);
-        }
-      } finally {
-        metaTool.shutdownObjectStore();
-      }
-   }
-}
\ No newline at end of file

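The option handling removed above is plain commons-cli: a GnuParser plus the static OptionBuilder, with the two updateLocation values forced under a single argName to dodge the HelpFormatter bug noted in init(). A minimal sketch of that two-value option pattern, assuming commons-cli 1.x on the classpath (DemoMetaToolCli and the example URIs are illustrative, not part of this patch):

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.GnuParser;
    import org.apache.commons.cli.OptionBuilder;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class DemoMetaToolCli {
      @SuppressWarnings("static-access")
      public static void main(String[] args) throws ParseException {
        Options opts = new Options();
        // Both values ride under one argName, mirroring the HelpFormatter workaround above.
        opts.addOption(OptionBuilder.withArgName("new-loc> <old-loc")
            .hasArgs(2)
            .withDescription("update FS root location")
            .create("updateLocation"));

        CommandLine line = new GnuParser().parse(opts,
            new String[] {"-updateLocation", "hdfs://new-nn:8020", "hdfs://old-nn:8020"});
        String[] loc = line.getOptionValues("updateLocation");
        // As in the removed main(): loc[0] is the new location, loc[1] the old one.
        System.out.println("new=" + loc[0] + " old=" + loc[1]);
      }
    }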
http://git-wip-us.apache.org/repos/asf/hive/blob/081fa368/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
deleted file mode 100644
index 2da07a5..0000000
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
+++ /dev/null
@@ -1,673 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Lists;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.util.IllegalFormatException;
-import java.util.List;
-
-public class HiveSchemaHelper {
-  private static final Logger LOG = LoggerFactory.getLogger(HiveSchemaHelper.class);
-
-  public static final String DB_DERBY = "derby";
-  public static final String DB_HIVE = "hive";
-  public static final String DB_MSSQL = "mssql";
-  public static final String DB_MYSQL = "mysql";
-  public static final String DB_POSTGRACE = "postgres";
-  public static final String DB_ORACLE = "oracle";
-  public static final String EMBEDDED_HS2_URL =
-      "jdbc:hive2://?hive.conf.restricted.list=;hive.security.authorization.sqlstd.confwhitelist=.*;"
-      + "hive.security.authorization.sqlstd.confwhitelist.append=.*;hive.security.authorization.enabled=false;"
-      + "hive.metastore.uris=;hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdConfOnlyAuthorizerFactory;"
-      + "hive.support.concurrency=false;hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager;"
-      + "hive.metastore.rawstore.impl=org.apache.hadoop.hive.metastore.ObjectStore";
-  public static final String HIVE_JDBC_DRIVER = "org.apache.hive.jdbc.HiveDriver";
-
-  /***
-   * Get JDBC connection to metastore db
-   * @param userName metastore connection username
-   * @param password metastore connection password
-   * @param url Metastore URL.  If null will be read from config file.
-   * @param driver Driver class.  If null will be read from config file.
-   * @param printInfo print connection parameters
-   * @param conf hive config object
-   * @param schema the schema to create the connection for
-   * @return metastore connection object
-   * @throws org.apache.hadoop.hive.metastore.HiveMetaException
-   */
-  public static Connection getConnectionToMetastore(String userName, String password, String url,
-      String driver, boolean printInfo, Configuration conf, String schema) throws HiveMetaException {
-    try {
-      url = url == null ? getValidConfVar(MetastoreConf.ConfVars.CONNECT_URL_KEY, conf) : url;
-      driver = driver == null ? getValidConfVar(MetastoreConf.ConfVars.CONNECTION_DRIVER, conf) : driver;
-      if (printInfo) {
-        logAndPrintToStdout("Metastore connection URL:\t " + url);
-        logAndPrintToStdout("Metastore Connection Driver :\t " + driver);
-        logAndPrintToStdout("Metastore connection User:\t " + userName);
-        if (MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST)) {
-          logAndPrintToStdout("Metastore connection Password:\t " + password);
-        }
-      }
-      if ((userName == null) || userName.isEmpty()) {
-        throw new HiveMetaException("UserName empty ");
-      }
-
-      // load required JDBC driver
-      Class.forName(driver);
-
-      // Connect using the JDBC URL and user/pass from conf
-      Connection conn = DriverManager.getConnection(url, userName, password);
-      if (schema != null) {
-        conn.setSchema(schema);
-      }
-      return conn;
-    } catch (IOException | SQLException e) {
-      throw new HiveMetaException("Failed to get schema version.", e);
-    } catch (ClassNotFoundException e) {
-      LOG.error("Unable to find driver class", e);
-      throw new HiveMetaException("Failed to load driver", e);
-    }
-  }
-
-  public static Connection getConnectionToMetastore(MetaStoreConnectionInfo info, String schema)
-      throws HiveMetaException {
-    return getConnectionToMetastore(info.getUsername(), info.getPassword(), info.getUrl(), info.getDriver(),
-        info.getPrintInfo(), info.getConf(), schema);
-  }
-
-  public static String getValidConfVar(MetastoreConf.ConfVars confVar, Configuration conf)
-      throws IOException {
-    String confVarStr = MetastoreConf.getAsString(conf, confVar);
-    if (confVarStr == null || confVarStr.isEmpty()) {
-      throw new IOException("Empty " + confVar.getVarname());
-    }
-    return confVarStr.trim();
-  }
-
-  private static void logAndPrintToStdout(String msg) {
-    LOG.info(msg);
-    System.out.println(msg);
-  }
-
-  public interface NestedScriptParser {
-
-    enum CommandType {
-      PARTIAL_STATEMENT,
-      TERMINATED_STATEMENT,
-      COMMENT
-    }
-
-    String DEFAULT_DELIMITER = ";";
-    String DEFAULT_QUOTE = "\"";
-
-    /**
-     * Find the type of given command
-     *
-     * @param dbCommand
-     * @return
-     */
-    boolean isPartialCommand(String dbCommand) throws IllegalArgumentException;
-
-    /**
-     * Parse the DB specific nesting format and extract the inner script name if any
-     *
-     * @param dbCommand command from parent script
-     * @return
-     * @throws IllegalFormatException
-     */
-    String getScriptName(String dbCommand) throws IllegalArgumentException;
-
-    /**
-     * Find if the given command is a nested script execution
-     *
-     * @param dbCommand
-     * @return
-     */
-    boolean isNestedScript(String dbCommand);
-
-    /**
-     * Find if the given command should not be passed to DB
-     *
-     * @param dbCommand
-     * @return
-     */
-    boolean isNonExecCommand(String dbCommand);
-
-    /**
-     * Get the SQL statement delimiter
-     *
-     * @return
-     */
-    String getDelimiter();
-
-    /**
-     * Get the SQL identifier quotation character
-     *
-     * @return
-     */
-    String getQuoteCharacter();
-
-    /**
-     * Clear any client specific tags
-     *
-     * @return
-     */
-    String cleanseCommand(String dbCommand);
-
-    /**
-     * Does the DB require table/column names to be quoted
-     *
-     * @return
-     */
-    boolean needsQuotedIdentifier();
-
-    /**
-     * Flatten the nested upgrade script into a buffer
-     *
-     * @param scriptDir  upgrade script directory
-     * @param scriptFile upgrade script file
-     * @return string of sql commands
-     */
-    String buildCommand(String scriptDir, String scriptFile)
-        throws IllegalFormatException, IOException;
-
-    /**
-     * Flatten the nested upgrade script into a buffer
-     *
-     * @param scriptDir  upgrade script directory
-     * @param scriptFile upgrade script file
-     * @param fixQuotes whether to replace quote characters
-     * @return string of sql commands
-     */
-    String buildCommand(String scriptDir, String scriptFile, boolean fixQuotes)
-        throws IllegalFormatException, IOException;
-  }
-
-  /***
-   * Base implementation of NestedScriptParser
-   * abstractCommandParser.
-   *
-   */
-  private static abstract class AbstractCommandParser implements NestedScriptParser {
-    private List<String> dbOpts;
-    private String msUsername;
-    private String msPassword;
-    private Configuration conf;
-    // Depending on whether we are using beeline or sqlline the line endings have to be handled
-    // differently.
-    private final boolean usingSqlLine;
-
-    public AbstractCommandParser(String dbOpts, String msUsername, String msPassword,
-        Configuration conf, boolean usingSqlLine) {
-      setDbOpts(dbOpts);
-      this.msUsername = msUsername;
-      this.msPassword = msPassword;
-      this.conf = conf;
-      this.usingSqlLine = usingSqlLine;
-    }
-
-    @Override
-    public boolean isPartialCommand(String dbCommand) throws IllegalArgumentException{
-      if (dbCommand == null || dbCommand.isEmpty()) {
-        throw new IllegalArgumentException("invalid command line " + dbCommand);
-      }
-      dbCommand = dbCommand.trim();
-      if (dbCommand.endsWith(getDelimiter()) || isNonExecCommand(dbCommand)) {
-        return false;
-      } else {
-        return true;
-      }
-    }
-
-    @Override
-    public boolean isNonExecCommand(String dbCommand) {
-      return (dbCommand.startsWith("--") || dbCommand.startsWith("#"));
-    }
-
-    @Override
-    public String getDelimiter() {
-      return DEFAULT_DELIMITER;
-    }
-
-    @Override
-    public String getQuoteCharacter() {
-      return DEFAULT_QUOTE;
-    }
-
-
-    @Override
-    public String cleanseCommand(String dbCommand) {
-      // strip off the delimiter
-      if (dbCommand.endsWith(getDelimiter())) {
-        dbCommand = dbCommand.substring(0,
-            dbCommand.length() - getDelimiter().length());
-      }
-      return dbCommand;
-    }
-
-    @Override
-    public boolean needsQuotedIdentifier() {
-      return false;
-    }
-
-    @Override
-    public String buildCommand(
-      String scriptDir, String scriptFile) throws IllegalFormatException, IOException {
-      return buildCommand(scriptDir, scriptFile, false);
-    }
-
-    @Override
-    public String buildCommand(
-      String scriptDir, String scriptFile, boolean fixQuotes) throws IllegalFormatException, IOException {
-      BufferedReader bfReader =
-          new BufferedReader(new FileReader(scriptDir + File.separatorChar + scriptFile));
-      String currLine;
-      StringBuilder sb = new StringBuilder();
-      String currentCommand = null;
-      while ((currLine = bfReader.readLine()) != null) {
-        currLine = currLine.trim();
-
-        if (fixQuotes && !getQuoteCharacter().equals(DEFAULT_QUOTE)) {
-          currLine = currLine.replace("\\\"", getQuoteCharacter());
-        }
-
-        if (currLine.isEmpty()) {
-          continue; // skip empty lines
-        }
-
-        if (currentCommand == null) {
-          currentCommand = currLine;
-        } else {
-          currentCommand = currentCommand + " " + currLine;
-        }
-        if (isPartialCommand(currLine)) {
-          // if its a partial line, continue collecting the pieces
-          continue;
-        }
-
-        // if this is a valid executable command then add it to the buffer
-        if (!isNonExecCommand(currentCommand)) {
-          currentCommand = cleanseCommand(currentCommand);
-          if (isNestedScript(currentCommand)) {
-            // if this is a nested sql script then flatten it
-            String currScript = getScriptName(currentCommand);
-            sb.append(buildCommand(scriptDir, currScript));
-          } else {
-            // Now we have a complete statement, process it
-            // write the line to buffer
-            sb.append(currentCommand);
-            if (usingSqlLine) sb.append(";");
-            sb.append(System.getProperty("line.separator"));
-          }
-        }
-        currentCommand = null;
-      }
-      bfReader.close();
-      return sb.toString();
-    }
-
-    private void setDbOpts(String dbOpts) {
-      if (dbOpts != null) {
-        this.dbOpts = Lists.newArrayList(dbOpts.split(","));
-      } else {
-        this.dbOpts = Lists.newArrayList();
-      }
-    }
-
-    protected List<String> getDbOpts() {
-      return dbOpts;
-    }
-
-    protected String getMsUsername() {
-      return msUsername;
-    }
-
-    protected String getMsPassword() {
-      return msPassword;
-    }
-
-    protected Configuration getConf() {
-      return conf;
-    }
-  }
-
-  // Derby commandline parser
-  public static class DerbyCommandParser extends AbstractCommandParser {
-    private static final String DERBY_NESTING_TOKEN = "RUN";
-
-    public DerbyCommandParser(String dbOpts, String msUsername, String msPassword,
-        Configuration conf, boolean usingSqlLine) {
-      super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
-    }
-
-    @Override
-    public String getScriptName(String dbCommand) throws IllegalArgumentException {
-
-      if (!isNestedScript(dbCommand)) {
-        throw new IllegalArgumentException("Not a script format " + dbCommand);
-      }
-      String[] tokens = dbCommand.split(" ");
-      if (tokens.length != 2) {
-        throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
-      }
-      return tokens[1].replace(";", "").replaceAll("'", "");
-    }
-
-    @Override
-    public boolean isNestedScript(String dbCommand) {
-      // Derby script format is RUN '<file>'
-     return dbCommand.startsWith(DERBY_NESTING_TOKEN);
-    }
-  }
-
-  // Hive commandline parser
-  public static class HiveCommandParser extends AbstractCommandParser {
-    private static String HIVE_NESTING_TOKEN = "SOURCE";
-    private final NestedScriptParser nestedDbCommandParser;
-
-    public HiveCommandParser(String dbOpts, String msUsername, String msPassword,
-        Configuration conf, String metaDbType, boolean usingSqlLine) {
-      super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
-      nestedDbCommandParser = getDbCommandParser(metaDbType, usingSqlLine);
-    }
-
-    @Override
-    public String getQuoteCharacter() {
-      return nestedDbCommandParser.getQuoteCharacter();
-    }
-
-    @Override
-    public String getScriptName(String dbCommand) throws IllegalArgumentException {
-
-      if (!isNestedScript(dbCommand)) {
-        throw new IllegalArgumentException("Not a script format " + dbCommand);
-      }
-      String[] tokens = dbCommand.split(" ");
-      if (tokens.length != 2) {
-        throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
-      }
-      return tokens[1].replace(";", "");
-    }
-
-    @Override
-    public boolean isNestedScript(String dbCommand) {
-     return dbCommand.startsWith(HIVE_NESTING_TOKEN);
-    }
-  }
-
-  // MySQL parser
-  public static class MySqlCommandParser extends AbstractCommandParser {
-    private static final String MYSQL_NESTING_TOKEN = "SOURCE";
-    private static final String DELIMITER_TOKEN = "DELIMITER";
-    private String delimiter = DEFAULT_DELIMITER;
-
-    public MySqlCommandParser(String dbOpts, String msUsername, String msPassword,
-        Configuration conf, boolean usingSqlLine) {
-      super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
-    }
-
-    @Override
-    public boolean isPartialCommand(String dbCommand) throws IllegalArgumentException{
-      boolean isPartial = super.isPartialCommand(dbCommand);
-      // if this is a delimiter directive, reset our delimiter
-      if (dbCommand.startsWith(DELIMITER_TOKEN)) {
-        String[] tokens = dbCommand.split(" ");
-        if (tokens.length != 2) {
-          throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
-        }
-        delimiter = tokens[1];
-      }
-      return isPartial;
-    }
-
-    @Override
-    public String getScriptName(String dbCommand) throws IllegalArgumentException {
-      String[] tokens = dbCommand.split(" ");
-      if (tokens.length != 2) {
-        throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
-      }
-      // remove ending ';'
-      return tokens[1].replace(";", "");
-    }
-
-    @Override
-    public boolean isNestedScript(String dbCommand) {
-      return dbCommand.startsWith(MYSQL_NESTING_TOKEN);
-    }
-
-    @Override
-    public String getDelimiter() {
-      return delimiter;
-    }
-
-    @Override
-    public String getQuoteCharacter() {
-      return "`";
-    }
-
-    @Override
-    public boolean isNonExecCommand(String dbCommand) {
-      return super.isNonExecCommand(dbCommand) ||
-          (dbCommand.startsWith("/*") && dbCommand.endsWith("*/")) ||
-          dbCommand.startsWith(DELIMITER_TOKEN);
-    }
-
-    @Override
-    public String cleanseCommand(String dbCommand) {
-      return super.cleanseCommand(dbCommand).replaceAll("/\\*.*?\\*/[^;]", "");
-    }
-
-  }
-
-  // Postgres specific parser
-  public static class PostgresCommandParser extends AbstractCommandParser {
-    private static final String POSTGRES_NESTING_TOKEN = "\\i";
-    @VisibleForTesting
-    public static final String POSTGRES_STANDARD_STRINGS_OPT = "SET standard_conforming_strings";
-    @VisibleForTesting
-    public static final String POSTGRES_SKIP_STANDARD_STRINGS_DBOPT = "postgres.filter.81";
-
-    public PostgresCommandParser(String dbOpts, String msUsername, String msPassword,
-        Configuration conf, boolean usingSqlLine) {
-      super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
-    }
-
-    @Override
-    public String getScriptName(String dbCommand) throws IllegalArgumentException {
-      String[] tokens = dbCommand.split(" ");
-      if (tokens.length != 2) {
-        throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
-      }
-      // remove ending ';'
-      return tokens[1].replace(";", "");
-    }
-
-    @Override
-    public boolean isNestedScript(String dbCommand) {
-      return dbCommand.startsWith(POSTGRES_NESTING_TOKEN);
-    }
-
-    @Override
-    public boolean needsQuotedIdentifier() {
-      return true;
-    }
-
-    @Override
-    public boolean isNonExecCommand(String dbCommand) {
-      // Skip "standard_conforming_strings" command which is read-only in older
-      // Postgres versions like 8.1
-      // See: http://www.postgresql.org/docs/8.2/static/release-8-1.html
-      if (getDbOpts().contains(POSTGRES_SKIP_STANDARD_STRINGS_DBOPT)) {
-        if (dbCommand.startsWith(POSTGRES_STANDARD_STRINGS_OPT)) {
-          return true;
-        }
-      }
-      return super.isNonExecCommand(dbCommand);
-    }
-  }
-
-  //Oracle specific parser
-  public static class OracleCommandParser extends AbstractCommandParser {
-    private static final String ORACLE_NESTING_TOKEN = "@";
-
-    public OracleCommandParser(String dbOpts, String msUsername, String msPassword,
-        Configuration conf, boolean usingSqlLine) {
-      super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
-    }
-
-    @Override
-    public String getScriptName(String dbCommand) throws IllegalArgumentException {
-      if (!isNestedScript(dbCommand)) {
-        throw new IllegalArgumentException("Not a nested script format " + dbCommand);
-      }
-      // remove ending ';' and starting '@'
-      return dbCommand.replace(";", "").replace(ORACLE_NESTING_TOKEN, "");
-    }
-
-    @Override
-    public boolean isNestedScript(String dbCommand) {
-      return dbCommand.startsWith(ORACLE_NESTING_TOKEN);
-    }
-  }
-
-  //MSSQL specific parser
-  public static class MSSQLCommandParser extends AbstractCommandParser {
-    private static final String MSSQL_NESTING_TOKEN = ":r";
-
-    public MSSQLCommandParser(String dbOpts, String msUsername, String msPassword,
-        Configuration conf, boolean usingSqlLine) {
-      super(dbOpts, msUsername, msPassword, conf, usingSqlLine);
-    }
-
-    @Override
-    public String getScriptName(String dbCommand) throws IllegalArgumentException {
-      String[] tokens = dbCommand.split(" ");
-      if (tokens.length != 2) {
-        throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
-      }
-      return tokens[1];
-    }
-
-    @Override
-    public boolean isNestedScript(String dbCommand) {
-      return dbCommand.startsWith(MSSQL_NESTING_TOKEN);
-    }
-  }
-
-  public static NestedScriptParser getDbCommandParser(String dbName, boolean usingSqlLine) {
-    return getDbCommandParser(dbName, null, usingSqlLine);
-  }
-
-  public static NestedScriptParser getDbCommandParser(String dbName, String metaDbName, boolean usingSqlLine) {
-    return getDbCommandParser(dbName, null, null, null, null, metaDbName, usingSqlLine);
-  }
-
-  public static NestedScriptParser getDbCommandParser(String dbName,
-      String dbOpts, String msUsername, String msPassword,
-      Configuration conf, String metaDbType, boolean usingSqlLine) {
-    if (dbName.equalsIgnoreCase(DB_DERBY)) {
-      return new DerbyCommandParser(dbOpts, msUsername, msPassword, conf, usingSqlLine);
-    } else if (dbName.equalsIgnoreCase(DB_HIVE)) {
-      return new HiveCommandParser(dbOpts, msUsername, msPassword, conf, metaDbType, usingSqlLine);
-    } else if (dbName.equalsIgnoreCase(DB_MSSQL)) {
-      return new MSSQLCommandParser(dbOpts, msUsername, msPassword, conf, usingSqlLine);
-    } else if (dbName.equalsIgnoreCase(DB_MYSQL)) {
-      return new MySqlCommandParser(dbOpts, msUsername, msPassword, conf, usingSqlLine);
-    } else if (dbName.equalsIgnoreCase(DB_POSTGRACE)) {
-      return new PostgresCommandParser(dbOpts, msUsername, msPassword, conf, usingSqlLine);
-    } else if (dbName.equalsIgnoreCase(DB_ORACLE)) {
-      return new OracleCommandParser(dbOpts, msUsername, msPassword, conf, usingSqlLine);
-    } else {
-      throw new IllegalArgumentException("Unknown dbType " + dbName);
-    }
-  }
-
-  public static class MetaStoreConnectionInfo {
-    private final String userName;
-    private final String password;
-    private final String url;
-    private final String driver;
-    private final boolean printInfo;
-    private final Configuration conf;
-    private final String dbType;
-    private final String metaDbType;
-
-    public MetaStoreConnectionInfo(String userName, String password, String url, String driver,
-                                   boolean printInfo, Configuration conf, String dbType, String metaDbType) {
-      super();
-      this.userName = userName;
-      this.password = password;
-      this.url = url;
-      this.driver = driver;
-      this.printInfo = printInfo;
-      this.conf = conf;
-      this.dbType = dbType;
-      this.metaDbType = metaDbType;
-    }
-
-    public String getPassword() {
-      return password;
-    }
-
-    public String getUrl() {
-      return url;
-    }
-
-    public String getDriver() {
-      return driver;
-    }
-
-    public boolean isPrintInfo() {
-      return printInfo;
-    }
-
-    public Configuration getConf() {
-      return conf;
-    }
-
-    public String getUsername() {
-      return userName;
-    }
-
-    public boolean getPrintInfo() {
-      return printInfo;
-    }
-
-    public String getDbType() {
-      return dbType;
-    }
-
-    public String getMetaDbType() {
-      return metaDbType;
-    }
-  }
-}
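
For context, the parser classes deleted above are normally driven through the
getDbCommandParser factory. A minimal sketch of that flow (the script name below
is hypothetical, and "mysql" is assumed to match the DB_MYSQL constant the
factory compares against):

    import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper;
    import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.NestedScriptParser;

    public class NestedScriptDemo {
      public static void main(String[] args) {
        // "mysql" selects MySqlCommandParser in the factory above
        NestedScriptParser parser = HiveSchemaHelper.getDbCommandParser("mysql", false);
        String line = "SOURCE upgrade-3.0.0.mysql.sql;";   // hypothetical script line
        if (parser.isNestedScript(line)) {                 // true: starts with SOURCE
          // getScriptName splits on a single space and strips the trailing ';'
          System.out.println(parser.getScriptName(line));  // upgrade-3.0.0.mysql.sql
        }
      }
    }

Each dialect only changes the nesting token (SOURCE, \i, @, :r, or
HIVE_NESTING_TOKEN), and the MySQL parser additionally tracks DELIMITER
directives while scanning.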

http://git-wip-us.apache.org/repos/asf/hive/blob/081fa368/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java
 
b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java
deleted file mode 100644
index c2018f4..0000000
--- 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java
+++ /dev/null
@@ -1,460 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.cli.OptionGroup;
-import org.apache.commons.cli.ParseException;
-import org.apache.commons.io.output.NullOutputStream;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.apache.hadoop.hive.metastore.IMetaStoreSchemaInfo;
-import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfoFactory;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.MetaStoreConnectionInfo;
-import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.NestedScriptParser;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import sqlline.SqlLine;
-
-import java.io.BufferedReader;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.PrintStream;
-import java.net.URI;
-import java.sql.Connection;
-import java.sql.SQLException;
-
-public class MetastoreSchemaTool {
-  private static final Logger LOG = LoggerFactory.getLogger(MetastoreSchemaTool.class);
-  private static final String PASSWD_MASK = "[passwd stripped]";
-
-  protected Configuration conf;
-
-  protected String dbOpts = null;
-  protected String dbType;
-  protected String driver = null;
-  protected boolean dryRun = false;
-  protected String hiveDb; // Hive database, for use when creating the user, not for connecting
-  protected String hivePasswd; // Hive password, for use when creating the user, not for connecting
-  protected String hiveUser; // Hive username, for use when creating the user, not for connecting
-  protected String metaDbType;
-  protected IMetaStoreSchemaInfo metaStoreSchemaInfo;
-  protected boolean needsQuotedIdentifier;
-  protected String quoteCharacter;
-  protected String passWord = null;
-  protected String url = null;
-  protected String userName = null;
-  protected URI[] validationServers = null; // The list of servers on which the database/partition/table may be located
-  protected boolean verbose = false;
-  protected SchemaToolCommandLine cmdLine;
-
-  private static String homeDir;
-
-  private static String findHomeDir() {
-    // If METASTORE_HOME is set, use it, else use HIVE_HOME for backwards compatibility.
-    homeDir = homeDir == null ? System.getenv("METASTORE_HOME") : homeDir;
-    return homeDir == null ? System.getenv("HIVE_HOME") : homeDir;
-  }
-
-  @VisibleForTesting
-  public static void setHomeDirForTesting() {
-    homeDir = System.getProperty("test.tmp.dir", "target/tmp");
-  }
-
-  @VisibleForTesting
-  public MetastoreSchemaTool() {
-
-  }
-
-  @VisibleForTesting
-  public void init(String metastoreHome, String[] args, OptionGroup additionalOptions,
-                   Configuration conf) throws HiveMetaException {
-    try {
-      cmdLine = new SchemaToolCommandLine(args, additionalOptions);
-    } catch (ParseException e) {
-      System.err.println("Failed to parse command line. ");
-      throw new HiveMetaException(e);
-    }
-
-    if (metastoreHome == null || metastoreHome.isEmpty()) {
-      throw new HiveMetaException("No Metastore home directory provided");
-    }
-    this.conf = conf;
-    this.dbType = cmdLine.getDbType();
-    this.metaDbType = cmdLine.getMetaDbType();
-    NestedScriptParser parser = getDbCommandParser(dbType, metaDbType);
-    this.needsQuotedIdentifier = parser.needsQuotedIdentifier();
-    this.quoteCharacter = parser.getQuoteCharacter();
-    this.metaStoreSchemaInfo = MetaStoreSchemaInfoFactory.get(conf, metastoreHome, dbType);
-
-    if (cmdLine.hasOption("userName")) {
-      setUserName(cmdLine.getOptionValue("userName"));
-    } else {
-      setUserName(getConf().get(MetastoreConf.ConfVars.CONNECTION_USER_NAME.getVarname()));
-    }
-    if (cmdLine.hasOption("passWord")) {
-      setPassWord(cmdLine.getOptionValue("passWord"));
-    } else {
-      try {
-        setPassWord(MetastoreConf.getPassword(getConf(), ConfVars.PWD));
-      } catch (IOException err) {
-        throw new HiveMetaException("Error getting metastore password", err);
-      }
-    }
-    if (cmdLine.hasOption("url")) {
-      setUrl(cmdLine.getOptionValue("url"));
-    }
-    if (cmdLine.hasOption("driver")) {
-      setDriver(cmdLine.getOptionValue("driver"));
-    }
-    if (cmdLine.hasOption("dryRun")) {
-      setDryRun(true);
-    }
-    if (cmdLine.hasOption("verbose")) {
-      setVerbose(true);
-    }
-    if (cmdLine.hasOption("dbOpts")) {
-      setDbOpts(cmdLine.getOptionValue("dbOpts"));
-    }
-    if (cmdLine.hasOption("validate") && cmdLine.hasOption("servers")) {
-      setValidationServers(cmdLine.getOptionValue("servers"));
-    }
-    if (cmdLine.hasOption("hiveUser")) {
-      setHiveUser(cmdLine.getOptionValue("hiveUser"));
-    }
-    if (cmdLine.hasOption("hivePassword")) {
-      setHivePasswd(cmdLine.getOptionValue("hivePassword"));
-    }
-    if (cmdLine.hasOption("hiveDb")) {
-      setHiveDb(cmdLine.getOptionValue("hiveDb"));
-    }
-  }
-
-  public Configuration getConf() {
-    return conf;
-  }
-
-  protected String getDbType() {
-    return dbType;
-  }
-
-  protected void setUrl(String url) {
-    this.url = url;
-  }
-
-  protected void setDriver(String driver) {
-    this.driver = driver;
-  }
-
-  public void setUserName(String userName) {
-    this.userName = userName;
-  }
-
-  public void setPassWord(String passWord) {
-    this.passWord = passWord;
-  }
-
-  protected boolean isDryRun() {
-    return dryRun;
-  }
-
-  protected void setDryRun(boolean dryRun) {
-    this.dryRun = dryRun;
-  }
-
-  protected boolean isVerbose() {
-    return verbose;
-  }
-
-  protected void setVerbose(boolean verbose) {
-    this.verbose = verbose;
-  }
-
-  protected void setDbOpts(String dbOpts) {
-    this.dbOpts = dbOpts;
-  }
-
-  protected URI[] getValidationServers() {
-    return validationServers;
-  }
-
-  protected void setValidationServers(String servers) {
-    if (StringUtils.isNotEmpty(servers)) {
-      String[] strServers = servers.split(",");
-      this.validationServers = new URI[strServers.length];
-      for (int i = 0; i < validationServers.length; i++) {
-        validationServers[i] = new Path(strServers[i]).toUri();
-      }
-    }
-  }
-
-  protected String getHiveUser() {
-    return hiveUser;
-  }
-
-  protected void setHiveUser(String hiveUser) {
-    this.hiveUser = hiveUser;
-  }
-
-  protected String getHivePasswd() {
-    return hivePasswd;
-  }
-
-  protected void setHivePasswd(String hivePasswd) {
-    this.hivePasswd = hivePasswd;
-  }
-
-  protected String getHiveDb() {
-    return hiveDb;
-  }
-
-  protected void setHiveDb(String hiveDb) {
-    this.hiveDb = hiveDb;
-  }
-
-  protected SchemaToolCommandLine getCmdLine() {
-    return cmdLine;
-  }
-
-  public Connection getConnectionToMetastore(boolean printInfo) throws HiveMetaException {
-    return HiveSchemaHelper.getConnectionToMetastore(userName,
-        passWord, url, driver, printInfo, conf, null);
-  }
-
-  protected NestedScriptParser getDbCommandParser(String dbType, String metaDbType) {
-    return HiveSchemaHelper.getDbCommandParser(dbType, dbOpts, userName,
-        passWord, conf, null, true);
-  }
-
-  protected MetaStoreConnectionInfo getConnectionInfo(boolean printInfo) {
-    return new MetaStoreConnectionInfo(userName, passWord, url, driver, printInfo, conf,
-        dbType, hiveDb);
-  }
-
-  protected IMetaStoreSchemaInfo getMetaStoreSchemaInfo() {
-    return metaStoreSchemaInfo;
-  }
-
-  /**
-   * check if the current schema version in metastore matches the Hive version
-   */
-  @VisibleForTesting
-  void verifySchemaVersion() throws HiveMetaException {
-    // don't check the version if it's a dry run
-    if (dryRun) {
-      return;
-    }
-    String newSchemaVersion = metaStoreSchemaInfo.getMetaStoreSchemaVersion(getConnectionInfo(false));
-    // verify that the new version is added to schema
-    assertCompatibleVersion(metaStoreSchemaInfo.getHiveSchemaVersion(), newSchemaVersion);
-  }
-
-  protected void assertCompatibleVersion(String hiveSchemaVersion, String dbSchemaVersion)
-      throws HiveMetaException {
-    if (!metaStoreSchemaInfo.isVersionCompatible(hiveSchemaVersion, dbSchemaVersion)) {
-      throw new HiveMetaException("Metastore schema version is not compatible. Hive Version: "
-          + hiveSchemaVersion + ", Database Schema Version: " + dbSchemaVersion);
-    }
-  }
-
-  /**
-   * Execute a given metastore script. This default version uses sqlline to execute the files,
-   * which requires only running one file.  Subclasses can use other executors.
-   * @param scriptDir directory script is in
-   * @param scriptFile file in the directory to run
-   * @throws IOException if it cannot read the file or directory
-   * @throws HiveMetaException default implementation never throws this
-   */
-  protected void execSql(String scriptDir, String scriptFile) throws IOException, HiveMetaException {
-
-    execSql(scriptDir + File.separatorChar + scriptFile);
-  }
-
-  // Generate the SqlLine args per the metastore config and execute the given script
-  protected void execSql(String sqlScriptFile) throws IOException {
-    CommandBuilder builder = new CommandBuilder(conf, url, driver, userName, passWord, sqlScriptFile);
-
-    // run the script using SqlLine
-    SqlLine sqlLine = new SqlLine();
-    ByteArrayOutputStream outputForLog = null;
-    if (!verbose) {
-      OutputStream out;
-      if (LOG.isDebugEnabled()) {
-        out = outputForLog = new ByteArrayOutputStream();
-      } else {
-        out = new NullOutputStream();
-      }
-      sqlLine.setOutputStream(new PrintStream(out));
-      System.setProperty("sqlline.silent", "true");
-    }
-    LOG.info("Going to run command <" + builder.buildToLog() + ">");
-    SqlLine.Status status = sqlLine.begin(builder.buildToRun(), null, false);
-    if (LOG.isDebugEnabled() && outputForLog != null) {
-      LOG.debug("Received following output from Sqlline:");
-      LOG.debug(outputForLog.toString("UTF-8"));
-    }
-    if (status != SqlLine.Status.OK) {
-      throw new IOException("Schema script failed, errorcode " + status);
-    }
-  }
-
-  // Test the connection to the metastore using the configured properties
-  protected void testConnectionToMetastore() throws HiveMetaException {
-    Connection conn = getConnectionToMetastore(true);
-    try {
-      conn.close();
-    } catch (SQLException e) {
-      throw new HiveMetaException("Failed to close metastore connection", e);
-    }
-  }
-
-  // Quote if the database requires it
-  protected String quote(String stmt) {
-    stmt = stmt.replace("<q>", needsQuotedIdentifier ? quoteCharacter : "");
-    stmt = stmt.replace("<qa>", quoteCharacter);
-    return stmt;
-  }
-
-  protected static class CommandBuilder {
-    protected final String userName;
-    protected final String password;
-    protected final String sqlScriptFile;
-    protected final String driver;
-    protected final String url;
-
-    protected CommandBuilder(Configuration conf, String url, String driver, String userName,
-                             String password, String sqlScriptFile) throws IOException {
-      this.userName = userName;
-      this.password = password;
-      this.url = url == null ?
-          HiveSchemaHelper.getValidConfVar(MetastoreConf.ConfVars.CONNECT_URL_KEY, conf) : url;
-      this.driver = driver == null ?
-          HiveSchemaHelper.getValidConfVar(MetastoreConf.ConfVars.CONNECTION_DRIVER, conf) : driver;
-      this.sqlScriptFile = sqlScriptFile;
-    }
-
-    public String[] buildToRun() throws IOException {
-      return argsWith(password);
-    }
-
-    public String buildToLog() throws IOException {
-      logScript();
-      return StringUtils.join(argsWith(PASSWD_MASK), " ");
-    }
-
-    protected String[] argsWith(String password) throws IOException {
-      return new String[]
-        {
-          "-u", url,
-          "-d", driver,
-          "-n", userName,
-          "-p", password,
-          "--isolation=TRANSACTION_READ_COMMITTED",
-          "-f", sqlScriptFile
-        };
-    }
-
-    private void logScript() throws IOException {
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Going to invoke file that contains:");
-        try (BufferedReader reader = new BufferedReader(new FileReader(sqlScriptFile))) {
-          String line;
-          while ((line = reader.readLine()) != null) {
-            LOG.debug("script: " + line);
-          }
-        }
-      }
-    }
-  }
-
-  // Log an error message and also print it to stderr
-  private static void logAndPrintToError(String errmsg) {
-    LOG.error(errmsg);
-    System.err.println(errmsg);
-  }
-
-  public static void main(String[] args) {
-    MetastoreSchemaTool tool = new MetastoreSchemaTool();
-    System.exit(tool.run(args));
-  }
-
-  public int run(String[] args) {
-    return run(findHomeDir(), args, null, MetastoreConf.newMetastoreConf());
-  }
-
-  public int run(String metastoreHome, String[] args, OptionGroup additionalOptions,
-                 Configuration conf) {
-    try {
-      init(metastoreHome, args, additionalOptions, conf);
-      SchemaToolTask task;
-      if (cmdLine.hasOption("info")) {
-        task = new SchemaToolTaskInfo();
-      } else if (cmdLine.hasOption("upgradeSchema") || 
cmdLine.hasOption("upgradeSchemaFrom")) {
-        task = new SchemaToolTaskUpgrade();
-      } else if (cmdLine.hasOption("initSchema") || 
cmdLine.hasOption("initSchemaTo")) {
-        task = new SchemaToolTaskInit();
-      } else if (cmdLine.hasOption("validate")) {
-        task = new SchemaToolTaskValidate();
-      } else if (cmdLine.hasOption("createCatalog")) {
-        task = new SchemaToolTaskCreateCatalog();
-      } else if (cmdLine.hasOption("alterCatalog")) {
-        task = new SchemaToolTaskAlterCatalog();
-      } else if (cmdLine.hasOption("moveDatabase")) {
-        task = new SchemaToolTaskMoveDatabase();
-      } else if (cmdLine.hasOption("moveTable")) {
-        task = new SchemaToolTaskMoveTable();
-      } else if (cmdLine.hasOption("createUser")) {
-        task = new SchemaToolTaskCreateUser();
-      } else {
-        throw new HiveMetaException("No task defined!");
-      }
-
-      task.setHiveSchemaTool(this);
-      task.setCommandLineArguments(cmdLine);
-      task.execute();
-      return 0;
-    } catch (HiveMetaException e) {
-      logAndPrintToError(e.getMessage());
-      if (e.getCause() != null) {
-        Throwable t = e.getCause();
-        logAndPrintToError("Underlying cause: "
-            + t.getClass().getName() + " : "
-            + t.getMessage());
-        if (e.getCause() instanceof SQLException) {
-          logAndPrintToError("SQL Error code: " + ((SQLException) 
t).getErrorCode());
-        }
-      }
-      if (cmdLine.hasOption("verbose")) {
-        e.printStackTrace();
-      } else {
-        logAndPrintToError("Use --verbose for detailed stacktrace.");
-      }
-      logAndPrintToError("*** schemaTool failed ***");
-      return 1;
-
-    }
-  }
-}
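
As a quick illustration of the <q>/<qa> placeholder substitution performed by
the quote() helper removed above, a sketch of the same two replace() calls (the
statement and identifiers are made-up examples; the values assume a
Postgres-style parser that needs quoted identifiers and uses a double quote):

    boolean needsQuotedIdentifier = true;   // e.g. PostgresCommandParser.needsQuotedIdentifier()
    String quoteCharacter = "\"";           // assumed quote character for this dialect
    String stmt = "SELECT <q>SEQUENCE_NAME<q> FROM <qa>SEQUENCE_TABLE<qa>";
    stmt = stmt.replace("<q>", needsQuotedIdentifier ? quoteCharacter : "");
    stmt = stmt.replace("<qa>", quoteCharacter);
    // -> SELECT "SEQUENCE_NAME" FROM "SEQUENCE_TABLE"

On databases that do not require quoting, <q> disappears entirely, while <qa>
is always replaced with the dialect's quote character.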

http://git-wip-us.apache.org/repos/asf/hive/blob/081fa368/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/SQLGenerator.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/SQLGenerator.java
 
b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/SQLGenerator.java
deleted file mode 100644
index d0ac7db..0000000
--- 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/tools/SQLGenerator.java
+++ /dev/null
@@ -1,187 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.tools;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.DatabaseProduct;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Helper class that generates SQL queries with syntax specific to target DB
- * todo: why throw MetaException?
- */
-@VisibleForTesting
-public final class SQLGenerator {
-  private static final Logger LOG = LoggerFactory.getLogger(SQLGenerator.class.getName());
-  private final DatabaseProduct dbProduct;
-
-  private final Configuration conf;
-
-  public SQLGenerator(DatabaseProduct dbProduct, Configuration conf) {
-    this.dbProduct = dbProduct;
-    this.conf = conf;
-  }
-
-  /**
-   * Generates "Insert into T(a,b,c) values(1,2,'f'),(3,4,'c')" for the appropriate DB
-   *
-   * @param tblColumns e.g. "T(a,b,c)"
-   * @param rows       e.g. list of Strings like 3,4,'d'
-   * @return fully formed INSERT INTO ... statements
-   */
-  public List<String> createInsertValuesStmt(String tblColumns, List<String> 
rows) {
-    if (rows == null || rows.size() == 0) {
-      return Collections.emptyList();
-    }
-    List<String> insertStmts = new ArrayList<>();
-    StringBuilder sb = new StringBuilder();
-    switch (dbProduct) {
-    case ORACLE:
-      if (rows.size() > 1) {
-        //http://www.oratable.com/oracle-insert-all/
-        //https://livesql.oracle.com/apex/livesql/file/content_BM1LJQ87M5CNIOKPOWPV6ZGR3.html
-        for (int numRows = 0; numRows < rows.size(); numRows++) {
-          if (numRows % MetastoreConf.getIntVar(conf, ConfVars.DIRECT_SQL_MAX_ELEMENTS_VALUES_CLAUSE) == 0) {
-            if (numRows > 0) {
-              sb.append(" select * from dual");
-              insertStmts.add(sb.toString());
-            }
-            sb.setLength(0);
-            sb.append("insert all ");
-          }
-          sb.append("into ").append(tblColumns).append(" 
values(").append(rows.get(numRows))
-              .append(") ");
-        }
-        sb.append("select * from dual");
-        insertStmts.add(sb.toString());
-        return insertStmts;
-      }
-      //fall through
-    case DERBY:
-    case MYSQL:
-    case POSTGRES:
-    case SQLSERVER:
-      for (int numRows = 0; numRows < rows.size(); numRows++) {
-        if (numRows % MetastoreConf.getIntVar(conf, ConfVars.DIRECT_SQL_MAX_ELEMENTS_VALUES_CLAUSE) == 0) {
-          if (numRows > 0) {
-            insertStmts.add(sb.substring(0, sb.length() - 1));//exclude trailing comma
-          }
-          sb.setLength(0);
-          sb.append("insert into ").append(tblColumns).append(" values");
-        }
-        sb.append('(').append(rows.get(numRows)).append("),");
-      }
-      insertStmts.add(sb.substring(0, sb.length() - 1));//exclude trailing comma
-      return insertStmts;
-    default:
-      String msg = "Unrecognized database product name <" + dbProduct + ">";
-      LOG.error(msg);
-      throw new IllegalStateException(msg);
-    }
-  }
-
-  /**
-   * Given a {@code selectStatement}, decorate it with FOR UPDATE or a semantically equivalent
-   * construct.  If the DB doesn't support it, return the original select.
-   */
-  public String addForUpdateClause(String selectStatement) throws MetaException {
-    switch (dbProduct) {
-    case DERBY:
-      //https://db.apache.org/derby/docs/10.1/ref/rrefsqlj31783.html
-      //sadly in Derby, FOR UPDATE doesn't mean what it should
-      return selectStatement;
-    case MYSQL:
-      //http://dev.mysql.com/doc/refman/5.7/en/select.html
-    case ORACLE:
-      //https://docs.oracle.com/cd/E17952_01/refman-5.6-en/select.html
-    case POSTGRES:
-      //http://www.postgresql.org/docs/9.0/static/sql-select.html
-      return selectStatement + " for update";
-    case SQLSERVER:
-      //https://msdn.microsoft.com/en-us/library/ms189499.aspx
-      //https://msdn.microsoft.com/en-us/library/ms187373.aspx
-      String modifier = " with (updlock)";
-      int wherePos = selectStatement.toUpperCase().indexOf(" WHERE ");
-      if (wherePos < 0) {
-        return selectStatement + modifier;
-      }
-      return selectStatement.substring(0, wherePos) + modifier +
-          selectStatement.substring(wherePos, selectStatement.length());
-    default:
-      String msg = "Unrecognized database product name <" + dbProduct + ">";
-      LOG.error(msg);
-      throw new MetaException(msg);
-    }
-  }
-
-  /**
-   * Suppose you have a query "select a,b from T" and you want to limit the result set
-   * to the first 5 rows.  The mechanism to do that differs across DBs.
-   * Make {@code noSelectsqlQuery} be "a,b from T" and this method will return the
-   * appropriately modified row-limiting query.
-   * <p>
-   * Note that if {@code noSelectsqlQuery} contains a join, you must make sure that
-   * all columns are unique for Oracle.
-   */
-  public String addLimitClause(int numRows, String noSelectsqlQuery) throws MetaException {
-    switch (dbProduct) {
-    case DERBY:
-      //http://db.apache.org/derby/docs/10.7/ref/rrefsqljoffsetfetch.html
-      return "select " + noSelectsqlQuery + " fetch first " + numRows + " rows 
only";
-    case MYSQL:
-      //https://dev.mysql.com/doc/refman/5.0/en/select.html
-    case POSTGRES:
-      //http://www.postgresql.org/docs/7.3/static/queries-limit.html
-      return "select " + noSelectsqlQuery + " limit " + numRows;
-    case ORACLE:
-      //newer versions (12c and later) support OFFSET/FETCH
-      return "select * from (select " + noSelectsqlQuery + ") where rownum <= 
" + numRows;
-    case SQLSERVER:
-      //newer versions (2012 and later) support OFFSET/FETCH
-      //https://msdn.microsoft.com/en-us/library/ms189463.aspx
-      return "select TOP(" + numRows + ") " + noSelectsqlQuery;
-    default:
-      String msg = "Unrecognized database product name <" + dbProduct + ">";
-      LOG.error(msg);
-      throw new MetaException(msg);
-    }
-  }
-
-  public DatabaseProduct getDbProduct() {
-    return dbProduct;
-  }
-
-  // This is required for SQL executed directly. If the SQL has double quotes, then some DBs tend to
-  // remove the escape characters and store the value without the double quotes.
-  public String addEscapeCharacters(String s) {
-    if (dbProduct == DatabaseProduct.MYSQL) {
-      return s.replaceAll("\\\\", "\\\\\\\\");
-    }
-    return s;
-  }
-
-}
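
For reference, a sketch of what the deleted addLimitClause() produces for each
supported product (the query fragment "a, b from T" is an arbitrary example;
note the caller passes it without the leading "select"):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.DatabaseProduct;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
    import org.apache.hadoop.hive.metastore.tools.SQLGenerator;

    public class LimitClauseDemo {
      public static void main(String[] args) throws MetaException {
        Configuration conf = MetastoreConf.newMetastoreConf();
        SQLGenerator gen = new SQLGenerator(DatabaseProduct.POSTGRES, conf);
        System.out.println(gen.addLimitClause(5, "a, b from T"));
        // POSTGRES/MYSQL -> select a, b from T limit 5
        // DERBY          -> select a, b from T fetch first 5 rows only
        // ORACLE         -> select * from (select a, b from T) where rownum <= 5
        // SQLSERVER      -> select TOP(5) a, b from T
      }
    }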
