http://git-wip-us.apache.org/repos/asf/hive/blob/133d3c47/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java
new file mode 100644
index 0000000..2671c1f
--- /dev/null
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import org.antlr.runtime.CommonTokenStream;
+import org.antlr.runtime.RecognitionException;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.apache.hadoop.hive.metastore.utils.JavaUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.parser.ExpressionTree;
+import org.apache.hadoop.hive.metastore.parser.FilterLexer;
+import org.apache.hadoop.hive.metastore.parser.FilterParser;
+import 
org.apache.hadoop.hive.metastore.parser.ExpressionTree.ANTLRNoCaseStringStream;
+import org.apache.hadoop.hive.metastore.parser.ExpressionTree.LeafNode;
+import org.apache.hadoop.hive.metastore.parser.ExpressionTree.Operator;
+
+/**
+ * Utility functions for working with partition filter expressions
+ */
+public class PartFilterExprUtil {
+  private static final Logger LOG = 
LoggerFactory.getLogger(PartFilterExprUtil.class.getName());
+
+
+  public static ExpressionTree makeExpressionTree(PartitionExpressionProxy 
expressionProxy,
+      byte[] expr) throws MetaException {
+    // We will try pushdown first, so make the filter. This will also validate 
the expression,
+    // if serialization fails we will throw incompatible metastore error to 
the client.
+    String filter = null;
+    try {
+      filter = expressionProxy.convertExprToFilter(expr);
+    } catch (MetaException ex) {
+      // TODO MS-SPLIT - for now we have construct this by reflection because 
IMetaStoreClient
+      // can't be
+      // moved until after HiveMetaStore is moved, which can't be moved until 
this is moved.
+      Class<? extends MetaException> exClass = JavaUtils.getClass(
+          
"org.apache.hadoop.hive.metastore.IMetaStoreClient$IncompatibleMetastoreException",
+          MetaException.class);
+      throw JavaUtils.newInstance(exClass, new Class<?>[]{String.class}, new 
Object[]{ex.getMessage()});
+    }
+
+    // Make a tree out of the filter.
+    // TODO: this is all pretty ugly. The only reason we need all these 
transformations
+    //       is to maintain support for simple filters for HCat users that 
query metastore.
+    //       If forcing everyone to use thick client is out of the question, 
maybe we could
+    //       parse the filter into standard hive expressions and not all this 
separate tree
+    //       Filter.g stuff. That way this method and ...ByFilter would just 
be merged.
+    return PartFilterExprUtil.makeExpressionTree(filter);
+  }
+
+
+  /**
+   * Creates the proxy used to evaluate expressions. This is here to prevent 
circular
+   * dependency - ql -&gt; metastore client &lt;-&gt; metastore server -&gt; 
ql. If server and
+   * client are split, this can be removed.
+   * @param conf Configuration.
+   * @return The partition expression proxy.
+   */
+  public static PartitionExpressionProxy createExpressionProxy(Configuration 
conf) {
+    String className = MetastoreConf.getVar(conf, 
ConfVars.EXPRESSION_PROXY_CLASS);
+    try {
+      @SuppressWarnings("unchecked")
+      Class<? extends PartitionExpressionProxy> clazz =
+           JavaUtils.getClass(className, PartitionExpressionProxy.class);
+      return JavaUtils.newInstance(
+          clazz, new Class<?>[0], new Object[0]);
+    } catch (MetaException e) {
+      LOG.error("Error loading PartitionExpressionProxy", e);
+      throw new RuntimeException("Error loading PartitionExpressionProxy: " + 
e.getMessage());
+    }
+  }
+
+  /**
+   * Makes expression tree out of expr.
+   * @param filter Filter.
+   * @return Expression tree. Null if there was an error.
+   */
+  private static ExpressionTree makeExpressionTree(String filter) throws 
MetaException {
+    // TODO: ExprNodeDesc is an expression tree, we could just use that and be 
rid of Filter.g.
+    if (filter == null || filter.isEmpty()) {
+      return ExpressionTree.EMPTY_TREE;
+    }
+    LOG.debug("Filter specified is " + filter);
+    ExpressionTree tree = null;
+    try {
+      tree = getFilterParser(filter).tree;
+    } catch (MetaException ex) {
+      LOG.info("Unable to make the expression tree from expression string ["
+          + filter + "]" + ex.getMessage()); // Don't log the stack, this is 
normal.
+    }
+    if (tree == null) {
+      return null;
+    }
+    // We suspect that LIKE pushdown into JDO is invalid; see HIVE-5134. Check 
for like here.
+    LikeChecker lc = new LikeChecker();
+    tree.accept(lc);
+    return lc.hasLike() ? null : tree;
+  }
+
+
+  private static class LikeChecker extends ExpressionTree.TreeVisitor {
+    private boolean hasLike;
+
+    public boolean hasLike() {
+      return hasLike;
+    }
+
+    @Override
+    protected boolean shouldStop() {
+      return hasLike;
+    }
+
+    @Override
+    protected void visit(LeafNode node) throws MetaException {
+      hasLike = hasLike || (node.operator == Operator.LIKE);
+    }
+  }
+
+  public static FilterParser getFilterParser(String filter) throws 
MetaException {
+    FilterLexer lexer = new FilterLexer(new ANTLRNoCaseStringStream(filter));
+    CommonTokenStream tokens = new CommonTokenStream(lexer);
+
+    FilterParser parser = new FilterParser(tokens);
+    try {
+      parser.filter();
+    } catch(RecognitionException re) {
+      throw new MetaException("Error parsing partition filter; lexer error: "
+          + lexer.errorMsg + "; exception " + re);
+    }
+
+    if (lexer.errorMsg != null) {
+      throw new MetaException("Error parsing partition filter : " + 
lexer.errorMsg);
+    }
+    return parser;
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/133d3c47/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
new file mode 100644
index 0000000..0e6d8a4
--- /dev/null
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -0,0 +1,747 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.hive.metastore.api.AggrStats;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.InvalidInputException;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.NotificationEventRequest;
+import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
+import org.apache.hadoop.hive.metastore.api.NotificationEventsCountRequest;
+import org.apache.hadoop.hive.metastore.api.NotificationEventsCountResponse;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PartitionEventType;
+import org.apache.hadoop.hive.metastore.api.PartitionValuesResponse;
+import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.TableMeta;
+import org.apache.hadoop.hive.metastore.api.Type;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
+import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
+import org.apache.thrift.TException;
+
+public interface RawStore extends Configurable {
+
+  /***
+   * Annotation to skip retries
+   */
+  @Target(value = ElementType.METHOD)
+  @Retention(value = RetentionPolicy.RUNTIME)
+  @interface CanNotRetry {
+  }
+
+  void shutdown();
+
+  /**
+   * Opens a new one or the one already created Every call of this function 
must
+   * have corresponding commit or rollback function call
+   *
+   * @return an active transaction
+   */
+
+  boolean openTransaction();
+
+  /**
+   * if this is the commit of the first open call then an actual commit is
+   * called.
+   *
+   * @return true or false
+   */
+  @CanNotRetry
+  boolean commitTransaction();
+
+  boolean isActiveTransaction();
+
+  /**
+   * Rolls back the current transaction if it is active
+   */
+  @CanNotRetry
+  void rollbackTransaction();
+
+  void createDatabase(Database db)
+      throws InvalidObjectException, MetaException;
+
+  Database getDatabase(String name)
+      throws NoSuchObjectException;
+
+  boolean dropDatabase(String dbname) throws NoSuchObjectException, 
MetaException;
+
+  boolean alterDatabase(String dbname, Database db) throws 
NoSuchObjectException, MetaException;
+
+  List<String> getDatabases(String pattern) throws MetaException;
+
+  List<String> getAllDatabases() throws MetaException;
+
+  boolean createType(Type type);
+
+  Type getType(String typeName);
+
+  boolean dropType(String typeName);
+
+  void createTable(Table tbl) throws InvalidObjectException,
+      MetaException;
+
+  boolean dropTable(String dbName, String tableName)
+      throws MetaException, NoSuchObjectException, InvalidObjectException, 
InvalidInputException;
+
+  Table getTable(String dbName, String tableName)
+      throws MetaException;
+
+  boolean addPartition(Partition part)
+      throws InvalidObjectException, MetaException;
+
+  boolean addPartitions(String dbName, String tblName, List<Partition> parts)
+      throws InvalidObjectException, MetaException;
+
+  boolean addPartitions(String dbName, String tblName, PartitionSpecProxy 
partitionSpec, boolean ifNotExists)
+      throws InvalidObjectException, MetaException;
+
+  Partition getPartition(String dbName, String tableName,
+      List<String> part_vals) throws MetaException, NoSuchObjectException;
+
+  boolean doesPartitionExist(String dbName, String tableName,
+      List<String> part_vals) throws MetaException, NoSuchObjectException;
+
+  boolean dropPartition(String dbName, String tableName,
+      List<String> part_vals) throws MetaException, NoSuchObjectException, 
InvalidObjectException,
+      InvalidInputException;
+
+  List<Partition> getPartitions(String dbName,
+      String tableName, int max) throws MetaException, NoSuchObjectException;
+
+  void alterTable(String dbname, String name, Table newTable)
+      throws InvalidObjectException, MetaException;
+
+  List<String> getTables(String dbName, String pattern)
+      throws MetaException;
+
+  List<String> getTables(String dbName, String pattern, TableType tableType)
+      throws MetaException;
+
+  List<TableMeta> getTableMeta(
+      String dbNames, String tableNames, List<String> tableTypes) throws 
MetaException;
+
+  /**
+   * @param dbname
+   *        The name of the database from which to retrieve the tables
+   * @param tableNames
+   *        The names of the tables to retrieve.
+   * @return A list of the tables retrievable from the database
+   *          whose names are in the list tableNames.
+   *         If there are duplicate names, only one instance of the table will 
be returned
+   * @throws MetaException
+   */
+  List<Table> getTableObjectsByName(String dbname, List<String> tableNames)
+      throws MetaException, UnknownDBException;
+
+  List<String> getAllTables(String dbName) throws MetaException;
+
+  /**
+   * Gets a list of tables based on a filter string and filter type.
+   * @param dbName
+   *          The name of the database from which you will retrieve the table 
names
+   * @param filter
+   *          The filter string
+   * @param max_tables
+   *          The maximum number of tables returned
+   * @return  A list of table names that match the desired filter
+   * @throws MetaException
+   * @throws UnknownDBException
+   */
+  List<String> listTableNamesByFilter(String dbName,
+      String filter, short max_tables) throws MetaException, 
UnknownDBException;
+
+  List<String> listPartitionNames(String db_name,
+      String tbl_name, short max_parts) throws MetaException;
+
+  PartitionValuesResponse listPartitionValues(String db_name, String tbl_name,
+                                              List<FieldSchema> cols, boolean 
applyDistinct, String filter, boolean ascending,
+                                              List<FieldSchema> order, long 
maxParts) throws MetaException;
+
+  List<String> listPartitionNamesByFilter(String db_name,
+      String tbl_name, String filter, short max_parts) throws MetaException;
+
+  void alterPartition(String db_name, String tbl_name, List<String> part_vals,
+      Partition new_part) throws InvalidObjectException, MetaException;
+
+  void alterPartitions(String db_name, String tbl_name,
+      List<List<String>> part_vals_list, List<Partition> new_parts)
+      throws InvalidObjectException, MetaException;
+
+  boolean addIndex(Index index)
+      throws InvalidObjectException, MetaException;
+
+  Index getIndex(String dbName, String origTableName, String indexName) throws 
MetaException;
+
+  boolean dropIndex(String dbName, String origTableName, String indexName) 
throws MetaException;
+
+  List<Index> getIndexes(String dbName,
+      String origTableName, int max) throws MetaException;
+
+  List<String> listIndexNames(String dbName,
+      String origTableName, short max) throws MetaException;
+
+  void alterIndex(String dbname, String baseTblName, String name, Index 
newIndex)
+      throws InvalidObjectException, MetaException;
+
+  List<Partition> getPartitionsByFilter(
+      String dbName, String tblName, String filter, short maxParts)
+      throws MetaException, NoSuchObjectException;
+
+  boolean getPartitionsByExpr(String dbName, String tblName,
+      byte[] expr, String defaultPartitionName, short maxParts, 
List<Partition> result)
+      throws TException;
+
+  int getNumPartitionsByFilter(String dbName, String tblName, String filter)
+    throws MetaException, NoSuchObjectException;
+
+  int getNumPartitionsByExpr(String dbName, String tblName, byte[] expr) 
throws MetaException, NoSuchObjectException;
+
+  List<Partition> getPartitionsByNames(
+      String dbName, String tblName, List<String> partNames)
+      throws MetaException, NoSuchObjectException;
+
+  Table markPartitionForEvent(String dbName, String tblName, 
Map<String,String> partVals, PartitionEventType evtType) throws MetaException, 
UnknownTableException, InvalidPartitionException, UnknownPartitionException;
+
+  boolean isPartitionMarkedForEvent(String dbName, String tblName, Map<String, 
String> partName, PartitionEventType evtType) throws MetaException, 
UnknownTableException, InvalidPartitionException, UnknownPartitionException;
+
+  boolean addRole(String rowName, String ownerName)
+      throws InvalidObjectException, MetaException, NoSuchObjectException;
+
+  boolean removeRole(String roleName) throws MetaException, 
NoSuchObjectException;
+
+  boolean grantRole(Role role, String userName, PrincipalType principalType,
+      String grantor, PrincipalType grantorType, boolean grantOption)
+      throws MetaException, NoSuchObjectException, InvalidObjectException;
+
+  boolean revokeRole(Role role, String userName, PrincipalType principalType,
+      boolean grantOption) throws MetaException, NoSuchObjectException;
+
+  PrincipalPrivilegeSet getUserPrivilegeSet(String userName,
+      List<String> groupNames) throws InvalidObjectException, MetaException;
+
+  PrincipalPrivilegeSet getDBPrivilegeSet (String dbName, String userName,
+      List<String> groupNames)  throws InvalidObjectException, MetaException;
+
+  PrincipalPrivilegeSet getTablePrivilegeSet (String dbName, String tableName,
+      String userName, List<String> groupNames) throws InvalidObjectException, 
MetaException;
+
+  PrincipalPrivilegeSet getPartitionPrivilegeSet (String dbName, String 
tableName,
+      String partition, String userName, List<String> groupNames) throws 
InvalidObjectException, MetaException;
+
+  PrincipalPrivilegeSet getColumnPrivilegeSet (String dbName, String 
tableName, String partitionName,
+      String columnName, String userName, List<String> groupNames) throws 
InvalidObjectException, MetaException;
+
+  List<HiveObjectPrivilege> listPrincipalGlobalGrants(String principalName,
+      PrincipalType principalType);
+
+  List<HiveObjectPrivilege> listPrincipalDBGrants(String principalName,
+      PrincipalType principalType, String dbName);
+
+  List<HiveObjectPrivilege> listAllTableGrants(
+      String principalName, PrincipalType principalType, String dbName,
+      String tableName);
+
+  List<HiveObjectPrivilege> listPrincipalPartitionGrants(
+      String principalName, PrincipalType principalType, String dbName,
+      String tableName, List<String> partValues, String partName);
+
+  List<HiveObjectPrivilege> listPrincipalTableColumnGrants(
+      String principalName, PrincipalType principalType, String dbName,
+      String tableName, String columnName);
+
+  List<HiveObjectPrivilege> listPrincipalPartitionColumnGrants(
+      String principalName, PrincipalType principalType, String dbName,
+      String tableName, List<String> partValues, String partName, String 
columnName);
+
+  boolean grantPrivileges (PrivilegeBag privileges)
+      throws InvalidObjectException, MetaException, NoSuchObjectException;
+
+  boolean revokePrivileges  (PrivilegeBag privileges, boolean grantOption)
+  throws InvalidObjectException, MetaException, NoSuchObjectException;
+
+  org.apache.hadoop.hive.metastore.api.Role getRole(
+      String roleName) throws NoSuchObjectException;
+
+  List<String> listRoleNames();
+
+  List<Role> listRoles(String principalName,
+      PrincipalType principalType);
+
+  List<RolePrincipalGrant> listRolesWithGrants(String principalName,
+                                                      PrincipalType 
principalType);
+
+
+  /**
+   * Get the role to principal grant mapping for given role
+   * @param roleName
+   * @return
+   */
+  List<RolePrincipalGrant> listRoleMembers(String roleName);
+
+
+  Partition getPartitionWithAuth(String dbName, String tblName,
+      List<String> partVals, String user_name, List<String> group_names)
+      throws MetaException, NoSuchObjectException, InvalidObjectException;
+
+  List<Partition> getPartitionsWithAuth(String dbName,
+      String tblName, short maxParts, String userName, List<String> groupNames)
+      throws MetaException, NoSuchObjectException, InvalidObjectException;
+
+  /**
+   * Lists partition names that match a given partial specification
+   * @param db_name
+   *          The name of the database which has the partitions
+   * @param tbl_name
+   *          The name of the table which has the partitions
+   * @param part_vals
+   *          A partial list of values for partitions in order of the table's 
partition keys.
+   *          Entries can be empty if you only want to specify latter 
partitions.
+   * @param max_parts
+   *          The maximum number of partitions to return
+   * @return A list of partition names that match the partial spec.
+   * @throws MetaException
+   * @throws NoSuchObjectException
+   */
+  List<String> listPartitionNamesPs(String db_name, String tbl_name,
+      List<String> part_vals, short max_parts)
+      throws MetaException, NoSuchObjectException;
+
+  /**
+   * Lists partitions that match a given partial specification and sets their 
auth privileges.
+   *   If userName and groupNames null, then no auth privileges are set.
+   * @param db_name
+   *          The name of the database which has the partitions
+   * @param tbl_name
+   *          The name of the table which has the partitions
+   * @param part_vals
+   *          A partial list of values for partitions in order of the table's 
partition keys
+   *          Entries can be empty if you need to specify latter partitions.
+   * @param max_parts
+   *          The maximum number of partitions to return
+   * @param userName
+   *          The user name for the partition for authentication privileges
+   * @param groupNames
+   *          The groupNames for the partition for authentication privileges
+   * @return A list of partitions that match the partial spec.
+   * @throws MetaException
+   * @throws NoSuchObjectException
+   * @throws InvalidObjectException
+   */
+  List<Partition> listPartitionsPsWithAuth(String db_name, String tbl_name,
+      List<String> part_vals, short max_parts, String userName, List<String> 
groupNames)
+      throws MetaException, InvalidObjectException, NoSuchObjectException;
+
+  /** Persists the given column statistics object to the metastore
+   * @param colStats object to persist
+   * @return Boolean indicating the outcome of the operation
+   * @throws NoSuchObjectException
+   * @throws MetaException
+   * @throws InvalidObjectException
+   * @throws InvalidInputException
+   */
+  boolean updateTableColumnStatistics(ColumnStatistics colStats)
+      throws NoSuchObjectException, MetaException, InvalidObjectException, 
InvalidInputException;
+
+  /** Persists the given column statistics object to the metastore
+   * @param partVals
+   *
+   * @param statsObj object to persist
+   * @return Boolean indicating the outcome of the operation
+   * @throws NoSuchObjectException
+   * @throws MetaException
+   * @throws InvalidObjectException
+   * @throws InvalidInputException
+   */
+  boolean updatePartitionColumnStatistics(ColumnStatistics statsObj,
+     List<String> partVals)
+     throws NoSuchObjectException, MetaException, InvalidObjectException, 
InvalidInputException;
+
+  /**
+   * Returns the relevant column statistics for a given column in a given 
table in a given database
+   * if such statistics exist.
+   *
+   * @param dbName name of the database, defaults to current database
+   * @param tableName name of the table
+   * @param colName names of the columns for which statistics is requested
+   * @return Relevant column statistics for the column for the given table
+   * @throws NoSuchObjectException
+   * @throws MetaException
+   *
+   */
+  ColumnStatistics getTableColumnStatistics(String dbName, String tableName,
+    List<String> colName) throws MetaException, NoSuchObjectException;
+
+  /**
+   * Returns the relevant column statistics for given columns in given 
partitions in a given
+   * table in a given database if such statistics exist.
+   */
+  List<ColumnStatistics> getPartitionColumnStatistics(
+     String dbName, String tblName, List<String> partNames, List<String> 
colNames)
+      throws MetaException, NoSuchObjectException;
+
+  /**
+   * Deletes column statistics if present associated with a given db, table, 
partition and col. If
+   * null is passed instead of a colName, stats when present for all columns 
associated
+   * with a given db, table and partition are deleted.
+   *
+   * @param dbName
+   * @param tableName
+   * @param partName
+   * @param partVals
+   * @param colName
+   * @return Boolean indicating the outcome of the operation
+   * @throws NoSuchObjectException
+   * @throws MetaException
+   * @throws InvalidObjectException
+   * @throws InvalidInputException
+   */
+
+  boolean deletePartitionColumnStatistics(String dbName, String tableName,
+      String partName, List<String> partVals, String colName)
+      throws NoSuchObjectException, MetaException, InvalidObjectException, 
InvalidInputException;
+
+  /**
+   * Deletes column statistics if present associated with a given db, table 
and col. If
+   * null is passed instead of a colName, stats when present for all columns 
associated
+   * with a given db and table are deleted.
+   *
+   * @param dbName
+   * @param tableName
+   * @param colName
+   * @return Boolean indicating the outcome of the operation
+   * @throws NoSuchObjectException
+   * @throws MetaException
+   * @throws InvalidObjectException
+   * @throws InvalidInputException
+   */
+
+  boolean deleteTableColumnStatistics(String dbName, String tableName,
+    String colName)
+    throws NoSuchObjectException, MetaException, InvalidObjectException, 
InvalidInputException;
+
+  long cleanupEvents();
+
+  boolean addToken(String tokenIdentifier, String delegationToken);
+
+  boolean removeToken(String tokenIdentifier);
+
+  String getToken(String tokenIdentifier);
+
+  List<String> getAllTokenIdentifiers();
+
+  int addMasterKey(String key) throws MetaException;
+
+  void updateMasterKey(Integer seqNo, String key)
+     throws NoSuchObjectException, MetaException;
+
+  boolean removeMasterKey(Integer keySeq);
+
+  String[] getMasterKeys();
+
+  void verifySchema() throws MetaException;
+
+  String getMetaStoreSchemaVersion() throws  MetaException;
+
+  abstract void setMetaStoreSchemaVersion(String version, String comment) 
throws MetaException;
+
+  void dropPartitions(String dbName, String tblName, List<String> partNames)
+      throws MetaException, NoSuchObjectException;
+
+  List<HiveObjectPrivilege> listPrincipalDBGrantsAll(
+      String principalName, PrincipalType principalType);
+
+  List<HiveObjectPrivilege> listPrincipalTableGrantsAll(
+      String principalName, PrincipalType principalType);
+
+  List<HiveObjectPrivilege> listPrincipalPartitionGrantsAll(
+      String principalName, PrincipalType principalType);
+
+  List<HiveObjectPrivilege> listPrincipalTableColumnGrantsAll(
+      String principalName, PrincipalType principalType);
+
+  List<HiveObjectPrivilege> listPrincipalPartitionColumnGrantsAll(
+      String principalName, PrincipalType principalType);
+
+  List<HiveObjectPrivilege> listGlobalGrantsAll();
+
+  List<HiveObjectPrivilege> listDBGrantsAll(String dbName);
+
+  List<HiveObjectPrivilege> listPartitionColumnGrantsAll(
+      String dbName, String tableName, String partitionName, String 
columnName);
+
+  List<HiveObjectPrivilege> listTableGrantsAll(String dbName, String 
tableName);
+
+  List<HiveObjectPrivilege> listPartitionGrantsAll(
+      String dbName, String tableName, String partitionName);
+
+  List<HiveObjectPrivilege> listTableColumnGrantsAll(
+      String dbName, String tableName, String columnName);
+
+  /**
+   * Register a user-defined function based on the function specification 
passed in.
+   * @param func
+   * @throws InvalidObjectException
+   * @throws MetaException
+   */
+  void createFunction(Function func)
+      throws InvalidObjectException, MetaException;
+
+  /**
+   * Alter function based on new function specs.
+   * @param dbName
+   * @param funcName
+   * @param newFunction
+   * @throws InvalidObjectException
+   * @throws MetaException
+   */
+  void alterFunction(String dbName, String funcName, Function newFunction)
+      throws InvalidObjectException, MetaException;
+
+  /**
+   * Drop a function definition.
+   * @param dbName
+   * @param funcName
+   * @throws MetaException
+   * @throws NoSuchObjectException
+   * @throws InvalidObjectException
+   * @throws InvalidInputException
+   */
+  void dropFunction(String dbName, String funcName)
+      throws MetaException, NoSuchObjectException, InvalidObjectException, 
InvalidInputException;
+
+  /**
+   * Retrieve function by name.
+   * @param dbName
+   * @param funcName
+   * @return
+   * @throws MetaException
+   */
+  Function getFunction(String dbName, String funcName) throws MetaException;
+
+  /**
+   * Retrieve all functions.
+   * @return
+   * @throws MetaException
+   */
+  List<Function> getAllFunctions() throws MetaException;
+
+  /**
+   * Retrieve list of function names based on name pattern.
+   * @param dbName
+   * @param pattern
+   * @return
+   * @throws MetaException
+   */
+  List<String> getFunctions(String dbName, String pattern) throws 
MetaException;
+
  /**
   * Get aggregated column statistics over a set of partitions of a table.
   * @param dbName database the table is in
   * @param tblName table to aggregate statistics for
   * @param partNames names of the partitions to aggregate over
   * @param colNames names of the columns to aggregate statistics for
   * @return aggregated statistics for the requested columns
   * @throws MetaException if something goes wrong accessing the metadata store
   * @throws NoSuchObjectException if the table or partitions do not exist
   */
  AggrStats get_aggr_stats_for(String dbName, String tblName,
    List<String> partNames, List<String> colNames) throws MetaException, NoSuchObjectException;

  /**
   * Get all partition column statistics for a table in a db.
   *
   * @param dbName database the table is in
   * @param tableName table to fetch partition statistics for
   * @return Map of partition column statistics. Key in the map is partition name. Value is a list
   *         of column stat object for each column in the partition
   * @throws MetaException if something goes wrong accessing the metadata store
   * @throws NoSuchObjectException if the table does not exist
   */
  Map<String, List<ColumnStatisticsObj>> getColStatsForTablePartitions(String dbName,
      String tableName) throws MetaException, NoSuchObjectException;
+
+  /**
+   * Get the next notification event.
+   * @param rqst Request containing information on the last processed 
notification.
+   * @return list of notifications, sorted by eventId
+   */
+  NotificationEventResponse getNextNotification(NotificationEventRequest rqst);
+
+
+  /**
+   * Add a notification entry.  This should only be called from inside the 
metastore
+   * @param event the notification to add
+   */
+  void addNotificationEvent(NotificationEvent event);
+
+  /**
+   * Remove older notification events.
+   * @param olderThan Remove any events older than a given number of seconds
+   */
+  void cleanNotificationEvents(int olderThan);
+
+  /**
+   * Get the last issued notification event id.  This is intended for use by 
the export command
+   * so that users can determine the state of the system at the point of the 
export,
+   * and determine which notification events happened before or after the 
export.
+   * @return
+   */
+  CurrentNotificationEventId getCurrentNotificationEventId();
+
+  /**
+   * Get the number of events corresponding to given database with fromEventId.
+   * This is intended for use by the repl commands to track the progress of 
incremental dump.
+   * @return
+   */
+  public NotificationEventsCountResponse 
getNotificationEventsCount(NotificationEventsCountRequest rqst);
+
  /**
   * Flush any catalog objects held by the metastore implementation.  Note that this does not
   * flush statistics objects.  This should be called at the beginning of each query.
   */
  void flushCache();
+
  /**
   * Fetch file metadata from the file metadata cache.
   * @param fileIds List of file IDs from the filesystem.
   * @return File metadata buffers from file metadata cache. The array is fileIds-sized, and
   *         the entries (or nulls, if metadata is not in cache) correspond to fileIds in the list
   * @throws MetaException if something goes wrong accessing the metadata store
   */
  ByteBuffer[] getFileMetadata(List<Long> fileIds) throws MetaException;

  /**
   * Store file metadata into the file metadata cache.
   * @param fileIds List of file IDs from the filesystem.
   * @param metadata Metadata buffers corresponding to fileIds in the list.
   * @param type The type; determines the class that can do additional processing for metadata.
   * @throws MetaException if something goes wrong accessing the metadata store
   */
  void putFileMetadata(List<Long> fileIds, List<ByteBuffer> metadata,
      FileMetadataExprType type) throws MetaException;

  /**
   * @return Whether file metadata cache is supported by this implementation.
   */
  boolean isFileMetadataSupported();

  /**
   * Gets file metadata from cache after applying a format-specific expression that can
   * produce additional information based on file metadata and also filter the file list.
   * @param fileIds List of file IDs from the filesystem.
   * @param type Expression type; used to determine the class that handles the metadata.
   * @param expr Format-specific serialized expression applicable to the files' metadatas.
   * @param metadatas Output parameter; fileIds-sized array to receive the metadatas
   *                  for corresponding files, if any.
   * @param exprResults Output parameter; fileIds-sized array to receive the format-specific
   *                    expression results for the corresponding files.
   * @param eliminated Output parameter; fileIds-sized array to receive the indication of whether
   *                   the corresponding files are entirely eliminated by the expression.
   * @throws MetaException if something goes wrong accessing the metadata store
   */
  void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
      ByteBuffer[] metadatas, ByteBuffer[] exprResults, boolean[] eliminated)
          throws MetaException;

  /** Gets file metadata handler for the corresponding type. */
  FileMetadataHandler getFileMetadataHandler(FileMetadataExprType type);
+
  /**
   * Gets total number of tables.
   * @return number of tables in the metastore
   * @throws MetaException if something goes wrong accessing the metadata store
   */
  @InterfaceStability.Evolving
  int getTableCount() throws MetaException;

  /**
   * Gets total number of partitions.
   * @return number of partitions in the metastore
   * @throws MetaException if something goes wrong accessing the metadata store
   */
  @InterfaceStability.Evolving
  int getPartitionCount() throws MetaException;

  /**
   * Gets total number of databases.
   * @return number of databases in the metastore
   * @throws MetaException if something goes wrong accessing the metadata store
   */
  @InterfaceStability.Evolving
  int getDatabaseCount() throws MetaException;
+
  /**
   * Get the primary key of a table.
   * @param db_name database the table is in
   * @param tbl_name table to fetch the primary key for
   * @return primary key columns, one entry per key column
   * @throws MetaException if something goes wrong accessing the metadata store
   */
  List<SQLPrimaryKey> getPrimaryKeys(String db_name,
    String tbl_name) throws MetaException;

  /**
   * Get the foreign keys for a table.  All foreign keys for a particular table can be fetched by
   * passing null for the last two arguments.
   * @param parent_db_name Database the table referred to is in.  This can be null to match all
   *                       databases.
   * @param parent_tbl_name Table that is referred to.  This can be null to match all tables.
   * @param foreign_db_name Database the table with the foreign key is in.
   * @param foreign_tbl_name Table with the foreign key.
   * @return List of all matching foreign key columns.  Note that if more than one foreign key
   * matches the arguments the results here will be all mixed together into a single list.
   * @throws MetaException if something goes wrong.
   */
  List<SQLForeignKey> getForeignKeys(String parent_db_name,
    String parent_tbl_name, String foreign_db_name, String foreign_tbl_name)
    throws MetaException;

  /**
   * Get the unique constraints of a table.
   * @param db_name database the table is in
   * @param tbl_name table to fetch the unique constraints for
   * @return unique constraint columns for the table
   * @throws MetaException if something goes wrong accessing the metadata store
   */
  List<SQLUniqueConstraint> getUniqueConstraints(String db_name,
    String tbl_name) throws MetaException;

  /**
   * Get the not-null constraints of a table.
   * @param db_name database the table is in
   * @param tbl_name table to fetch the not-null constraints for
   * @return not-null constraint columns for the table
   * @throws MetaException if something goes wrong accessing the metadata store
   */
  List<SQLNotNullConstraint> getNotNullConstraints(String db_name,
    String tbl_name) throws MetaException;

  /**
   * Create a table together with its declared constraints in one operation.
   * @return names of the created constraints -- presumably generated when not supplied by
   *         the caller; confirm against implementations
   * @throws InvalidObjectException if the table or a constraint specification is malformed
   * @throws MetaException if something goes wrong accessing the metadata store
   */
  List<String> createTableWithConstraints(Table tbl, List<SQLPrimaryKey> primaryKeys,
    List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints,
    List<SQLNotNullConstraint> notNullConstraints) throws InvalidObjectException, MetaException;

  /**
   * Drop a named constraint from a table.
   * @throws NoSuchObjectException if no such constraint exists
   */
  void dropConstraint(String dbName, String tableName, String constraintName) throws NoSuchObjectException;

  /**
   * Add primary key columns to an existing table.
   * @return names of the added constraints -- TODO confirm against implementations
   */
  List<String> addPrimaryKeys(List<SQLPrimaryKey> pks) throws InvalidObjectException, MetaException;

  /**
   * Add foreign key columns to an existing table.
   * @return names of the added constraints -- TODO confirm against implementations
   */
  List<String> addForeignKeys(List<SQLForeignKey> fks) throws InvalidObjectException, MetaException;

  /**
   * Add unique constraints to an existing table.
   * @return names of the added constraints -- TODO confirm against implementations
   */
  List<String> addUniqueConstraints(List<SQLUniqueConstraint> uks) throws InvalidObjectException, MetaException;

  /**
   * Add not-null constraints to an existing table.
   * @return names of the added constraints -- TODO confirm against implementations
   */
  List<String> addNotNullConstraints(List<SQLNotNullConstraint> nns) throws InvalidObjectException, MetaException;
+
  /**
   * Gets the unique id of the backing datastore for the metadata.
   * @return unique identifier of the backing database instance
   * @throws MetaException if the id cannot be determined
   */
  String getMetastoreDbUuid() throws MetaException;
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/133d3c47/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java
new file mode 100644
index 0000000..2fd2268
--- /dev/null
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.lang.ClassUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.utils.JavaUtils;
+import org.apache.hadoop.util.ReflectionUtils;
+
[email protected]
[email protected]
+public class RawStoreProxy implements InvocationHandler {
+
+  private final RawStore base;
+  private final MetaStoreInit.MetaStoreInitData metaStoreInitData =
+    new MetaStoreInit.MetaStoreInitData();
+  private final Configuration hiveConf;
+  private final Configuration conf; // thread local conf from HMS
+  private final long socketTimeout;
+
+  protected RawStoreProxy(Configuration hiveConf, Configuration conf,
+      Class<? extends RawStore> rawStoreClass, int id) throws MetaException {
+    this.conf = conf;
+    this.hiveConf = hiveConf;
+    this.socketTimeout = MetastoreConf.getTimeVar(hiveConf,
+        MetastoreConf.ConfVars.CLIENT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS);
+
+    // This has to be called before initializing the instance of RawStore
+    init();
+
+    this.base = ReflectionUtils.newInstance(rawStoreClass, conf);
+  }
+
+  public static RawStore getProxy(Configuration hiveConf, Configuration conf, 
String rawStoreClassName,
+      int id) throws MetaException {
+
+    Class<? extends RawStore> baseClass = 
JavaUtils.getClass(rawStoreClassName, RawStore.class);
+
+    RawStoreProxy handler = new RawStoreProxy(hiveConf, conf, baseClass, id);
+
+    // Look for interfaces on both the class and all base classes.
+    return (RawStore) 
Proxy.newProxyInstance(RawStoreProxy.class.getClassLoader(),
+        getAllInterfaces(baseClass), handler);
+  }
+
+  private static Class<?>[] getAllInterfaces(Class<?> baseClass) {
+    List interfaces = ClassUtils.getAllInterfaces(baseClass);
+    Class<?>[] result = new Class<?>[interfaces.size()];
+    int i = 0;
+    for (Object o : interfaces) {
+      result[i++] = (Class<?>)o;
+    }
+    return result;
+  }
+
+  private void init() throws MetaException {
+    // Using the hook on startup ensures that the hook always has priority
+    // over settings in *.xml.  The thread local conf needs to be used because 
at this point
+    // it has already been initialized using hiveConf.
+    MetaStoreInit.updateConnectionURL(hiveConf, getConf(), null, 
metaStoreInitData);
+  }
+
+  @Override
+  public Object invoke(Object proxy, Method method, Object[] args) throws 
Throwable {
+    try {
+      Deadline.registerIfNot(socketTimeout);
+      boolean isTimerStarted = Deadline.startTimer(method.getName());
+      try {
+        return method.invoke(base, args);
+      } finally {
+        if (isTimerStarted) {
+          Deadline.stopTimer();
+        }
+      }
+    } catch (UndeclaredThrowableException e) {
+      throw e.getCause();
+    } catch (InvocationTargetException e) {
+      throw e.getCause();
+    }
+  }
+
+  public Configuration getConf() {
+    return conf;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/133d3c47/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
new file mode 100644
index 0000000..08ea67f
--- /dev/null
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
@@ -0,0 +1,888 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData._Fields;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.Date;
+import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.Decimal;
+import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
+import 
org.apache.hadoop.hive.metastore.columnstats.cache.DateColumnStatsDataInspector;
+import 
org.apache.hadoop.hive.metastore.columnstats.cache.DecimalColumnStatsDataInspector;
+import 
org.apache.hadoop.hive.metastore.columnstats.cache.DoubleColumnStatsDataInspector;
+import 
org.apache.hadoop.hive.metastore.columnstats.cache.LongColumnStatsDataInspector;
+import 
org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector;
+import org.apache.hadoop.hive.metastore.model.MPartition;
+import org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics;
+import org.apache.hadoop.hive.metastore.model.MTable;
+import org.apache.hadoop.hive.metastore.model.MTableColumnStatistics;
+
+/**
+ * This class contains conversion logic that creates Thrift stat objects from
+ * JDO stat objects and plain arrays from DirectSQL.
+ * It is hidden here so that we wouldn't have to look at it in elsewhere.
+ */
+public class StatObjectConverter {
+  // JDO
+  public static MTableColumnStatistics convertToMTableColumnStatistics(MTable 
table,
+      ColumnStatisticsDesc statsDesc, ColumnStatisticsObj statsObj)
+          throws NoSuchObjectException, MetaException, InvalidObjectException {
+     if (statsObj == null || statsDesc == null) {
+       throw new InvalidObjectException("Invalid column stats object");
+     }
+
+     MTableColumnStatistics mColStats = new MTableColumnStatistics();
+     mColStats.setTable(table);
+     mColStats.setDbName(statsDesc.getDbName());
+     mColStats.setTableName(statsDesc.getTableName());
+     mColStats.setLastAnalyzed(statsDesc.getLastAnalyzed());
+     mColStats.setColName(statsObj.getColName());
+     mColStats.setColType(statsObj.getColType());
+
+     if (statsObj.getStatsData().isSetBooleanStats()) {
+       BooleanColumnStatsData boolStats = 
statsObj.getStatsData().getBooleanStats();
+       mColStats.setBooleanStats(
+           boolStats.isSetNumTrues() ? boolStats.getNumTrues() : null,
+           boolStats.isSetNumFalses() ? boolStats.getNumFalses() : null,
+           boolStats.isSetNumNulls() ? boolStats.getNumNulls() : null);
+     } else if (statsObj.getStatsData().isSetLongStats()) {
+       LongColumnStatsData longStats = statsObj.getStatsData().getLongStats();
+       mColStats.setLongStats(
+           longStats.isSetNumNulls() ? longStats.getNumNulls() : null,
+           longStats.isSetNumDVs() ? longStats.getNumDVs() : null,
+           longStats.isSetBitVectors() ? longStats.getBitVectors() : null,
+           longStats.isSetLowValue() ? longStats.getLowValue() : null,
+           longStats.isSetHighValue() ? longStats.getHighValue() : null);
+     } else if (statsObj.getStatsData().isSetDoubleStats()) {
+       DoubleColumnStatsData doubleStats = 
statsObj.getStatsData().getDoubleStats();
+       mColStats.setDoubleStats(
+           doubleStats.isSetNumNulls() ? doubleStats.getNumNulls() : null,
+           doubleStats.isSetNumDVs() ? doubleStats.getNumDVs() : null,
+           doubleStats.isSetBitVectors() ? doubleStats.getBitVectors() : null,
+           doubleStats.isSetLowValue() ? doubleStats.getLowValue() : null,
+           doubleStats.isSetHighValue() ? doubleStats.getHighValue() : null);
+     } else if (statsObj.getStatsData().isSetDecimalStats()) {
+       DecimalColumnStatsData decimalStats = 
statsObj.getStatsData().getDecimalStats();
+       String low = decimalStats.isSetLowValue() ? 
createJdoDecimalString(decimalStats.getLowValue()) : null;
+       String high = decimalStats.isSetHighValue() ? 
createJdoDecimalString(decimalStats.getHighValue()) : null;
+       mColStats.setDecimalStats(
+           decimalStats.isSetNumNulls() ? decimalStats.getNumNulls() : null,
+           decimalStats.isSetNumDVs() ? decimalStats.getNumDVs() : null,
+           decimalStats.isSetBitVectors() ? decimalStats.getBitVectors() : 
null,
+               low, high);
+     } else if (statsObj.getStatsData().isSetStringStats()) {
+       StringColumnStatsData stringStats = 
statsObj.getStatsData().getStringStats();
+       mColStats.setStringStats(
+           stringStats.isSetNumNulls() ? stringStats.getNumNulls() : null,
+           stringStats.isSetNumDVs() ? stringStats.getNumDVs() : null,
+           stringStats.isSetBitVectors() ? stringStats.getBitVectors() : null,
+           stringStats.isSetMaxColLen() ? stringStats.getMaxColLen() : null,
+           stringStats.isSetAvgColLen() ? stringStats.getAvgColLen() : null);
+     } else if (statsObj.getStatsData().isSetBinaryStats()) {
+       BinaryColumnStatsData binaryStats = 
statsObj.getStatsData().getBinaryStats();
+       mColStats.setBinaryStats(
+           binaryStats.isSetNumNulls() ? binaryStats.getNumNulls() : null,
+           binaryStats.isSetMaxColLen() ? binaryStats.getMaxColLen() : null,
+           binaryStats.isSetAvgColLen() ? binaryStats.getAvgColLen() : null);
+     } else if (statsObj.getStatsData().isSetDateStats()) {
+       DateColumnStatsData dateStats = statsObj.getStatsData().getDateStats();
+       mColStats.setDateStats(
+           dateStats.isSetNumNulls() ? dateStats.getNumNulls() : null,
+           dateStats.isSetNumDVs() ? dateStats.getNumDVs() : null,
+           dateStats.isSetBitVectors() ? dateStats.getBitVectors() : null,
+           dateStats.isSetLowValue() ? 
dateStats.getLowValue().getDaysSinceEpoch() : null,
+           dateStats.isSetHighValue() ? 
dateStats.getHighValue().getDaysSinceEpoch() : null);
+     }
+     return mColStats;
+  }
+
  /**
   * Merges a freshly computed table-level statistics row into an existing one.
   * Each statistics field is copied only when the new value is non-null, so fields
   * the new row does not carry keep their previous values.  lastAnalyzed is always
   * overwritten unconditionally.
   *
   * @param mStatsObj new statistics to merge from
   * @param oldStatsObj existing statistics row, mutated in place
   */
  public static void setFieldsIntoOldStats(
      MTableColumnStatistics mStatsObj, MTableColumnStatistics oldStatsObj) {
    if (mStatsObj.getAvgColLen() != null) {
      oldStatsObj.setAvgColLen(mStatsObj.getAvgColLen());
    }
    if (mStatsObj.getLongHighValue() != null) {
      oldStatsObj.setLongHighValue(mStatsObj.getLongHighValue());
    }
    if (mStatsObj.getLongLowValue() != null) {
      oldStatsObj.setLongLowValue(mStatsObj.getLongLowValue());
    }
    if (mStatsObj.getDoubleLowValue() != null) {
      oldStatsObj.setDoubleLowValue(mStatsObj.getDoubleLowValue());
    }
    if (mStatsObj.getDoubleHighValue() != null) {
      oldStatsObj.setDoubleHighValue(mStatsObj.getDoubleHighValue());
    }
    if (mStatsObj.getDecimalLowValue() != null) {
      oldStatsObj.setDecimalLowValue(mStatsObj.getDecimalLowValue());
    }
    if (mStatsObj.getDecimalHighValue() != null) {
      oldStatsObj.setDecimalHighValue(mStatsObj.getDecimalHighValue());
    }
    if (mStatsObj.getMaxColLen() != null) {
      oldStatsObj.setMaxColLen(mStatsObj.getMaxColLen());
    }
    if (mStatsObj.getNumDVs() != null) {
      oldStatsObj.setNumDVs(mStatsObj.getNumDVs());
    }
    if (mStatsObj.getBitVector() != null) {
      oldStatsObj.setBitVector(mStatsObj.getBitVector());
    }
    if (mStatsObj.getNumFalses() != null) {
      oldStatsObj.setNumFalses(mStatsObj.getNumFalses());
    }
    if (mStatsObj.getNumTrues() != null) {
      oldStatsObj.setNumTrues(mStatsObj.getNumTrues());
    }
    if (mStatsObj.getNumNulls() != null) {
      oldStatsObj.setNumNulls(mStatsObj.getNumNulls());
    }
    oldStatsObj.setLastAnalyzed(mStatsObj.getLastAnalyzed());
  }
+
  /**
   * Merges a freshly computed partition-level statistics row into an existing one.
   * Each statistics field is copied only when the new value is non-null; lastAnalyzed
   * is always overwritten unconditionally.
   *
   * NOTE(review): the copy order differs from the table-level overload (lastAnalyzed is
   * set in the middle here) -- harmless if the model setters are independent, but worth
   * aligning with the sibling method for readability.
   *
   * @param mStatsObj new statistics to merge from
   * @param oldStatsObj existing statistics row, mutated in place
   */
  public static void setFieldsIntoOldStats(
      MPartitionColumnStatistics mStatsObj, MPartitionColumnStatistics oldStatsObj) {
    if (mStatsObj.getAvgColLen() != null) {
          oldStatsObj.setAvgColLen(mStatsObj.getAvgColLen());
    }
    if (mStatsObj.getLongHighValue() != null) {
      oldStatsObj.setLongHighValue(mStatsObj.getLongHighValue());
    }
    if (mStatsObj.getDoubleHighValue() != null) {
      oldStatsObj.setDoubleHighValue(mStatsObj.getDoubleHighValue());
    }
    oldStatsObj.setLastAnalyzed(mStatsObj.getLastAnalyzed());
    if (mStatsObj.getLongLowValue() != null) {
      oldStatsObj.setLongLowValue(mStatsObj.getLongLowValue());
    }
    if (mStatsObj.getDoubleLowValue() != null) {
      oldStatsObj.setDoubleLowValue(mStatsObj.getDoubleLowValue());
    }
    if (mStatsObj.getDecimalLowValue() != null) {
      oldStatsObj.setDecimalLowValue(mStatsObj.getDecimalLowValue());
    }
    if (mStatsObj.getDecimalHighValue() != null) {
      oldStatsObj.setDecimalHighValue(mStatsObj.getDecimalHighValue());
    }
    if (mStatsObj.getMaxColLen() != null) {
      oldStatsObj.setMaxColLen(mStatsObj.getMaxColLen());
    }
    if (mStatsObj.getNumDVs() != null) {
      oldStatsObj.setNumDVs(mStatsObj.getNumDVs());
    }
    if (mStatsObj.getBitVector() != null) {
      oldStatsObj.setBitVector(mStatsObj.getBitVector());
    }
    if (mStatsObj.getNumFalses() != null) {
      oldStatsObj.setNumFalses(mStatsObj.getNumFalses());
    }
    if (mStatsObj.getNumTrues() != null) {
      oldStatsObj.setNumTrues(mStatsObj.getNumTrues());
    }
    if (mStatsObj.getNumNulls() != null) {
      oldStatsObj.setNumNulls(mStatsObj.getNumNulls());
    }
  }
+
+  public static ColumnStatisticsObj getTableColumnStatisticsObj(
+      MTableColumnStatistics mStatsObj, boolean enableBitVector) {
+    ColumnStatisticsObj statsObj = new ColumnStatisticsObj();
+    statsObj.setColType(mStatsObj.getColType());
+    statsObj.setColName(mStatsObj.getColName());
+    String colType = mStatsObj.getColType().toLowerCase();
+    ColumnStatisticsData colStatsData = new ColumnStatisticsData();
+
+    if (colType.equals("boolean")) {
+      BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
+      boolStats.setNumFalses(mStatsObj.getNumFalses());
+      boolStats.setNumTrues(mStatsObj.getNumTrues());
+      boolStats.setNumNulls(mStatsObj.getNumNulls());
+      colStatsData.setBooleanStats(boolStats);
+    } else if (colType.equals("string") ||
+        colType.startsWith("varchar") || colType.startsWith("char")) {
+      StringColumnStatsDataInspector stringStats = new 
StringColumnStatsDataInspector();
+      stringStats.setNumNulls(mStatsObj.getNumNulls());
+      stringStats.setAvgColLen(mStatsObj.getAvgColLen());
+      stringStats.setMaxColLen(mStatsObj.getMaxColLen());
+      stringStats.setNumDVs(mStatsObj.getNumDVs());
+      
stringStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? 
null : mStatsObj.getBitVector());
+      colStatsData.setStringStats(stringStats);
+    } else if (colType.equals("binary")) {
+      BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
+      binaryStats.setNumNulls(mStatsObj.getNumNulls());
+      binaryStats.setAvgColLen(mStatsObj.getAvgColLen());
+      binaryStats.setMaxColLen(mStatsObj.getMaxColLen());
+      colStatsData.setBinaryStats(binaryStats);
+    } else if (colType.equals("bigint") || colType.equals("int") ||
+        colType.equals("smallint") || colType.equals("tinyint") ||
+        colType.equals("timestamp")) {
+      LongColumnStatsDataInspector longStats = new 
LongColumnStatsDataInspector();
+      longStats.setNumNulls(mStatsObj.getNumNulls());
+      Long longHighValue = mStatsObj.getLongHighValue();
+      if (longHighValue != null) {
+        longStats.setHighValue(longHighValue);
+      }
+      Long longLowValue = mStatsObj.getLongLowValue();
+      if (longLowValue != null) {
+        longStats.setLowValue(longLowValue);
+      }
+      longStats.setNumDVs(mStatsObj.getNumDVs());
+      
longStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? 
null : mStatsObj.getBitVector());
+      colStatsData.setLongStats(longStats);
+    } else if (colType.equals("double") || colType.equals("float")) {
+      DoubleColumnStatsDataInspector doubleStats = new 
DoubleColumnStatsDataInspector();
+      doubleStats.setNumNulls(mStatsObj.getNumNulls());
+      Double doubleHighValue = mStatsObj.getDoubleHighValue();
+      if (doubleHighValue != null) {
+        doubleStats.setHighValue(doubleHighValue);
+      }
+      Double doubleLowValue = mStatsObj.getDoubleLowValue();
+      if (doubleLowValue != null) {
+        doubleStats.setLowValue(doubleLowValue);
+      }
+      doubleStats.setNumDVs(mStatsObj.getNumDVs());
+      
doubleStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? 
null : mStatsObj.getBitVector());
+      colStatsData.setDoubleStats(doubleStats);
+    } else if (colType.startsWith("decimal")) {
+      DecimalColumnStatsDataInspector decimalStats = new 
DecimalColumnStatsDataInspector();
+      decimalStats.setNumNulls(mStatsObj.getNumNulls());
+      String decimalHighValue = mStatsObj.getDecimalHighValue();
+      if (decimalHighValue != null) {
+        decimalStats.setHighValue(createThriftDecimal(decimalHighValue));
+      }
+      String decimalLowValue = mStatsObj.getDecimalLowValue();
+      if (decimalLowValue != null) {
+        decimalStats.setLowValue(createThriftDecimal(decimalLowValue));
+      }
+      decimalStats.setNumDVs(mStatsObj.getNumDVs());
+      
decimalStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? 
null : mStatsObj.getBitVector());
+      colStatsData.setDecimalStats(decimalStats);
+    } else if (colType.equals("date")) {
+      DateColumnStatsDataInspector dateStats = new 
DateColumnStatsDataInspector();
+      dateStats.setNumNulls(mStatsObj.getNumNulls());
+      Long highValue = mStatsObj.getLongHighValue();
+      if (highValue != null) {
+        dateStats.setHighValue(new Date(highValue));
+      }
+      Long lowValue = mStatsObj.getLongLowValue();
+      if (lowValue != null) {
+        dateStats.setLowValue(new Date(lowValue));
+      }
+      dateStats.setNumDVs(mStatsObj.getNumDVs());
+      
dateStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? 
null : mStatsObj.getBitVector());
+      colStatsData.setDateStats(dateStats);
+    }
+    statsObj.setStatsData(colStatsData);
+    return statsObj;
+  }
+
+  public static ColumnStatisticsDesc getTableColumnStatisticsDesc(
+      MTableColumnStatistics mStatsObj) {
+    ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc();
+    statsDesc.setIsTblLevel(true);
+    statsDesc.setDbName(mStatsObj.getDbName());
+    statsDesc.setTableName(mStatsObj.getTableName());
+    statsDesc.setLastAnalyzed(mStatsObj.getLastAnalyzed());
+    return statsDesc;
+  }
+
+  public static MPartitionColumnStatistics convertToMPartitionColumnStatistics(
+      MPartition partition, ColumnStatisticsDesc statsDesc, 
ColumnStatisticsObj statsObj)
+          throws MetaException, NoSuchObjectException {
+    if (statsDesc == null || statsObj == null) {
+      return null;
+    }
+
+    MPartitionColumnStatistics mColStats = new MPartitionColumnStatistics();
+    mColStats.setPartition(partition);
+    mColStats.setDbName(statsDesc.getDbName());
+    mColStats.setTableName(statsDesc.getTableName());
+    mColStats.setPartitionName(statsDesc.getPartName());
+    mColStats.setLastAnalyzed(statsDesc.getLastAnalyzed());
+    mColStats.setColName(statsObj.getColName());
+    mColStats.setColType(statsObj.getColType());
+
+    if (statsObj.getStatsData().isSetBooleanStats()) {
+      BooleanColumnStatsData boolStats = 
statsObj.getStatsData().getBooleanStats();
+      mColStats.setBooleanStats(
+          boolStats.isSetNumTrues() ? boolStats.getNumTrues() : null,
+          boolStats.isSetNumFalses() ? boolStats.getNumFalses() : null,
+          boolStats.isSetNumNulls() ? boolStats.getNumNulls() : null);
+    } else if (statsObj.getStatsData().isSetLongStats()) {
+      LongColumnStatsData longStats = statsObj.getStatsData().getLongStats();
+      mColStats.setLongStats(
+          longStats.isSetNumNulls() ? longStats.getNumNulls() : null,
+          longStats.isSetNumDVs() ? longStats.getNumDVs() : null,
+          longStats.isSetBitVectors() ? longStats.getBitVectors() : null,
+          longStats.isSetLowValue() ? longStats.getLowValue() : null,
+          longStats.isSetHighValue() ? longStats.getHighValue() : null);
+    } else if (statsObj.getStatsData().isSetDoubleStats()) {
+      DoubleColumnStatsData doubleStats = 
statsObj.getStatsData().getDoubleStats();
+      mColStats.setDoubleStats(
+          doubleStats.isSetNumNulls() ? doubleStats.getNumNulls() : null,
+          doubleStats.isSetNumDVs() ? doubleStats.getNumDVs() : null,
+          doubleStats.isSetBitVectors() ? doubleStats.getBitVectors() : null,
+          doubleStats.isSetLowValue() ? doubleStats.getLowValue() : null,
+          doubleStats.isSetHighValue() ? doubleStats.getHighValue() : null);
+    } else if (statsObj.getStatsData().isSetDecimalStats()) {
+      DecimalColumnStatsData decimalStats = 
statsObj.getStatsData().getDecimalStats();
+      String low = decimalStats.isSetLowValue() ? 
createJdoDecimalString(decimalStats.getLowValue()) : null;
+      String high = decimalStats.isSetHighValue() ? 
createJdoDecimalString(decimalStats.getHighValue()) : null;
+      mColStats.setDecimalStats(
+          decimalStats.isSetNumNulls() ? decimalStats.getNumNulls() : null,
+          decimalStats.isSetNumDVs() ? decimalStats.getNumDVs() : null,
+          decimalStats.isSetBitVectors() ? decimalStats.getBitVectors() : null,
+              low, high);
+    } else if (statsObj.getStatsData().isSetStringStats()) {
+      StringColumnStatsData stringStats = 
statsObj.getStatsData().getStringStats();
+      mColStats.setStringStats(
+          stringStats.isSetNumNulls() ? stringStats.getNumNulls() : null,
+          stringStats.isSetNumDVs() ? stringStats.getNumDVs() : null,
+          stringStats.isSetBitVectors() ? stringStats.getBitVectors() : null,
+          stringStats.isSetMaxColLen() ? stringStats.getMaxColLen() : null,
+          stringStats.isSetAvgColLen() ? stringStats.getAvgColLen() : null);
+    } else if (statsObj.getStatsData().isSetBinaryStats()) {
+      BinaryColumnStatsData binaryStats = 
statsObj.getStatsData().getBinaryStats();
+      mColStats.setBinaryStats(
+          binaryStats.isSetNumNulls() ? binaryStats.getNumNulls() : null,
+          binaryStats.isSetMaxColLen() ? binaryStats.getMaxColLen() : null,
+          binaryStats.isSetAvgColLen() ? binaryStats.getAvgColLen() : null);
+    } else if (statsObj.getStatsData().isSetDateStats()) {
+      DateColumnStatsData dateStats = statsObj.getStatsData().getDateStats();
+      mColStats.setDateStats(
+          dateStats.isSetNumNulls() ? dateStats.getNumNulls() : null,
+          dateStats.isSetNumDVs() ? dateStats.getNumDVs() : null,
+          dateStats.isSetBitVectors() ? dateStats.getBitVectors() : null,
+          dateStats.isSetLowValue() ? 
dateStats.getLowValue().getDaysSinceEpoch() : null,
+          dateStats.isSetHighValue() ? 
dateStats.getHighValue().getDaysSinceEpoch() : null);
+    }
+    return mColStats;
+  }
+
+  public static ColumnStatisticsObj getPartitionColumnStatisticsObj(
+      MPartitionColumnStatistics mStatsObj, boolean enableBitVector) {
+    ColumnStatisticsObj statsObj = new ColumnStatisticsObj();
+    statsObj.setColType(mStatsObj.getColType());
+    statsObj.setColName(mStatsObj.getColName());
+    String colType = mStatsObj.getColType().toLowerCase();
+    ColumnStatisticsData colStatsData = new ColumnStatisticsData();
+
+    if (colType.equals("boolean")) {
+      BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
+      boolStats.setNumFalses(mStatsObj.getNumFalses());
+      boolStats.setNumTrues(mStatsObj.getNumTrues());
+      boolStats.setNumNulls(mStatsObj.getNumNulls());
+      colStatsData.setBooleanStats(boolStats);
+    } else if (colType.equals("string") ||
+        colType.startsWith("varchar") || colType.startsWith("char")) {
+      StringColumnStatsDataInspector stringStats = new 
StringColumnStatsDataInspector();
+      stringStats.setNumNulls(mStatsObj.getNumNulls());
+      stringStats.setAvgColLen(mStatsObj.getAvgColLen());
+      stringStats.setMaxColLen(mStatsObj.getMaxColLen());
+      stringStats.setNumDVs(mStatsObj.getNumDVs());
+      
stringStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? 
null : mStatsObj.getBitVector());
+      colStatsData.setStringStats(stringStats);
+    } else if (colType.equals("binary")) {
+      BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
+      binaryStats.setNumNulls(mStatsObj.getNumNulls());
+      binaryStats.setAvgColLen(mStatsObj.getAvgColLen());
+      binaryStats.setMaxColLen(mStatsObj.getMaxColLen());
+      colStatsData.setBinaryStats(binaryStats);
+    } else if (colType.equals("tinyint") || colType.equals("smallint") ||
+        colType.equals("int") || colType.equals("bigint") ||
+        colType.equals("timestamp")) {
+      LongColumnStatsDataInspector longStats = new 
LongColumnStatsDataInspector();
+      longStats.setNumNulls(mStatsObj.getNumNulls());
+      if (mStatsObj.getLongHighValue() != null) {
+        longStats.setHighValue(mStatsObj.getLongHighValue());
+      }
+      if (mStatsObj.getLongLowValue() != null) {
+        longStats.setLowValue(mStatsObj.getLongLowValue());
+      }
+      longStats.setNumDVs(mStatsObj.getNumDVs());
+      
longStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? 
null : mStatsObj.getBitVector());
+      colStatsData.setLongStats(longStats);
+    } else if (colType.equals("double") || colType.equals("float")) {
+      DoubleColumnStatsDataInspector doubleStats = new 
DoubleColumnStatsDataInspector();
+      doubleStats.setNumNulls(mStatsObj.getNumNulls());
+      if (mStatsObj.getDoubleHighValue() != null) {
+        doubleStats.setHighValue(mStatsObj.getDoubleHighValue());
+      }
+      if (mStatsObj.getDoubleLowValue() != null) {
+        doubleStats.setLowValue(mStatsObj.getDoubleLowValue());
+      }
+      doubleStats.setNumDVs(mStatsObj.getNumDVs());
+      
doubleStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? 
null : mStatsObj.getBitVector());
+      colStatsData.setDoubleStats(doubleStats);
+    } else if (colType.startsWith("decimal")) {
+      DecimalColumnStatsDataInspector decimalStats = new 
DecimalColumnStatsDataInspector();
+      decimalStats.setNumNulls(mStatsObj.getNumNulls());
+      if (mStatsObj.getDecimalHighValue() != null) {
+        
decimalStats.setHighValue(createThriftDecimal(mStatsObj.getDecimalHighValue()));
+      }
+      if (mStatsObj.getDecimalLowValue() != null) {
+        
decimalStats.setLowValue(createThriftDecimal(mStatsObj.getDecimalLowValue()));
+      }
+      decimalStats.setNumDVs(mStatsObj.getNumDVs());
+      
decimalStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? 
null : mStatsObj.getBitVector());
+      colStatsData.setDecimalStats(decimalStats);
+    } else if (colType.equals("date")) {
+      DateColumnStatsDataInspector dateStats = new 
DateColumnStatsDataInspector();
+      dateStats.setNumNulls(mStatsObj.getNumNulls());
+      dateStats.setHighValue(new Date(mStatsObj.getLongHighValue()));
+      dateStats.setLowValue(new Date(mStatsObj.getLongLowValue()));
+      dateStats.setNumDVs(mStatsObj.getNumDVs());
+      
dateStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? 
null : mStatsObj.getBitVector());
+      colStatsData.setDateStats(dateStats);
+    }
+    statsObj.setStatsData(colStatsData);
+    return statsObj;
+  }
+
+  public static ColumnStatisticsDesc getPartitionColumnStatisticsDesc(
+    MPartitionColumnStatistics mStatsObj) {
+    ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc();
+    statsDesc.setIsTblLevel(false);
+    statsDesc.setDbName(mStatsObj.getDbName());
+    statsDesc.setTableName(mStatsObj.getTableName());
+    statsDesc.setPartName(mStatsObj.getPartitionName());
+    statsDesc.setLastAnalyzed(mStatsObj.getLastAnalyzed());
+    return statsDesc;
+  }
+
+  // JAVA
+  public static void fillColumnStatisticsData(String colType, 
ColumnStatisticsData data,
+      Object llow, Object lhigh, Object dlow, Object dhigh, Object declow, 
Object dechigh,
+      Object nulls, Object dist, Object bitVector, Object avglen, Object 
maxlen, Object trues, Object falses) throws MetaException {
+    colType = colType.toLowerCase();
+    if (colType.equals("boolean")) {
+      BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
+      boolStats.setNumFalses(MetaStoreDirectSql.extractSqlLong(falses));
+      boolStats.setNumTrues(MetaStoreDirectSql.extractSqlLong(trues));
+      boolStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      data.setBooleanStats(boolStats);
+    } else if (colType.equals("string") ||
+        colType.startsWith("varchar") || colType.startsWith("char")) {
+      StringColumnStatsDataInspector stringStats = new 
StringColumnStatsDataInspector();
+      stringStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      stringStats.setAvgColLen(MetaStoreDirectSql.extractSqlDouble(avglen));
+      stringStats.setMaxColLen(MetaStoreDirectSql.extractSqlLong(maxlen));
+      stringStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist));
+      stringStats.setBitVectors(MetaStoreDirectSql.extractSqlBlob(bitVector));
+      data.setStringStats(stringStats);
+    } else if (colType.equals("binary")) {
+      BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
+      binaryStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      binaryStats.setAvgColLen(MetaStoreDirectSql.extractSqlDouble(avglen));
+      binaryStats.setMaxColLen(MetaStoreDirectSql.extractSqlLong(maxlen));
+      data.setBinaryStats(binaryStats);
+    } else if (colType.equals("bigint") || colType.equals("int") ||
+        colType.equals("smallint") || colType.equals("tinyint") ||
+        colType.equals("timestamp")) {
+      LongColumnStatsDataInspector longStats = new 
LongColumnStatsDataInspector();
+      longStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      if (lhigh != null) {
+        longStats.setHighValue(MetaStoreDirectSql.extractSqlLong(lhigh));
+      }
+      if (llow != null) {
+        longStats.setLowValue(MetaStoreDirectSql.extractSqlLong(llow));
+      }
+      longStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist));
+      longStats.setBitVectors(MetaStoreDirectSql.extractSqlBlob(bitVector));
+      data.setLongStats(longStats);
+    } else if (colType.equals("double") || colType.equals("float")) {
+      DoubleColumnStatsDataInspector doubleStats = new 
DoubleColumnStatsDataInspector();
+      doubleStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      if (dhigh != null) {
+        doubleStats.setHighValue(MetaStoreDirectSql.extractSqlDouble(dhigh));
+      }
+      if (dlow != null) {
+        doubleStats.setLowValue(MetaStoreDirectSql.extractSqlDouble(dlow));
+      }
+      doubleStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist));
+      doubleStats.setBitVectors(MetaStoreDirectSql.extractSqlBlob(bitVector));
+      data.setDoubleStats(doubleStats);
+    } else if (colType.startsWith("decimal")) {
+      DecimalColumnStatsDataInspector decimalStats = new 
DecimalColumnStatsDataInspector();
+      decimalStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      if (dechigh != null) {
+        decimalStats.setHighValue(createThriftDecimal((String)dechigh));
+      }
+      if (declow != null) {
+        decimalStats.setLowValue(createThriftDecimal((String)declow));
+      }
+      decimalStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist));
+      decimalStats.setBitVectors(MetaStoreDirectSql.extractSqlBlob(bitVector));
+      data.setDecimalStats(decimalStats);
+    } else if (colType.equals("date")) {
+      DateColumnStatsDataInspector dateStats = new 
DateColumnStatsDataInspector();
+      dateStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      if (lhigh != null) {
+        dateStats.setHighValue(new 
Date(MetaStoreDirectSql.extractSqlLong(lhigh)));
+      }
+      if (llow != null) {
+        dateStats.setLowValue(new 
Date(MetaStoreDirectSql.extractSqlLong(llow)));
+      }
+      dateStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist));
+      dateStats.setBitVectors(MetaStoreDirectSql.extractSqlBlob(bitVector));
+      data.setDateStats(dateStats);
+    }
+  }
+
+  //DB
+  public static void fillColumnStatisticsData(String colType, 
ColumnStatisticsData data,
+      Object llow, Object lhigh, Object dlow, Object dhigh, Object declow, 
Object dechigh,
+      Object nulls, Object dist, Object avglen, Object maxlen, Object trues, 
Object falses,
+      Object avgLong, Object avgDouble, Object avgDecimal, Object sumDist,
+      boolean useDensityFunctionForNDVEstimation, double ndvTuner) throws 
MetaException {
+    colType = colType.toLowerCase();
+    if (colType.equals("boolean")) {
+      BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
+      boolStats.setNumFalses(MetaStoreDirectSql.extractSqlLong(falses));
+      boolStats.setNumTrues(MetaStoreDirectSql.extractSqlLong(trues));
+      boolStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      data.setBooleanStats(boolStats);
+    } else if (colType.equals("string") || colType.startsWith("varchar")
+        || colType.startsWith("char")) {
+      StringColumnStatsDataInspector stringStats = new 
StringColumnStatsDataInspector();
+      stringStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      stringStats.setAvgColLen(MetaStoreDirectSql.extractSqlDouble(avglen));
+      stringStats.setMaxColLen(MetaStoreDirectSql.extractSqlLong(maxlen));
+      stringStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist));
+      data.setStringStats(stringStats);
+    } else if (colType.equals("binary")) {
+      BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
+      binaryStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      binaryStats.setAvgColLen(MetaStoreDirectSql.extractSqlDouble(avglen));
+      binaryStats.setMaxColLen(MetaStoreDirectSql.extractSqlLong(maxlen));
+      data.setBinaryStats(binaryStats);
+    } else if (colType.equals("bigint") || colType.equals("int") || 
colType.equals("smallint")
+        || colType.equals("tinyint") || colType.equals("timestamp")) {
+      LongColumnStatsDataInspector longStats = new 
LongColumnStatsDataInspector();
+      longStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      if (lhigh != null) {
+        longStats.setHighValue(MetaStoreDirectSql.extractSqlLong(lhigh));
+      }
+      if (llow != null) {
+        longStats.setLowValue(MetaStoreDirectSql.extractSqlLong(llow));
+      }
+      long lowerBound = MetaStoreDirectSql.extractSqlLong(dist);
+      long higherBound = MetaStoreDirectSql.extractSqlLong(sumDist);
+      long rangeBound = Long.MAX_VALUE;
+      if (lhigh != null && llow != null) {
+        rangeBound = MetaStoreDirectSql.extractSqlLong(lhigh)
+            - MetaStoreDirectSql.extractSqlLong(llow) + 1;
+      }
+      long estimation;
+      if (useDensityFunctionForNDVEstimation && lhigh != null && llow != null 
&& avgLong != null
+          && MetaStoreDirectSql.extractSqlDouble(avgLong) != 0.0) {
+        // We have estimation, lowerbound and higherbound. We use estimation if
+        // it is between lowerbound and higherbound.
+        estimation = MetaStoreDirectSql
+            .extractSqlLong((MetaStoreDirectSql.extractSqlLong(lhigh) - 
MetaStoreDirectSql
+                .extractSqlLong(llow)) / 
MetaStoreDirectSql.extractSqlDouble(avgLong));
+        if (estimation < lowerBound) {
+          estimation = lowerBound;
+        } else if (estimation > higherBound) {
+          estimation = higherBound;
+        }
+      } else {
+        estimation = (long) (lowerBound + (higherBound - lowerBound) * 
ndvTuner);
+      }
+      estimation = Math.min(estimation, rangeBound);
+      longStats.setNumDVs(estimation);
+      data.setLongStats(longStats);
+    } else if (colType.equals("date")) {
+      DateColumnStatsDataInspector dateStats = new 
DateColumnStatsDataInspector();
+      dateStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      if (lhigh != null) {
+        dateStats.setHighValue(new 
Date(MetaStoreDirectSql.extractSqlLong(lhigh)));
+      }
+      if (llow != null) {
+        dateStats.setLowValue(new 
Date(MetaStoreDirectSql.extractSqlLong(llow)));
+      }
+      long lowerBound = MetaStoreDirectSql.extractSqlLong(dist);
+      long higherBound = MetaStoreDirectSql.extractSqlLong(sumDist);
+      long rangeBound = Long.MAX_VALUE;
+      if (lhigh != null && llow != null) {
+        rangeBound = MetaStoreDirectSql.extractSqlLong(lhigh)
+            - MetaStoreDirectSql.extractSqlLong(llow) + 1;
+      }
+      long estimation;
+      if (useDensityFunctionForNDVEstimation && lhigh != null && llow != null 
&& avgLong != null
+          && MetaStoreDirectSql.extractSqlDouble(avgLong) != 0.0) {
+        // We have estimation, lowerbound and higherbound. We use estimation if
+        // it is between lowerbound and higherbound.
+        estimation = MetaStoreDirectSql
+            .extractSqlLong((MetaStoreDirectSql.extractSqlLong(lhigh) - 
MetaStoreDirectSql
+                .extractSqlLong(llow)) / 
MetaStoreDirectSql.extractSqlDouble(avgLong));
+        if (estimation < lowerBound) {
+          estimation = lowerBound;
+        } else if (estimation > higherBound) {
+          estimation = higherBound;
+        }
+      } else {
+        estimation = (long) (lowerBound + (higherBound - lowerBound) * 
ndvTuner);
+      }
+      estimation = Math.min(estimation, rangeBound);
+      dateStats.setNumDVs(estimation);
+      data.setDateStats(dateStats);
+    } else if (colType.equals("double") || colType.equals("float")) {
+      DoubleColumnStatsDataInspector doubleStats = new 
DoubleColumnStatsDataInspector();
+      doubleStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      if (dhigh != null) {
+        doubleStats.setHighValue(MetaStoreDirectSql.extractSqlDouble(dhigh));
+      }
+      if (dlow != null) {
+        doubleStats.setLowValue(MetaStoreDirectSql.extractSqlDouble(dlow));
+      }
+      long lowerBound = MetaStoreDirectSql.extractSqlLong(dist);
+      long higherBound = MetaStoreDirectSql.extractSqlLong(sumDist);
+      if (useDensityFunctionForNDVEstimation && dhigh != null && dlow != null 
&& avgDouble != null
+          && MetaStoreDirectSql.extractSqlDouble(avgDouble) != 0.0) {
+        long estimation = MetaStoreDirectSql
+            .extractSqlLong((MetaStoreDirectSql.extractSqlLong(dhigh) - 
MetaStoreDirectSql
+                .extractSqlLong(dlow)) / 
MetaStoreDirectSql.extractSqlDouble(avgDouble));
+        if (estimation < lowerBound) {
+          doubleStats.setNumDVs(lowerBound);
+        } else if (estimation > higherBound) {
+          doubleStats.setNumDVs(higherBound);
+        } else {
+          doubleStats.setNumDVs(estimation);
+        }
+      } else {
+        doubleStats.setNumDVs((long) (lowerBound + (higherBound - lowerBound) 
* ndvTuner));
+      }
+      data.setDoubleStats(doubleStats);
+    } else if (colType.startsWith("decimal")) {
+      DecimalColumnStatsDataInspector decimalStats = new 
DecimalColumnStatsDataInspector();
+      decimalStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      Decimal low = null;
+      Decimal high = null;
+      BigDecimal blow = null;
+      BigDecimal bhigh = null;
+      if (dechigh instanceof BigDecimal) {
+        bhigh = (BigDecimal) dechigh;
+        high = new 
Decimal(ByteBuffer.wrap(bhigh.unscaledValue().toByteArray()),
+            (short) bhigh.scale());
+      } else if (dechigh instanceof String) {
+        bhigh = new BigDecimal((String) dechigh);
+        high = createThriftDecimal((String) dechigh);
+      }
+      decimalStats.setHighValue(high);
+      if (declow instanceof BigDecimal) {
+        blow = (BigDecimal) declow;
+        low = new Decimal(ByteBuffer.wrap(blow.unscaledValue().toByteArray()), 
(short) blow.scale());
+      } else if (dechigh instanceof String) {
+        blow = new BigDecimal((String) declow);
+        low = createThriftDecimal((String) declow);
+      }
+      decimalStats.setLowValue(low);
+      long lowerBound = MetaStoreDirectSql.extractSqlLong(dist);
+      long higherBound = MetaStoreDirectSql.extractSqlLong(sumDist);
+      if (useDensityFunctionForNDVEstimation && dechigh != null && declow != 
null && avgDecimal != null
+          && MetaStoreDirectSql.extractSqlDouble(avgDecimal) != 0.0) {
+        long estimation = 
MetaStoreDirectSql.extractSqlLong(MetaStoreDirectSql.extractSqlLong(bhigh
+            .subtract(blow).floatValue() / 
MetaStoreDirectSql.extractSqlDouble(avgDecimal)));
+        if (estimation < lowerBound) {
+          decimalStats.setNumDVs(lowerBound);
+        } else if (estimation > higherBound) {
+          decimalStats.setNumDVs(higherBound);
+        } else {
+          decimalStats.setNumDVs(estimation);
+        }
+      } else {
+        decimalStats.setNumDVs((long) (lowerBound + (higherBound - lowerBound) 
* ndvTuner));
+      }
+      data.setDecimalStats(decimalStats);
+    }
+  }
+
+  public static Decimal createThriftDecimal(String s) {
+    BigDecimal d = new BigDecimal(s);
+    return new Decimal(ByteBuffer.wrap(d.unscaledValue().toByteArray()), 
(short)d.scale());
+  }
+
+  private static String createJdoDecimalString(Decimal d) {
+    return new BigDecimal(new BigInteger(d.getUnscaled()), 
d.getScale()).toString();
+  }
+
+  /**
+   * Set field values in oldStatObj from newStatObj
+   * @param oldStatObj
+   * @param newStatObj
+   */
+  public static void setFieldsIntoOldStats(ColumnStatisticsObj oldStatObj,
+      ColumnStatisticsObj newStatObj) {
+    _Fields typeNew = newStatObj.getStatsData().getSetField();
+    _Fields typeOld = oldStatObj.getStatsData().getSetField();
+    typeNew = typeNew == typeOld ? typeNew : null;
+    switch (typeNew) {
+    case BOOLEAN_STATS:
+      BooleanColumnStatsData oldBooleanStatsData = 
oldStatObj.getStatsData().getBooleanStats();
+      BooleanColumnStatsData newBooleanStatsData = 
newStatObj.getStatsData().getBooleanStats();
+      if (newBooleanStatsData.isSetNumTrues()) {
+        oldBooleanStatsData.setNumTrues(newBooleanStatsData.getNumTrues());
+      }
+      if (newBooleanStatsData.isSetNumFalses()) {
+        oldBooleanStatsData.setNumFalses(newBooleanStatsData.getNumFalses());
+      }
+      if (newBooleanStatsData.isSetNumNulls()) {
+        oldBooleanStatsData.setNumNulls(newBooleanStatsData.getNumNulls());
+      }
+      if (newBooleanStatsData.isSetBitVectors()) {
+        oldBooleanStatsData.setBitVectors(newBooleanStatsData.getBitVectors());
+      }
+      break;
+    case LONG_STATS: {
+      LongColumnStatsData oldLongStatsData = 
oldStatObj.getStatsData().getLongStats();
+      LongColumnStatsData newLongStatsData = 
newStatObj.getStatsData().getLongStats();
+      if (newLongStatsData.isSetHighValue()) {
+        oldLongStatsData.setHighValue(newLongStatsData.getHighValue());
+      }
+      if (newLongStatsData.isSetLowValue()) {
+        oldLongStatsData.setLowValue(newLongStatsData.getLowValue());
+      }
+      if (newLongStatsData.isSetNumNulls()) {
+        oldLongStatsData.setNumNulls(newLongStatsData.getNumNulls());
+      }
+      if (newLongStatsData.isSetNumDVs()) {
+        oldLongStatsData.setNumDVs(newLongStatsData.getNumDVs());
+      }
+      if (newLongStatsData.isSetBitVectors()) {
+        oldLongStatsData.setBitVectors(newLongStatsData.getBitVectors());
+      }
+      break;
+    }
+    case DOUBLE_STATS: {
+      DoubleColumnStatsData oldDoubleStatsData = 
oldStatObj.getStatsData().getDoubleStats();
+      DoubleColumnStatsData newDoubleStatsData = 
newStatObj.getStatsData().getDoubleStats();
+      if (newDoubleStatsData.isSetHighValue()) {
+        oldDoubleStatsData.setHighValue(newDoubleStatsData.getHighValue());
+      }
+      if (newDoubleStatsData.isSetLowValue()) {
+        oldDoubleStatsData.setLowValue(newDoubleStatsData.getLowValue());
+      }
+      if (newDoubleStatsData.isSetNumNulls()) {
+        oldDoubleStatsData.setNumNulls(newDoubleStatsData.getNumNulls());
+      }
+      if (newDoubleStatsData.isSetNumDVs()) {
+        oldDoubleStatsData.setNumDVs(newDoubleStatsData.getNumDVs());
+      }
+      if (newDoubleStatsData.isSetBitVectors()) {
+        oldDoubleStatsData.setBitVectors(newDoubleStatsData.getBitVectors());
+      }
+      break;
+    }
+    case STRING_STATS: {
+      StringColumnStatsData oldStringStatsData = 
oldStatObj.getStatsData().getStringStats();
+      StringColumnStatsData newStringStatsData = 
newStatObj.getStatsData().getStringStats();
+      if (newStringStatsData.isSetMaxColLen()) {
+        oldStringStatsData.setMaxColLen(newStringStatsData.getMaxColLen());
+      }
+      if (newStringStatsData.isSetAvgColLen()) {
+        oldStringStatsData.setAvgColLen(newStringStatsData.getAvgColLen());
+      }
+      if (newStringStatsData.isSetNumNulls()) {
+        oldStringStatsData.setNumNulls(newStringStatsData.getNumNulls());
+      }
+      if (newStringStatsData.isSetNumDVs()) {
+        oldStringStatsData.setNumDVs(newStringStatsData.getNumDVs());
+      }
+      if (newStringStatsData.isSetBitVectors()) {
+        oldStringStatsData.setBitVectors(newStringStatsData.getBitVectors());
+      }
+      break;
+    }
+    case BINARY_STATS:
+      BinaryColumnStatsData oldBinaryStatsData = 
oldStatObj.getStatsData().getBinaryStats();
+      BinaryColumnStatsData newBinaryStatsData = 
newStatObj.getStatsData().getBinaryStats();
+      if (newBinaryStatsData.isSetMaxColLen()) {
+        oldBinaryStatsData.setMaxColLen(newBinaryStatsData.getMaxColLen());
+      }
+      if (newBinaryStatsData.isSetAvgColLen()) {
+        oldBinaryStatsData.setAvgColLen(newBinaryStatsData.getAvgColLen());
+      }
+      if (newBinaryStatsData.isSetNumNulls()) {
+        oldBinaryStatsData.setNumNulls(newBinaryStatsData.getNumNulls());
+      }
+      if (newBinaryStatsData.isSetBitVectors()) {
+        oldBinaryStatsData.setBitVectors(newBinaryStatsData.getBitVectors());
+      }
+      break;
+    case DECIMAL_STATS: {
+      DecimalColumnStatsData oldDecimalStatsData = 
oldStatObj.getStatsData().getDecimalStats();
+      DecimalColumnStatsData newDecimalStatsData = 
newStatObj.getStatsData().getDecimalStats();
+      if (newDecimalStatsData.isSetHighValue()) {
+        oldDecimalStatsData.setHighValue(newDecimalStatsData.getHighValue());
+      }
+      if (newDecimalStatsData.isSetLowValue()) {
+        oldDecimalStatsData.setLowValue(newDecimalStatsData.getLowValue());
+      }
+      if (newDecimalStatsData.isSetNumNulls()) {
+        oldDecimalStatsData.setNumNulls(newDecimalStatsData.getNumNulls());
+      }
+      if (newDecimalStatsData.isSetNumDVs()) {
+        oldDecimalStatsData.setNumDVs(newDecimalStatsData.getNumDVs());
+      }
+      if (newDecimalStatsData.isSetBitVectors()) {
+        oldDecimalStatsData.setBitVectors(newDecimalStatsData.getBitVectors());
+      }
+      break;
+    }
+    case DATE_STATS: {
+      DateColumnStatsData oldDateStatsData = 
oldStatObj.getStatsData().getDateStats();
+      DateColumnStatsData newDateStatsData = 
newStatObj.getStatsData().getDateStats();
+      if (newDateStatsData.isSetHighValue()) {
+        oldDateStatsData.setHighValue(newDateStatsData.getHighValue());
+      }
+      if (newDateStatsData.isSetLowValue()) {
+        oldDateStatsData.setLowValue(newDateStatsData.getLowValue());
+      }
+      if (newDateStatsData.isSetNumNulls()) {
+        oldDateStatsData.setNumNulls(newDateStatsData.getNumNulls());
+      }
+      if (newDateStatsData.isSetNumDVs()) {
+        oldDateStatsData.setNumDVs(newDateStatsData.getNumDVs());
+      }
+      if (newDateStatsData.isSetBitVectors()) {
+        oldDateStatsData.setBitVectors(newDateStatsData.getBitVectors());
+      }
+      break;
+    }
+    default:
+      throw new IllegalArgumentException("Unknown stats type: " + 
typeNew.toString());
+    }
+  }
+}

Reply via email to