Repository: hive
Updated Branches:
  refs/heads/master bbb312f36 -> a014cffed


HIVE-12005 : Remove hbase based stats collection mechanism (Ashutosh Chauhan via Prasanth J)

Signed-off-by: Ashutosh Chauhan <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a014cffe
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a014cffe
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a014cffe

Branch: refs/heads/master
Commit: a014cffed990f8f499d797453f50f436b94fd280
Parents: bbb312f
Author: Ashutosh Chauhan <[email protected]>
Authored: Wed Sep 30 18:15:07 2015 -0700
Committer: Ashutosh Chauhan <[email protected]>
Committed: Sat Oct 3 11:36:23 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hive/common/StatsSetupConst.java     |   8 -
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   4 +-
 .../hadoop/hive/hbase/HBaseStatsAggregator.java | 128 --------
 .../hadoop/hive/hbase/HBaseStatsPublisher.java  | 154 ---------
 .../hive/hbase/HBaseStatsSetupConstants.java    |  34 --
 .../hadoop/hive/hbase/HBaseStatsUtils.java      | 135 --------
 .../src/test/queries/positive/hbase_stats.q     |  30 --
 .../src/test/queries/positive/hbase_stats2.q    |  31 --
 .../positive/hbase_stats_empty_partition.q      |  13 -
 .../src/test/results/positive/hbase_stats.q.out | 311 -------------------
 .../test/results/positive/hbase_stats2.q.out    | 311 -------------------
 .../positive/hbase_stats_empty_partition.q.out  |  63 ----
 12 files changed, 2 insertions(+), 1220 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
index c70cdfb..b16b231 100644
--- a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
+++ b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
@@ -31,14 +31,6 @@ import java.util.Map;
 public class StatsSetupConst {
 
   public enum StatDB {
-    hbase {
-      @Override
-      public String getPublisher(Configuration conf) {
-        return "org.apache.hadoop.hive.hbase.HBaseStatsPublisher"; }
-      @Override
-      public String getAggregator(Configuration conf) {
-        return "org.apache.hadoop.hive.hbase.HBaseStatsAggregator"; }
-    },
     jdbc {
       @Override
       public String getPublisher(Configuration conf) {

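With the hbase entry removed, each remaining StatDB value still maps a hive.stats.dbclass setting to publisher and aggregator class names. A minimal sketch of how the configured backend could be resolved and instantiated via reflection; the method name is illustrative and handling for "jdbc:<db>" values and checked exceptions is elided, so this is not Hive's actual factory code:

  // Sketch only: resolve hive.stats.dbclass to its publisher class and
  // instantiate it by reflection. "jdbc:<db>" values need extra parsing
  // before StatDB.valueOf() would accept them.
  StatsPublisher resolvePublisher(Configuration conf) throws Exception {
    String dbclass = HiveConf.getVar(conf, HiveConf.ConfVars.HIVESTATSDBCLASS); // e.g. "fs"
    StatsSetupConst.StatDB statDB = StatsSetupConst.StatDB.valueOf(dbclass);
    String className = statDB.getPublisher(conf); // fully qualified class name
    return (StatsPublisher) Class.forName(className).newInstance();
  }
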
http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 77ca613..33ef654 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1341,11 +1341,11 @@ public class HiveConf extends Configuration {
     // Statistics
     HIVESTATSAUTOGATHER("hive.stats.autogather", true,
         "A flag to gather statistics automatically during the INSERT OVERWRITE 
command."),
-    HIVESTATSDBCLASS("hive.stats.dbclass", "fs", new PatternSet("jdbc(:.*)", 
"hbase", "counter", "custom", "fs"),
+    HIVESTATSDBCLASS("hive.stats.dbclass", "fs", new PatternSet("jdbc(:.*)", 
"counter", "custom", "fs"),
         "The storage that stores temporary Hive statistics. In filesystem 
based statistics collection ('fs'), \n" +
         "each task writes statistics it has collected in a file on the 
filesystem, which will be aggregated \n" +
         "after the job has finished. Supported values are fs (filesystem), 
jdbc:database (where database \n" +
-        "can be derby, mysql, etc.), hbase, counter, and custom as defined in 
StatsSetupConst.java."), // StatsSetupConst.StatDB
+        "can be derby, mysql, etc.), counter, and custom as defined in 
StatsSetupConst.java."), // StatsSetupConst.StatDB
     HIVESTATSJDBCDRIVER("hive.stats.jdbcdriver",
         "org.apache.derby.jdbc.EmbeddedDriver",
         "The JDBC driver for the database that stores temporary Hive 
statistics."),

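Because hive.stats.dbclass is validated against the PatternSet above, removing "hbase" means that value is now rejected when the property is set, rather than failing later at stats-publish time. A hedged sketch of the effect (verifyAndSet is HiveConf's validating setter; the exact failure behavior on an invalid value is assumed here, not shown in this diff):

  HiveConf conf = new HiveConf();
  conf.verifyAndSet("hive.stats.dbclass", "fs");         // accepted: in the PatternSet
  conf.verifyAndSet("hive.stats.dbclass", "jdbc:derby"); // accepted: matches "jdbc(:.*)"
  conf.verifyAndSet("hive.stats.dbclass", "hbase");      // rejected after this change
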
http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsAggregator.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsAggregator.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsAggregator.java
deleted file mode 100644
index 1b96232..0000000
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsAggregator.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.hbase;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.PrefixFilter;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.stats.StatsAggregator;
-
-
-/**
- * A class that implements the StatsAggregator interface through HBase.
- */
-public class HBaseStatsAggregator implements StatsAggregator {
-
-  private HTable htable;
-  private final Log LOG = LogFactory.getLog(this.getClass().getName());
-
-  /**
-   * Does the necessary HBase initializations.
-   */
-  public boolean connect(Configuration hiveconf, Task sourceTask) {
-
-    try {
-      htable = new HTable(HBaseConfiguration.create(hiveconf),
-        HBaseStatsSetupConstants.PART_STAT_TABLE_NAME);
-
-      return true;
-    } catch (IOException e) {
-      LOG.error("Error during HBase connection. ", e);
-      return false;
-    }
-  }
-
-  /**
-   * Aggregates temporary stats from HBase;
-   */
-  public String aggregateStats(String rowID, String key) {
-
-    byte[] family, column;
-    if (!HBaseStatsUtils.isValidStatistic(key)) {
-      LOG.warn("Warning. Invalid statistic: " + key + ", supported stats: " +
-          HBaseStatsUtils.getSupportedStatistics());
-      return null;
-    }
-
-    family = HBaseStatsUtils.getFamilyName();
-    column = HBaseStatsUtils.getColumnName(key);
-
-    try {
-
-      long retValue = 0;
-      Scan scan = new Scan();
-      scan.addColumn(family, column);
-      // Filter the row by its ID
-      // The complete key is "tableName/PartSpecs/jobID/taskID"
-      // This is a prefix filter, the prefix is "tableName/PartSpecs/JobID", i.e. the taskID is
-      // ignored. In SQL, this is equivalent to
-      // "Select * FROM tableName where ID LIKE 'tableName/PartSpecs/JobID%';"
-      PrefixFilter filter = new PrefixFilter(Bytes.toBytes(rowID));
-      scan.setFilter(filter);
-      ResultScanner scanner = htable.getScanner(scan);
-
-      for (Result result : scanner) {
-        retValue += Long.parseLong(Bytes.toString(result.getValue(family, column)));
-      }
-      return Long.toString(retValue);
-    } catch (IOException e) {
-      LOG.error("Error during publishing aggregation. ", e);
-      return null;
-    }
-  }
-
-  public boolean closeConnection() {
-    return true;
-  }
-
-  public boolean cleanUp(String rowID) {
-    try {
-      Scan scan = new Scan();
-      // Filter the row by its ID
-      // The complete key is "tableName/PartSpecs/jobID/taskID"
-      // This is a prefix filter, the prefix is "JobID"
-      // In SQL, this is equivalent to "Select * FROM tableName where ID LIKE 'JobID%';"
-      PrefixFilter filter = new PrefixFilter(Bytes.toBytes(rowID));
-      scan.setFilter(filter);
-      ResultScanner scanner = htable.getScanner(scan);
-      ArrayList<Delete> toDelete = new ArrayList<Delete>();
-      for (Result result : scanner) {
-        Delete delete = new Delete(result.getRow());
-        toDelete.add(delete);
-      }
-      htable.delete(toDelete);
-      return true;
-    } catch (IOException e) {
-      LOG.error("Error during publishing aggregation. ", e);
-      return false;
-    }
-  }
-}

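The StatsAggregator interface the deleted class implemented remains available to the "custom" dbclass. A minimal sketch with the same method signatures as the removed HBase implementation; the class and package names are hypothetical:

package example.stats; // hypothetical package

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.stats.StatsAggregator;

public class NoopStatsAggregator implements StatsAggregator {
  public boolean connect(Configuration hiveconf, Task sourceTask) {
    return true; // no connection to open
  }
  public String aggregateStats(String rowID, String key) {
    // A real backend sums the per-task values published under the rowID prefix,
    // as the deleted HBase implementation did with its PrefixFilter scan.
    return "0";
  }
  public boolean closeConnection() {
    return true;
  }
  public boolean cleanUp(String rowID) {
    return true; // nothing to delete
  }
}
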
http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsPublisher.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsPublisher.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsPublisher.java
deleted file mode 100644
index 8266b33..0000000
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsPublisher.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.hbase;
-
-import java.io.IOException;
-import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hive.ql.stats.StatsPublisher;
-
-/**
- * A class that implements the StatsPublisher interface through HBase.
- */
-public class HBaseStatsPublisher implements StatsPublisher {
-
-  private HTable htable;
-  private final Log LOG = LogFactory.getLog(this.getClass().getName());
-
-  /**
-   * Does the necessary HBase initializations.
-   */
-  public boolean connect(Configuration hiveconf) {
-
-    try {
-      htable = new HTable(HBaseConfiguration.create(hiveconf),
-        HBaseStatsSetupConstants.PART_STAT_TABLE_NAME);
-      // for performance reason, defer update until the closeConnection
-      htable.setAutoFlush(false);
-    } catch (IOException e) {
-      LOG.error("Error during HBase connection. " + e);
-      return false;
-    }
-
-    return true;
-  }
-
-  /**
-   * Writes temporary statistics into HBase;
-   */
-  public boolean publishStat(String rowID, Map<String, String> stats) {
-
-    // Write in HBase
-
-    if (stats.isEmpty()) {
-      // If there are no stats to publish, nothing to do.
-      return true;
-    }
-
-    if (!HBaseStatsUtils.isValidStatisticSet(stats.keySet())) {
-      LOG.warn("Warning. Invalid statistic: " + stats.keySet().toString()
-          + ", supported stats: "
-          + HBaseStatsUtils.getSupportedStatistics());
-      return false;
-    }
-
-    try {
-
-      // check the basic stat (e.g., row_count)
-
-      Get get = new Get(Bytes.toBytes(rowID));
-      Result result = htable.get(get);
-
-      byte[] family = HBaseStatsUtils.getFamilyName();
-      byte[] column = HBaseStatsUtils.getColumnName(HBaseStatsUtils.getBasicStat());
-
-      long val = Long.parseLong(HBaseStatsUtils.getStatFromMap(HBaseStatsUtils.getBasicStat(),
-          stats));
-      long oldVal = 0;
-
-      if (!result.isEmpty()) {
-        oldVal = Long.parseLong(Bytes.toString(result.getValue(family, column)));
-      }
-
-      if (oldVal >= val) {
-        return true; // we do not need to publish anything
-      }
-
-      // we need to update
-      Put row = new Put(Bytes.toBytes(rowID));
-      for (String statType : HBaseStatsUtils.getSupportedStatistics()) {
-        column = HBaseStatsUtils.getColumnName(statType);
-        row.add(family, column, Bytes.toBytes(HBaseStatsUtils.getStatFromMap(statType, stats)));
-      }
-
-      htable.put(row);
-      return true;
-
-    } catch (IOException e) {
-      LOG.error("Error during publishing statistics. " + e);
-      return false;
-    }
-  }
-
-  public boolean closeConnection() {
-    // batch update
-    try {
-      htable.flushCommits();
-      return true;
-    } catch (IOException e) {
-      LOG.error("Cannot commit changes in stats publishing.", e);
-      return false;
-    }
-  }
-
-
-  /**
-   * Does the necessary HBase initializations.
-   */
-  public boolean init(Configuration hiveconf) {
-    try {
-      HBaseAdmin hbase = new HBaseAdmin(HBaseConfiguration.create(hiveconf));
-
-      // Creating table if not exists
-      if (!hbase.tableExists(HBaseStatsSetupConstants.PART_STAT_TABLE_NAME)) {
-        HTableDescriptor table = new HTableDescriptor(HBaseStatsSetupConstants.PART_STAT_TABLE_NAME);
-        HColumnDescriptor family = new HColumnDescriptor(HBaseStatsUtils.getFamilyName());
-        table.addFamily(family);
-        hbase.createTable(table);
-      }
-    } catch (IOException e) {
-      LOG.error("Error during HBase initialization. " + e);
-      return false;
-    }
-
-    return true;
-  }
-}

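Similarly, StatsPublisher is still implementable for the "custom" dbclass. A minimal sketch mirroring the method signatures of the removed HBase publisher (init/connect/publishStat/closeConnection); names are hypothetical and the body only logs:

package example.stats; // hypothetical package

import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.stats.StatsPublisher;

public class LoggingStatsPublisher implements StatsPublisher {
  public boolean init(Configuration hiveconf) {
    return true; // one-time setup, e.g. creating backing storage
  }
  public boolean connect(Configuration hiveconf) {
    return true; // per-task connection
  }
  public boolean publishStat(String rowID, Map<String, String> stats) {
    System.out.println(rowID + " -> " + stats); // a real backend would persist these
    return true;
  }
  public boolean closeConnection() {
    return true; // flush any deferred writes, as the HBase publisher did
  }
}
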
http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsSetupConstants.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsSetupConstants.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsSetupConstants.java
deleted file mode 100644
index af97edf..0000000
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsSetupConstants.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.hbase;
-
-public final class HBaseStatsSetupConstants {
-
-  public static final String PART_STAT_TABLE_NAME = "PARTITION_STAT_TBL";
-
-  public static final String PART_STAT_COLUMN_FAMILY = "PARTITION_STAT_FAMILY";
-
-  //supported stats
-
-  public static final String PART_STAT_ROW_COUNT_COLUMN_NAME = "ROW_COUNT";
-
-  public static final String PART_STAT_RAW_DATA_SIZE_COLUMN_NAME = "RAW_DATA_SIZE";
-
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsUtils.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsUtils.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsUtils.java
deleted file mode 100644
index 6e8fac5..0000000
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsUtils.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.hbase;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hive.common.StatsSetupConst;
-
-
-
-public class HBaseStatsUtils {
-
-  private static final List<String> supportedStats = new ArrayList<String>();
-  private static final Map<String, String> columnNameMapping = new HashMap<String, String>();
-
-  static {
-    // supported statistics
-    supportedStats.add(StatsSetupConst.ROW_COUNT);
-    supportedStats.add(StatsSetupConst.RAW_DATA_SIZE);
-
-    // row count statistics
-    columnNameMapping.put(StatsSetupConst.ROW_COUNT,
-        HBaseStatsSetupConstants.PART_STAT_ROW_COUNT_COLUMN_NAME);
-
-    // raw data size
-    columnNameMapping.put(StatsSetupConst.RAW_DATA_SIZE,
-        HBaseStatsSetupConstants.PART_STAT_RAW_DATA_SIZE_COLUMN_NAME);
-
-  }
-
-  /**
-   * Returns the set of supported statistics
-   */
-  public static List<String> getSupportedStatistics() {
-    return supportedStats;
-  }
-
-  /**
-   * Retrieves the value for a particular stat from the published map.
-   *
-   * @param statType
-   *          - statistic type to be retrieved from the map
-   * @param stats
-   *          - stats map
-   * @return value for the given statistic as string, "0" if the statistic is not present
-   */
-  public static String getStatFromMap(String statType, Map<String, String> stats) {
-    String value = stats.get(statType);
-    if (value == null) {
-      return "0";
-    }
-    return value;
-  }
-
-  /**
-   * Check if the set to be published is within the supported statistics.
-   * It must also contain at least the basic statistics (used for comparison).
-   *
-   * @param stats
-   *          - stats to be published
-   * @return true if is a valid statistic set, false otherwise
-   */
-
-  public static boolean isValidStatisticSet(Collection<String> stats) {
-    if(!stats.contains(getBasicStat())) {
-      return false;
-    }
-    for (String stat : stats) {
-      if (!supportedStats.contains(stat)) {
-        return false;
-      }
-    }
-    return true;
-  }
-
-  /**
-   * Check if a particular statistic type is supported
-   *
-   * @param statType
-   *          - statistic to be published
-   * @return true if statType is supported, false otherwise
-   */
-  public static boolean isValidStatistic(String statType) {
-    return supportedStats.contains(statType);
-  }
-
-  /**
-   * Returns the HBase column where the statistics for the given type are stored.
-   *
-   * @param statType
-   *          - supported statistic.
-   * @return column name for the given statistic.
-   */
-  public static byte[] getColumnName(String statType) {
-    return Bytes.toBytes(columnNameMapping.get(statType));
-  }
-
-  /**
-   * Returns the family name for stored statistics.
-   */
-  public static byte[] getFamilyName() {
-    return Bytes.toBytes(HBaseStatsSetupConstants.PART_STAT_COLUMN_FAMILY);
-  }
-
-  /**
-   * Returns the basic type of the supported statistics.
-   * It is used to determine which statistics are fresher.
-   */
-
-  public static String getBasicStat() {
-    return supportedStats.get(0);
-  }
-
-}

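For reference, the contract of the deleted helpers is easiest to see from a short usage sketch (behavior grounded in the code above; the map contents are illustrative):

  Map<String, String> stats = new HashMap<String, String>();
  stats.put(StatsSetupConst.ROW_COUNT, "500");
  HBaseStatsUtils.getStatFromMap(StatsSetupConst.ROW_COUNT, stats);      // "500"
  HBaseStatsUtils.getStatFromMap(StatsSetupConst.RAW_DATA_SIZE, stats);  // "0" (absent defaults to "0")
  HBaseStatsUtils.isValidStatisticSet(stats.keySet());                   // true: contains the basic stat
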
http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/hbase-handler/src/test/queries/positive/hbase_stats.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_stats.q b/hbase-handler/src/test/queries/positive/hbase_stats.q
deleted file mode 100644
index 3350dde..0000000
--- a/hbase-handler/src/test/queries/positive/hbase_stats.q
+++ /dev/null
@@ -1,30 +0,0 @@
-set datanucleus.cache.collections=false;
-set hive.stats.autogather=true;
-set hive.stats.atomic=false;
-
-set hive.stats.dbclass=hbase;
-
-create table stats_src like src;
-insert overwrite table stats_src select * from src;
-analyze table stats_src compute statistics;
-desc formatted stats_src;
-
-create table stats_part like srcpart;
-
-insert overwrite table stats_part partition (ds='2010-04-08', hr = '11') select key, value from src;
-insert overwrite table stats_part partition (ds='2010-04-08', hr = '12') select key, value from src;
-
-analyze table stats_part partition(ds='2010-04-08', hr='11') compute statistics;
-analyze table stats_part partition(ds='2010-04-08', hr='12') compute statistics;
-
-insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src;
-
-desc formatted stats_part;
-desc formatted stats_part partition (ds='2010-04-08', hr = '11');
-desc formatted stats_part partition (ds='2010-04-08', hr = '12');
-
-analyze table stats_part partition(ds, hr) compute statistics;
-desc formatted stats_part;
-
-drop table stats_src;
-drop table stats_part;

http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/hbase-handler/src/test/queries/positive/hbase_stats2.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_stats2.q b/hbase-handler/src/test/queries/positive/hbase_stats2.q
deleted file mode 100644
index f6c71c3..0000000
--- a/hbase-handler/src/test/queries/positive/hbase_stats2.q
+++ /dev/null
@@ -1,31 +0,0 @@
-set datanucleus.cache.collections=false;
-set hive.stats.autogather=true;
-set hive.stats.atomic=false;
-set hive.stats.collect.rawdatasize=false;
-
-set hive.stats.dbclass=hbase;
-
-create table stats_src like src;
-insert overwrite table stats_src select * from src;
-analyze table stats_src compute statistics;
-desc formatted stats_src;
-
-create table stats_part like srcpart;
-
-insert overwrite table stats_part partition (ds='2010-04-08', hr = '11') select key, value from src;
-insert overwrite table stats_part partition (ds='2010-04-08', hr = '12') select key, value from src;
-
-analyze table stats_part partition(ds='2010-04-08', hr='11') compute statistics;
-analyze table stats_part partition(ds='2010-04-08', hr='12') compute statistics;
-
-insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src;
-
-desc formatted stats_part;
-desc formatted stats_part partition (ds='2010-04-08', hr = '11');
-desc formatted stats_part partition (ds='2010-04-08', hr = '12');
-
-analyze table stats_part partition(ds, hr) compute statistics;
-desc formatted stats_part;
-
-drop table stats_src;
-drop table stats_part;

http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/hbase-handler/src/test/queries/positive/hbase_stats_empty_partition.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_stats_empty_partition.q b/hbase-handler/src/test/queries/positive/hbase_stats_empty_partition.q
deleted file mode 100644
index 24f4616..0000000
--- a/hbase-handler/src/test/queries/positive/hbase_stats_empty_partition.q
+++ /dev/null
@@ -1,13 +0,0 @@
--- This test verifies that writing an empty partition succeeds when
--- hive.stats.reliable is set to true.
-
-create table tmptable(key string, value string) partitioned by (part string);
-
-set hive.stats.autogather=true;
-set hive.stats.reliable=true;
-
-set hive.stats.dbclass=hbase;
-
-insert overwrite table tmptable partition (part = '1') select * from src where key = 'no_such_value';
-
-describe formatted tmptable partition (part = '1');

http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/hbase-handler/src/test/results/positive/hbase_stats.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_stats.q.out b/hbase-handler/src/test/results/positive/hbase_stats.q.out
deleted file mode 100644
index f34720d..0000000
--- a/hbase-handler/src/test/results/positive/hbase_stats.q.out
+++ /dev/null
@@ -1,311 +0,0 @@
-PREHOOK: query: create table stats_src like src
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@stats_src
-POSTHOOK: query: create table stats_src like src
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@stats_src
-PREHOOK: query: insert overwrite table stats_src select * from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_src
-POSTHOOK: query: insert overwrite table stats_src select * from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_src
-POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: analyze table stats_src compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_src
-PREHOOK: Output: default@stats_src
-POSTHOOK: query: analyze table stats_src compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_src
-POSTHOOK: Output: default@stats_src
-PREHOOK: query: desc formatted stats_src
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_src
-POSTHOOK: query: desc formatted stats_src
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_src
-# col_name             data_type               comment             
-                
-key                    string                  default             
-value                  string                  default             
-                
-# Detailed Table Information            
-Database:              default                  
-#### A masked pattern was here ####
-Retention:             0                        
-#### A masked pattern was here ####
-Table Type:            MANAGED_TABLE            
-Table Parameters:               
-       COLUMN_STATS_ACCURATE   true                
-       numFiles                1                   
-       numRows                 500                 
-       rawDataSize             5312                
-       totalSize               5812                
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat
-OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: create table stats_part like srcpart
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@stats_part
-POSTHOOK: query: create table stats_part like srcpart
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@stats_part
-PREHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '11') select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '11') select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '12') select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '12') select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=12).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=12).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='11') compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_part
-PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
-PREHOOK: Output: default@stats_part
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='11') compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_part
-POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: Output: default@stats_part
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-PREHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='12') compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_part
-PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
-PREHOOK: Output: default@stats_part
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='12') compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_part
-POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: Output: default@stats_part
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-PREHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
-POSTHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: desc formatted stats_part
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_part
-POSTHOOK: query: desc formatted stats_part
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_part
-# col_name             data_type               comment             
-                
-key                    string                  default             
-value                  string                  default             
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-hr                     string                                      
-                
-# Detailed Table Information            
-Database:              default                  
-#### A masked pattern was here ####
-Retention:             0                        
-#### A masked pattern was here ####
-Table Type:            MANAGED_TABLE            
-Table Parameters:               
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat
-OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr = '11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_part
-POSTHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr = '11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_part
-# col_name             data_type               comment             
-                
-key                    string                  default             
-value                  string                  default             
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-hr                     string                                      
-                
-# Detailed Partition Information                
-Partition Value:       [2010-04-08, 11]         
-Database:              default                  
-Table:                 stats_part               
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   true                
-       numFiles                1                   
-       numRows                 500                 
-       rawDataSize             5312                
-       totalSize               5812                
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat
-OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr = '12')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_part
-POSTHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr = '12')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_part
-# col_name             data_type               comment             
-                
-key                    string                  default             
-value                  string                  default             
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-hr                     string                                      
-                
-# Detailed Partition Information                
-Partition Value:       [2010-04-08, 12]         
-Database:              default                  
-Table:                 stats_part               
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   true                
-       numFiles                1                   
-       numRows                 500                 
-       rawDataSize             5312                
-       totalSize               5812                
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat
-OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: analyze table stats_part partition(ds, hr) compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_part
-PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
-PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
-PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=13
-PREHOOK: Output: default@stats_part
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
-POSTHOOK: query: analyze table stats_part partition(ds, hr) compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_part
-POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=13
-POSTHOOK: Output: default@stats_part
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
-PREHOOK: query: desc formatted stats_part
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_part
-POSTHOOK: query: desc formatted stats_part
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_part
-# col_name             data_type               comment             
-                
-key                    string                  default             
-value                  string                  default             
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-hr                     string                                      
-                
-# Detailed Table Information            
-Database:              default                  
-#### A masked pattern was here ####
-Retention:             0                        
-#### A masked pattern was here ####
-Table Type:            MANAGED_TABLE            
-Table Parameters:               
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat
-OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: drop table stats_src
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@stats_src
-PREHOOK: Output: default@stats_src
-POSTHOOK: query: drop table stats_src
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@stats_src
-POSTHOOK: Output: default@stats_src
-PREHOOK: query: drop table stats_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@stats_part
-PREHOOK: Output: default@stats_part
-POSTHOOK: query: drop table stats_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@stats_part
-POSTHOOK: Output: default@stats_part

http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/hbase-handler/src/test/results/positive/hbase_stats2.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_stats2.q.out b/hbase-handler/src/test/results/positive/hbase_stats2.q.out
deleted file mode 100644
index aad2e3a..0000000
--- a/hbase-handler/src/test/results/positive/hbase_stats2.q.out
+++ /dev/null
@@ -1,311 +0,0 @@
-PREHOOK: query: create table stats_src like src
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@stats_src
-POSTHOOK: query: create table stats_src like src
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@stats_src
-PREHOOK: query: insert overwrite table stats_src select * from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_src
-POSTHOOK: query: insert overwrite table stats_src select * from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_src
-POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: analyze table stats_src compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_src
-PREHOOK: Output: default@stats_src
-POSTHOOK: query: analyze table stats_src compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_src
-POSTHOOK: Output: default@stats_src
-PREHOOK: query: desc formatted stats_src
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_src
-POSTHOOK: query: desc formatted stats_src
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_src
-# col_name             data_type               comment             
-                
-key                    string                  default             
-value                  string                  default             
-                
-# Detailed Table Information            
-Database:              default                  
-#### A masked pattern was here ####
-Retention:             0                        
-#### A masked pattern was here ####
-Table Type:            MANAGED_TABLE            
-Table Parameters:               
-       COLUMN_STATS_ACCURATE   true                
-       numFiles                1                   
-       numRows                 500                 
-       rawDataSize             0                   
-       totalSize               5812                
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat
-OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: create table stats_part like srcpart
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@stats_part
-POSTHOOK: query: create table stats_part like srcpart
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@stats_part
-PREHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '11') select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '11') select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '12') select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '12') select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=12).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=12).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='11') compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_part
-PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
-PREHOOK: Output: default@stats_part
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='11') compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_part
-POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: Output: default@stats_part
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-PREHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='12') compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_part
-PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
-PREHOOK: Output: default@stats_part
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='12') compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_part
-POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: Output: default@stats_part
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-PREHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
-POSTHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: desc formatted stats_part
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_part
-POSTHOOK: query: desc formatted stats_part
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_part
-# col_name             data_type               comment             
-                
-key                    string                  default             
-value                  string                  default             
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-hr                     string                                      
-                
-# Detailed Table Information            
-Database:              default                  
-#### A masked pattern was here ####
-Retention:             0                        
-#### A masked pattern was here ####
-Table Type:            MANAGED_TABLE            
-Table Parameters:               
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat
-OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr = '11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_part
-POSTHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr = '11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_part
-# col_name             data_type               comment             
-                
-key                    string                  default             
-value                  string                  default             
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-hr                     string                                      
-                
-# Detailed Partition Information                
-Partition Value:       [2010-04-08, 11]         
-Database:              default                  
-Table:                 stats_part               
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   true                
-       numFiles                1                   
-       numRows                 500                 
-       rawDataSize             0                   
-       totalSize               5812                
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat
-OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr = '12')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_part
-POSTHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr = '12')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_part
-# col_name             data_type               comment             
-                
-key                    string                  default             
-value                  string                  default             
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-hr                     string                                      
-                
-# Detailed Partition Information                
-Partition Value:       [2010-04-08, 12]         
-Database:              default                  
-Table:                 stats_part               
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   true                
-       numFiles                1                   
-       numRows                 500                 
-       rawDataSize             0                   
-       totalSize               5812                
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat
-OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: analyze table stats_part partition(ds, hr) compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_part
-PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
-PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
-PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=13
-PREHOOK: Output: default@stats_part
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
-POSTHOOK: query: analyze table stats_part partition(ds, hr) compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_part
-POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=13
-POSTHOOK: Output: default@stats_part
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
-POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
-PREHOOK: query: desc formatted stats_part
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_part
-POSTHOOK: query: desc formatted stats_part
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_part
-# col_name             data_type               comment             
-                
-key                    string                  default             
-value                  string                  default             
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-hr                     string                                      
-                
-# Detailed Table Information            
-Database:              default                  
-#### A masked pattern was here ####
-Retention:             0                        
-#### A masked pattern was here ####
-Table Type:            MANAGED_TABLE            
-Table Parameters:               
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat
-OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: drop table stats_src
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@stats_src
-PREHOOK: Output: default@stats_src
-POSTHOOK: query: drop table stats_src
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@stats_src
-POSTHOOK: Output: default@stats_src
-PREHOOK: query: drop table stats_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@stats_part
-PREHOOK: Output: default@stats_part
-POSTHOOK: query: drop table stats_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@stats_part
-POSTHOOK: Output: default@stats_part

http://git-wip-us.apache.org/repos/asf/hive/blob/a014cffe/hbase-handler/src/test/results/positive/hbase_stats_empty_partition.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_stats_empty_partition.q.out b/hbase-handler/src/test/results/positive/hbase_stats_empty_partition.q.out
deleted file mode 100644
index c13817e..0000000
--- a/hbase-handler/src/test/results/positive/hbase_stats_empty_partition.q.out
+++ /dev/null
@@ -1,63 +0,0 @@
-PREHOOK: query: -- This test verifies that writing an empty partition succeeds when
--- hive.stats.reliable is set to true.
-
-create table tmptable(key string, value string) partitioned by (part string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@tmptable
-POSTHOOK: query: -- This test verifies that writing an empty partition succeeds when
--- hive.stats.reliable is set to true.
-
-create table tmptable(key string, value string) partitioned by (part string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tmptable
-PREHOOK: query: insert overwrite table tmptable partition (part = '1') select * from src where key = 'no_such_value'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@tmptable@part=1
-POSTHOOK: query: insert overwrite table tmptable partition (part = '1') select * from src where key = 'no_such_value'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@tmptable@part=1
-POSTHOOK: Lineage: tmptable PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: tmptable PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe formatted tmptable partition (part = '1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@tmptable
-POSTHOOK: query: describe formatted tmptable partition (part = '1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@tmptable
-# col_name             data_type               comment             
-                
-key                    string                                      
-value                  string                                      
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-part                   string                                      
-                
-# Detailed Partition Information                
-Partition Value:       [1]                      
-Database:              default                  
-Table:                 tmptable                 
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   true                
-       numFiles                1                   
-       numRows                 0                   
-       rawDataSize             0                   
-       totalSize               0                   
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat
-OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
