This is an automated email from the ASF dual-hosted git repository.

karanmehta93 pushed a commit to branch 4.x-HBase-1.4
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/4.x-HBase-1.4 by this push:
     new 03dd0d1  PHOENIX-5091 Add new features to UpdateStatisticsTool (#430)
03dd0d1 is described below

commit 03dd0d1434bbc9fff5c3bd77e94a54411fd07528
Author: karanmehta93 <karanmeht...@gmail.com>
AuthorDate: Mon Jan 28 11:45:04 2019 -0800

    PHOENIX-5091 Add new features to UpdateStatisticsTool (#430)
---
 .../phoenix/schema/stats/BaseStatsCollectorIT.java |  65 +++++++---
 .../org/apache/phoenix/query/GuidePostsCache.java  |   5 +-
 .../phoenix/schema/stats/StatisticsUtil.java       |   3 +-
 .../phoenix/schema/stats/UpdateStatisticsTool.java | 142 ++++++++++++++++-----
 .../schema/stats/UpdateStatisticsToolTest.java     |  76 +++++++++++
 5 files changed, 235 insertions(+), 56 deletions(-)

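With this change -s/--snapshot becomes optional (a name is generated when absent), the new -ms/--manage-snapshot flag creates the snapshot up front and deletes it afterwards, and -ms must be combined with -runfg. A minimal driver sketch using the new flags (the table name and restore dir below are placeholders, not values from the patch):

    import org.apache.hadoop.util.ToolRunner;
    import org.apache.phoenix.schema.stats.UpdateStatisticsTool;

    public class RunUpdateStats {
        public static void main(String[] args) throws Exception {
            // -ms snapshots the table before the MR job and deletes the snapshot
            // afterwards; parseOptions rejects -ms without -runfg (foreground).
            int exit = ToolRunner.run(new UpdateStatisticsTool(), new String[] {
                    "-t", "MY_TABLE",        // mandatory table name (placeholder)
                    "-d", "/tmp/restore",    // restore dir for the snapshot (placeholder)
                    "-runfg", "-ms"});
            System.exit(exit);
        }
    }
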
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/BaseStatsCollectorIT.java b/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/BaseStatsCollectorIT.java
index 78c4faf..2344bd0 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/BaseStatsCollectorIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/BaseStatsCollectorIT.java
@@ -27,6 +27,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.sql.Array;
@@ -40,20 +41,21 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Random;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver;
@@ -190,11 +192,8 @@ public abstract class BaseStatsCollectorIT extends BaseUniqueNamesOwnClusterIT {
     private void collectStatistics(Connection conn, String fullTableName,
                                    String guidePostWidth) throws Exception {
 
-        String localPhysicalTableName = SchemaUtil.getPhysicalTableName(fullTableName.getBytes(),
-                userTableNamespaceMapped).getNameAsString();
-
         if (collectStatsOnSnapshot) {
-            collectStatsOnSnapshot(conn, fullTableName, guidePostWidth, localPhysicalTableName);
+            collectStatsOnSnapshot(conn, fullTableName, guidePostWidth);
             invalidateStats(conn, fullTableName);
         } else {
             String updateStatisticsSql = "UPDATE STATISTICS " + fullTableName;
@@ -207,20 +206,44 @@ public abstract class BaseStatsCollectorIT extends BaseUniqueNamesOwnClusterIT {
     }
 
     private void collectStatsOnSnapshot(Connection conn, String fullTableName,
-                                        String guidePostWidth, String localPhysicalTableName) throws Exception {
-        UpdateStatisticsTool tool = new UpdateStatisticsTool();
-        Configuration conf = utility.getConfiguration();
-        HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
-        String snapshotName = "UpdateStatisticsTool_" + generateUniqueName();
-        admin.snapshot(snapshotName, localPhysicalTableName);
-        LOG.info("Successfully created snapshot " + snapshotName + " for " + localPhysicalTableName);
-        Path randomDir = getUtility().getRandomDir();
+                                        String guidePostWidth) throws Exception {
         if (guidePostWidth != null) {
             conn.createStatement().execute("ALTER TABLE " + fullTableName + " SET GUIDE_POSTS_WIDTH = " + guidePostWidth);
         }
-        Job job = tool.configureJob(conf, fullTableName, snapshotName, randomDir);
-        assertEquals(job.getConfiguration().get(MAPREDUCE_JOB_TYPE), UPDATE_STATS.name());
-        tool.runJob(job, true);
+        runUpdateStatisticsTool(fullTableName);
+    }
+
+    // Run UpdateStatisticsTool in foreground with manage snapshot option
+    private void runUpdateStatisticsTool(String fullTableName) {
+        UpdateStatisticsTool tool = new UpdateStatisticsTool();
+        tool.setConf(utility.getConfiguration());
+        String randomDir = getUtility().getRandomDir().toString();
+        final String[] cmdArgs = getArgValues(fullTableName, randomDir);
+        try {
+            int status = tool.run(cmdArgs);
+            assertEquals("MR Job should complete successfully", 0, status);
+            HBaseAdmin hBaseAdmin = utility.getHBaseAdmin();
+            assertEquals("Snapshot should be automatically deleted when UpdateStatisticsTool has completed",
+                    0, hBaseAdmin.listSnapshots(tool.getSnapshotName()).size());
+        } catch (Exception e) {
+            fail("Exception when running UpdateStatisticsTool for " + tableName + " Exception: " + e);
+        } finally {
+            Job job = tool.getJob();
+            assertEquals("MR Job should have been configured with UPDATE_STATS job type",
+                    job.getConfiguration().get(MAPREDUCE_JOB_TYPE), UPDATE_STATS.name());
+        }
+    }
+
+    private String[] getArgValues(String fullTableName, String randomDir) {
+        final List<String> args = Lists.newArrayList();
+        args.add("-t");
+        args.add(fullTableName);
+        args.add("-d");
+        args.add(randomDir);
+        args.add("-runfg");
+        args.add("-cs");
+        args.add("-ds");
+        return args.toArray(new String[0]);
     }
 
     @Test
@@ -480,7 +503,7 @@ public abstract class BaseStatsCollectorIT extends BaseUniqueNamesOwnClusterIT {
         Scan scan = new Scan();
         scan.setRaw(true);
         PhoenixConnection phxConn = conn.unwrap(PhoenixConnection.class);
-        try (HTableInterface htable = phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
+        try (Table htable = phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
             ResultScanner scanner = htable.getScanner(scan);
             Result result;
             while ((result = scanner.next())!=null) {
@@ -493,7 +516,7 @@ public abstract class BaseStatsCollectorIT extends BaseUniqueNamesOwnClusterIT {
         scan = new Scan();
         scan.setRaw(true);
         phxConn = conn.unwrap(PhoenixConnection.class);
-        try (HTableInterface htable = phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
+        try (Table htable = phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
             ResultScanner scanner = htable.getScanner(scan);
             Result result;
             while ((result = scanner.next())!=null) {
@@ -718,7 +741,7 @@ public abstract class BaseStatsCollectorIT extends BaseUniqueNamesOwnClusterIT {
     private void verifyGuidePostGenerated(ConnectionQueryServices queryServices,
             String tableName, String[] familyNames,
             long guidePostWidth, boolean emptyGuidePostExpected) throws Exception {
-        try (HTableInterface statsHTable =
+        try (Table statsHTable =
                 queryServices.getTable(
                         SchemaUtil.getPhysicalName(PhoenixDatabaseMetaData.SYSTEM_STATS_NAME_BYTES,
                                 queryServices.getProps()).getName())) {
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/GuidePostsCache.java b/phoenix-core/src/main/java/org/apache/phoenix/query/GuidePostsCache.java
index 1d9fa36..c9bda03 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/query/GuidePostsCache.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/query/GuidePostsCache.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.schema.PColumnFamily;
@@ -95,8 +95,7 @@ public class GuidePostsCache {
     protected class StatsLoader extends CacheLoader<GuidePostsKey, GuidePostsInfo> {
         @Override
         public GuidePostsInfo load(GuidePostsKey statsKey) throws Exception {
-            @SuppressWarnings("deprecation")
-            HTableInterface statsHTable = queryServices.getTable(SchemaUtil.getPhysicalName(
+            Table statsHTable = queryServices.getTable(SchemaUtil.getPhysicalName(
                     PhoenixDatabaseMetaData.SYSTEM_STATS_NAME_BYTES,
                             queryServices.getProps()).getName());
             try {
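The HTableInterface-to-Table swap above follows the stock HBase 1.x client API; a minimal sketch of the pattern, assuming conn is an already-open HBase Connection and using the Phoenix stats table name:

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Table;

    class StatsTableSketch {
        // Table is a lightweight per-use handle; close it after each use
        // instead of holding the deprecated HTableInterface.
        static void withStatsTable(Connection conn) throws IOException {
            try (Table statsHTable = conn.getTable(TableName.valueOf("SYSTEM.STATS"))) {
                // scan guideposts here, as StatsLoader.load() does above
            }
        }
    }
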
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsUtil.java
index 57942b6..5c682eb 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsUtil.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
@@ -142,7 +143,7 @@ public class StatisticsUtil {
         return key;
     }
 
-    public static GuidePostsInfo readStatistics(HTableInterface statsHTable, GuidePostsKey key, long clientTimeStamp)
+    public static GuidePostsInfo readStatistics(Table statsHTable, GuidePostsKey key, long clientTimeStamp)
             throws IOException {
         ImmutableBytesWritable ptr = new ImmutableBytesWritable();
         ptr.set(key.getColumnFamily());
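A short usage sketch for the retyped readStatistics; the Table handle and GuidePostsKey are assumed to be obtained as in GuidePostsCache above, and LATEST_TIMESTAMP is one plausible client timestamp, not a value mandated by this patch:

    import java.io.IOException;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.client.Table;

    class ReadStatsSketch {
        // LATEST_TIMESTAMP places no upper bound on the stats rows read
        static GuidePostsInfo latest(Table statsHTable, GuidePostsKey key) throws IOException {
            return StatisticsUtil.readStatistics(statsHTable, key, HConstants.LATEST_TIMESTAMP);
        }
    }
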
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/UpdateStatisticsTool.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/UpdateStatisticsTool.java
index f84b859..dfb5d13 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/UpdateStatisticsTool.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/UpdateStatisticsTool.java
@@ -17,6 +17,7 @@
  */
 package org.apache.phoenix.schema.stats;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.antlr.runtime.CharStream;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -25,27 +26,28 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.metrics.Gauge;
 import org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
 import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.htrace.SpanReceiver;
 import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.mapreduce.util.ConnectionUtil;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil.MRJobType;
-import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil.SchemaType;
 import org.apache.phoenix.mapreduce.util.PhoenixMapReduceUtil;
+import org.apache.phoenix.util.SchemaUtil;
 import org.apache.tephra.TransactionNotInProgressException;
 import org.apache.tephra.TransactionSystemClient;
 import org.apache.tephra.hbase.coprocessor.TransactionProcessor;
@@ -58,9 +60,12 @@ import org.joda.time.Chronology;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.UUID;
-
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
+import java.sql.Connection;
+import java.util.List;
+
+import static org.apache.phoenix.query.QueryServices.IS_NAMESPACE_MAPPING_ENABLED;
+import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_IS_NAMESPACE_MAPPING_ENABLED;
 
 /**
  * Tool to collect table level statistics on HBase snapshot
@@ -77,24 +82,69 @@ public class UpdateStatisticsTool extends Configured implements Tool {
             "Restore Directory for HBase snapshot");
     private static final Option RUN_FOREGROUND_OPTION =
             new Option("runfg", "run-foreground", false,
-                    "If specified, runs UpdateStatisticsTool in Foreground. 
Default - Runs the build in background.");
+                    "If specified, runs UpdateStatisticsTool in Foreground. 
Default - Runs the build in background");
+    private static final Option MANAGE_SNAPSHOT_OPTION =
+            new Option("ms", "manage-snapshot", false,
+                    "Creates a new snapshot, runs the tool and deletes it");
+
     private static final Option HELP_OPTION = new Option("h", "help", false, "Help");
 
-    private Configuration conf;
     private String tableName;
     private String snapshotName;
     private Path restoreDir;
+    private boolean manageSnapshot;
     private boolean isForeground;
 
+    private Job job;
+
     @Override
     public int run(String[] args) throws Exception {
         parseArgs(args);
-        Job job = configureJob(conf, tableName, snapshotName, restoreDir);
+        preJobTask();
+        configureJob();
         TableMapReduceUtil.initCredentials(job);
-        return runJob(job, isForeground);
+        int ret = runJob();
+        postJobTask();
+        return ret;
+    }
+
+    /**
+     * Run any tasks before the MR job is launched
+     * Currently being used for snapshot creation
+     */
+    private void preJobTask() throws Exception {
+        if (!manageSnapshot) {
+            return;
+        }
+
+        try (final Connection conn = ConnectionUtil.getInputConnection(getConf())) {
+            HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
+            boolean namespaceMapping = getConf().getBoolean(IS_NAMESPACE_MAPPING_ENABLED,
+                    DEFAULT_IS_NAMESPACE_MAPPING_ENABLED);
+            String physicalTableName = SchemaUtil.getPhysicalTableName(tableName.getBytes(),
+                    namespaceMapping).getNameAsString();
+            admin.snapshot(snapshotName, physicalTableName);
+            LOG.info("Successfully created snapshot " + snapshotName + " for " + physicalTableName);
+        }
+    }
+
+    /**
+     * Run any tasks after the MR job has completed successfully
+     * Currently being used for snapshot deletion
+     */
+    private void postJobTask() throws Exception {
+        if (!manageSnapshot) {
+            return;
+        }
+
+        try (final Connection conn = ConnectionUtil.getInputConnection(getConf())) {
+            HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
+            admin.deleteSnapshot(snapshotName);
+            LOG.info("Successfully deleted snapshot " + snapshotName);
+        }
     }
 
-    private void parseArgs(String[] args) {
+    void parseArgs(String[] args) {
         CommandLine cmdLine = null;
         try {
             cmdLine = parseOptions(args);
@@ -102,24 +152,34 @@ public class UpdateStatisticsTool extends Configured implements Tool {
             printHelpAndExit(e.getMessage(), getOptions());
         }
 
-        conf = HBaseConfiguration.create();
+        if (getConf() == null) {
+            setConf(HBaseConfiguration.create());
+        }
+
         tableName = cmdLine.getOptionValue(TABLE_NAME_OPTION.getOpt());
         snapshotName = cmdLine.getOptionValue(SNAPSHOT_NAME_OPTION.getOpt());
+        if (snapshotName == null) {
+            snapshotName = "UpdateStatisticsTool_" + tableName + "_" + System.currentTimeMillis();
+        }
+
         String restoreDirOptionValue = cmdLine.getOptionValue(RESTORE_DIR_OPTION.getOpt());
         if (restoreDirOptionValue == null) {
-            restoreDirOptionValue = conf.get(FS_DEFAULT_NAME_KEY) + "/tmp";
+            restoreDirOptionValue = getConf().get(FS_DEFAULT_NAME_KEY) + "/tmp";
         }
+
         restoreDir = new Path(restoreDirOptionValue);
+        manageSnapshot = cmdLine.hasOption(MANAGE_SNAPSHOT_OPTION.getOpt());
         isForeground = cmdLine.hasOption(RUN_FOREGROUND_OPTION.getOpt());
     }
 
-    Job configureJob(Configuration conf, String tableName,
-                     String snapshotName, Path restoreDir) throws Exception {
-        Job job = Job.getInstance(conf, "Update statistics for " + tableName);
+    private void configureJob() throws Exception {
+        job = Job.getInstance(getConf(),
+                "UpdateStatistics-" + tableName + "-" + snapshotName);
         PhoenixMapReduceUtil.setInput(job, NullDBWritable.class,
                 snapshotName, tableName, restoreDir);
 
         PhoenixConfigurationUtil.setMRJobType(job.getConfiguration(), MRJobType.UPDATE_STATS);
+
         // DO NOT allow mapper splits using statistics since it may result in many smaller chunks
         PhoenixConfigurationUtil.setSplitByStats(job.getConfiguration(), false);
 
@@ -136,19 +196,22 @@ public class UpdateStatisticsTool extends Configured implements Tool {
                 Cancellable.class, TTransportException.class, SpanReceiver.class, TransactionProcessor.class, Gauge.class, MetricRegistriesImpl.class);
         LOG.info("UpdateStatisticsTool running for: " + tableName
                 + " on snapshot: " + snapshotName + " with restore dir: " + 
restoreDir);
-
-        return job;
     }
 
-    int runJob(Job job, boolean isForeground) throws Exception {
-        if (isForeground) {
-            LOG.info("Running UpdateStatisticsTool in Foreground. " +
-                    "Runs full table scans. This may take a long time!.");
-            return (job.waitForCompletion(true)) ? 0 : 1;
-        } else {
-            LOG.info("Running UpdateStatisticsTool in Background - Submit 
async and exit");
-            job.submit();
-            return 0;
+    private int runJob() {
+        try {
+            if (isForeground) {
+                LOG.info("Running UpdateStatisticsTool in Foreground. " +
+                        "Runs full table scans. This may take a long time!");
+                return (job.waitForCompletion(true)) ? 0 : 1;
+            } else {
+                LOG.info("Running UpdateStatisticsTool in Background - Submit 
async and exit");
+                job.submit();
+                return 0;
+            }
+        } catch (Exception e) {
+            LOG.error("Caught exception " + e + " trying to update statistics.");
+            return 1;
         }
     }
 
@@ -169,7 +232,7 @@ public class UpdateStatisticsTool extends Configured implements Tool {
      * @param args supplied command line arguments
      * @return the parsed command line
      */
-    private CommandLine parseOptions(String[] args) {
+    CommandLine parseOptions(String[] args) {
 
         final Options options = getOptions();
 
@@ -190,9 +253,9 @@ public class UpdateStatisticsTool extends Configured implements Tool {
                     + "parameter");
         }
 
-        if (!cmdLine.hasOption(SNAPSHOT_NAME_OPTION.getOpt())) {
-            throw new IllegalStateException(SNAPSHOT_NAME_OPTION.getLongOpt() + " is a mandatory "
-                    + "parameter");
+        if (cmdLine.hasOption(MANAGE_SNAPSHOT_OPTION.getOpt())
+                && !cmdLine.hasOption(RUN_FOREGROUND_OPTION.getOpt())) {
+            throw new IllegalStateException("Snapshot cannot be managed if job 
is running in background");
         }
 
         return cmdLine;
@@ -205,9 +268,25 @@ public class UpdateStatisticsTool extends Configured implements Tool {
         options.addOption(HELP_OPTION);
         options.addOption(RESTORE_DIR_OPTION);
         options.addOption(RUN_FOREGROUND_OPTION);
+        options.addOption(MANAGE_SNAPSHOT_OPTION);
         return options;
     }
 
+    public Job getJob() {
+        return job;
+    }
+
+    public String getSnapshotName() {
+        return snapshotName;
+    }
+
+    public Path getRestoreDir() {
+        return restoreDir;
+    }
+
+    /**
+     * Empty Mapper class since stats collection happens as part of the scanner object
+     */
     public static class TableSnapshotMapper
             extends Mapper<NullWritable, NullDBWritable, NullWritable, NullWritable> {
 
@@ -218,6 +297,7 @@ public class UpdateStatisticsTool extends Configured implements Tool {
     }
 
     public static void main(String[] args) throws Exception {
-        ToolRunner.run(new UpdateStatisticsTool(), args);
+        int result = ToolRunner.run(new UpdateStatisticsTool(), args);
+        System.exit(result);
     }
 }
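Since run() now keeps the submitted Job reachable via the new getJob() accessor, a background caller can poll completion itself. A sketch assuming a default configuration and an arbitrary 5-second poll interval:

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.phoenix.schema.stats.UpdateStatisticsTool;

    class BackgroundPollSketch {
        static int runAndWait(String table) throws Exception {
            UpdateStatisticsTool tool = new UpdateStatisticsTool();
            tool.setConf(HBaseConfiguration.create());
            tool.run(new String[] {"-t", table}); // no -runfg: submits async and returns
            Job job = tool.getJob();              // accessor added by this patch
            while (!job.isComplete()) {           // poll the MR job ourselves
                Thread.sleep(5000L);
            }
            return job.isSuccessful() ? 0 : 1;
        }
    }
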
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/schema/stats/UpdateStatisticsToolTest.java b/phoenix-core/src/test/java/org/apache/phoenix/schema/stats/UpdateStatisticsToolTest.java
new file mode 100644
index 0000000..86f97ff
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/schema/stats/UpdateStatisticsToolTest.java
@@ -0,0 +1,76 @@
+package org.apache.phoenix.schema.stats;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.junit.Assert;
+import org.junit.Test;
+
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+public class UpdateStatisticsToolTest {
+
+    @Test (expected = IllegalStateException.class)
+    public void testTableNameIsMandatory() {
+        UpdateStatisticsTool tool = new UpdateStatisticsTool();
+        tool.parseOptions(new String[] {});
+    }
+
+    @Test (expected = IllegalStateException.class)
+    public void testManageSnapshotAndRunFgOption1() {
+        UpdateStatisticsTool tool = new UpdateStatisticsTool();
+        tool.parseOptions(new String[] {"-t", "table1", "-ms"});
+    }
+
+    @Test
+    public void testManageSnapshotAndRunFgOption2() {
+        UpdateStatisticsTool tool = new UpdateStatisticsTool();
+        try {
+            tool.parseOptions(new String[] {"-t", "table1", "-ms", "-runfg"});
+        } catch (IllegalStateException e) {
+            fail("IllegalStateException is not expected " +
+                    "since all required parameters are provided.");
+        }
+    }
+
+    @Test
+    public void testSnapshotNameInput() {
+        UpdateStatisticsTool tool = new UpdateStatisticsTool();
+        tool.parseArgs(new String[] {"-t", "table1", "-ms", "-runfg", "-s", 
"snap1"});
+        assertEquals("snap1", tool.getSnapshotName());
+    }
+
+    @Test
+    public void testSnapshotNameDefault() {
+        UpdateStatisticsTool tool = new UpdateStatisticsTool();
+        tool.parseArgs(new String[] {"-t", "table1", "-ms", "-runfg"});
+        assertTrue(tool.getSnapshotName().startsWith("UpdateStatisticsTool_table1_"));
+    }
+
+    @Test
+    public void testRestoreDirDefault() {
+        UpdateStatisticsTool tool = new UpdateStatisticsTool();
+        tool.parseArgs(new String[] {"-t", "table1", "-ms", "-runfg"});
+        assertEquals("file:/tmp", tool.getRestoreDir().toString());
+    }
+
+    @Test
+    public void testRestoreDirInput() {
+        UpdateStatisticsTool tool = new UpdateStatisticsTool();
+        tool.parseArgs(new String[] {"-t", "table1", "-d", "fs:/path"});
+        assertEquals("fs:/path", tool.getRestoreDir().toString());
+    }
+
+    @Test
+    public void testRestoreDirFromConfig() {
+        UpdateStatisticsTool tool = new UpdateStatisticsTool();
+        Configuration configuration = HBaseConfiguration.create();
+        configuration.set(FS_DEFAULT_NAME_KEY, "hdfs://base-dir");
+        tool.setConf(configuration);
+        tool.parseArgs(new String[] {"-t", "table1", "-ms", "-runfg"});
+        assertEquals("hdfs://base-dir/tmp", tool.getRestoreDir().toString());
+    }
+
+}
\ No newline at end of file
