[09/41] hbase git commit: HBASE-18640 Move mapreduce out of hbase-server into separate module.

2017-08-26 Thread appy
http://git-wip-us.apache.org/repos/asf/hbase/blob/59d03410/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
deleted file mode 100644
index e669f14..0000000
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
+++ /dev/null
@@ -1,406 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase;
-
-import java.io.IOException;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.client.TableSnapshotScanner;
-import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-import org.apache.hadoop.hbase.mapreduce.TableMapper;
-import org.apache.hadoop.hbase.util.AbstractHBaseTool;
-import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapreduce.Counters;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.ToolRunner;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch;
-
-/**
- * A simple performance evaluation tool for single client and MR scans
- * and snapshot scans.
- */
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-public class ScanPerformanceEvaluation extends AbstractHBaseTool {
-
-  private static final String HBASE_COUNTER_GROUP_NAME = "HBase Counters";
-
-  private String type;
-  private String file;
-  private String tablename;
-  private String snapshotName;
-  private String restoreDir;
-  private String caching;
-
-  @Override
-  public void setConf(Configuration conf) {
-    super.setConf(conf);
-    Path rootDir;
-    try {
-      rootDir = FSUtils.getRootDir(conf);
-      rootDir.getFileSystem(conf);
-    } catch (IOException ex) {
-      throw new RuntimeException(ex);
-    }
-  }
-
-  @Override
-  protected void addOptions() {
-    this.addRequiredOptWithArg("t", "type", "the type of the test. One of the following: streaming|scan|snapshotscan|scanmapreduce|snapshotscanmapreduce");
-    this.addOptWithArg("f", "file", "the filename to read from");
-    this.addOptWithArg("tn", "table", "the tablename to read from");
-    this.addOptWithArg("sn", "snapshot", "the snapshot name to read from");
-    this.addOptWithArg("rs", "restoredir", "the directory to restore the snapshot");
-    this.addOptWithArg("ch", "caching", "scanner caching value");
-  }
-
-  @Override
-  protected void processOptions(CommandLine cmd) {
-    type = cmd.getOptionValue("type");
-    file = cmd.getOptionValue("file");
-    tablename = cmd.getOptionValue("table");
-    snapshotName = cmd.getOptionValue("snapshot");
-    restoreDir = cmd.getOptionValue("restoredir");
-    caching = cmd.getOptionValue("caching");
-  }
-
-  protected void testHdfsStreaming(Path filename) throws IOException {
-    byte[] buf = new byte[1024];
-    FileSystem fs = filename.getFileSystem(getConf());
-
-    // read the file from start to finish
-    Stopwatch fileOpenTimer = Stopwatch.createUnstarted();
-    Stopwatch streamTimer = Stopwatch.createUnstarted();
-
-    fileOpenTimer.start();
-    FSDataInputStream in = fs.open(filename);
-    fileOpenTimer.stop();
-
-    long totalBytes = 0;
-
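
Since ScanPerformanceEvaluation extends AbstractHBaseTool, which implements Hadoop's Tool interface, the class is normally launched through ToolRunner with the options registered in addOptions() above (-t/--type, -f/--file, -tn/--table, -sn/--snapshot, -rs/--restoredir, -ch/--caching). A minimal driver sketch, assuming the test-scope class is available on the classpath; the ScanPerfDriver wrapper and the example argument values are placeholders, not part of the patch:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.ScanPerformanceEvaluation;
  import org.apache.hadoop.util.ToolRunner;

  public class ScanPerfDriver {
    public static void main(String[] args) throws Exception {
      // HBase client configuration is picked up from hbase-site.xml on the classpath.
      Configuration conf = HBaseConfiguration.create();
      // Placeholder arguments: run the single-client "scan" test against a table
      // named "testtable" with a scanner caching value of 100.
      String[] toolArgs = {"-t", "scan", "-tn", "testtable", "-ch", "100"};
      // ToolRunner handles generic Hadoop options, then invokes the tool, which
      // parses the flags above in processOptions() and runs the requested test.
      int exitCode = ToolRunner.run(conf, new ScanPerformanceEvaluation(), toolArgs);
      System.exit(exitCode);
    }
  }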
