[15/22] hbase-site git commit: Published site at ac5bb8155b618194fe9cf1131f0e72c99b7b534c.

2018-06-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a7f7dba1/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
index 99f53c4..2d5c946 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
@@ -123,7 +123,7 @@
 115  public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false;
 116
 117  // Parameters for split process
-118  protected final Path rootDir;
+118  protected final Path walDir;
 119  protected final FileSystem fs;
 120  protected final Configuration conf;
 121
@@ -156,14 +156,14 @@
 148
 149
 150  @VisibleForTesting
-151  WALSplitter(final WALFactory factory, Configuration conf, Path rootDir,
+151  WALSplitter(final WALFactory factory, Configuration conf, Path walDir,
 152      FileSystem fs, LastSequenceId idChecker,
 153      SplitLogWorkerCoordination splitLogWorkerCoordination) {
 154    this.conf = HBaseConfiguration.create(conf);
 155    String codecClassName = conf
 156        .get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
 157    this.conf.set(HConstants.RPC_CODEC_CONF_KEY, codecClassName);
-158    this.rootDir = rootDir;
+158    this.walDir = walDir;
 159    this.fs = fs;
 160    this.sequenceIdChecker = idChecker;
 161    this.splitLogWorkerCoordination = splitLogWorkerCoordination;
@@ -194,11 +194,11 @@
 186   * <p>
 187   * @return false if it is interrupted by the progress-able.
 188   */
-189  public static boolean splitLogFile(Path rootDir, FileStatus logfile, FileSystem fs,
+189  public static boolean splitLogFile(Path walDir, FileStatus logfile, FileSystem fs,
 190      Configuration conf, CancelableProgressable reporter, LastSequenceId idChecker,
 191      SplitLogWorkerCoordination splitLogWorkerCoordination, final WALFactory factory)
 192      throws IOException {
-193    WALSplitter s = new WALSplitter(factory, conf, rootDir, fs, idChecker,
+193    WALSplitter s = new WALSplitter(factory, conf, walDir, fs, idChecker,
 194        splitLogWorkerCoordination);
 195    return s.splitLogFile(logfile, reporter);
 196  }
@@ -330,10 +330,10 @@
 322      LOG.warn("Could not parse, corrupted WAL={}", logPath, e);
 323      if (splitLogWorkerCoordination != null) {
 324        // Some tests pass in a csm of null.
-325        splitLogWorkerCoordination.markCorrupted(rootDir, logfile.getPath().getName(), fs);
+325        splitLogWorkerCoordination.markCorrupted(walDir, logfile.getPath().getName(), fs);
 326      } else {
 327        // for tests only
-328        ZKSplitLog.markCorrupted(rootDir, logfile.getPath().getName(), fs);
+328        ZKSplitLog.markCorrupted(walDir, logfile.getPath().getName(), fs);
 329      }
 330      isCorrupted = true;
 331    } catch (IOException e) {
@@ -465,1437 +465,1442 @@
 457   * <code>logEntry</code>: e.g. /hbase/some_table/2323432434/recovered.edits/2332.
 458   * This method also ensures existence of RECOVERED_EDITS_DIR under the region
 459   * creating it if necessary.
-460   * @param fs
-461   * @param logEntry
-462   * @param rootDir HBase root dir.
-463   * @param fileNameBeingSplit the file being split currently. Used to generate tmp file name.
-464   * @return Path to file into which to dump split log edits.
-465   * @throws IOException
-466   */
-467  @SuppressWarnings("deprecation")
-468  @VisibleForTesting
-469  static Path getRegionSplitEditsPath(final FileSystem fs,
-470      final Entry logEntry, final Path rootDir, String fileNameBeingSplit)
-471      throws IOException {
-472    Path tableDir = FSUtils.getTableDir(rootDir, logEntry.getKey().getTableName());
-473    String encodedRegionName = Bytes.toString(logEntry.getKey().getEncodedRegionName());
-474    Path regiondir = HRegion.getRegionDir(tableDir, encodedRegionName);
-475    Path dir = getRegionDirRecoveredEditsDir(regiondir);
-476
-477    if (!fs.exists(regiondir)) {
-478      LOG.info("This region's directory does not exist: {}."
-479          + "It is very likely that it was already split so it is "
-480          + "safe to discard those edits.", regiondir);
-481      return null;
-482    }
-483    if (fs.exists(dir) && fs.isFile(dir)) {
-484      Path tmp = new Path("/tmp");
-485      if (!fs.exists(tmp)) {
-486        fs.mkdirs(tmp);
-487      }
-488      tmp = new Path(tmp,
-489        HConstants.RECOVERED_EDITS_DIR + "_" + encodedRegionName);
-490      LOG.warn("Found existing old file: {}. It could be some "
-491        + "leftover of an old installation. It should be a folder instead. "
-492        + "So moving it to {}", dir, tmp);
-493      if

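The rename is mechanical (rootDir becomes walDir wherever WAL splitting is involved), but it clarifies that the splitter works against the WAL directory, not the HBase root directory. Below is a minimal caller sketch against the renamed static entry point; the helper class and variable names are illustrative, and only the splitLogFile signature comes from the hunk above.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALSplitter;

public final class SplitOneWal {
  /** Splits a single WAL file found under walDir. */
  static boolean splitOne(Configuration conf, FileSystem fs, Path walDir,
      Path wal, WALFactory factory) throws IOException {
    FileStatus logfile = fs.getFileStatus(wal);
    CancelableProgressable reporter = () -> true; // report progress, never cancel
    // Passing null for idChecker and the coordination takes the test-only
    // branch shown above, which falls back to ZKSplitLog.markCorrupted(walDir, ...).
    return WALSplitter.splitLogFile(walDir, logfile, fs, conf, reporter,
        null, null, factory);
  }
}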
[15/22] hbase-site git commit: Published site at 46cb5dfa226892fd2580f26ce9ce77225bd7e67c.

2018-04-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bc57a1a/devapidocs/org/apache/hadoop/hbase/quotas/class-use/RpcThrottlingException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/class-use/RpcThrottlingException.html b/devapidocs/org/apache/hadoop/hbase/quotas/class-use/RpcThrottlingException.html
index f7bd99d..f5a70f3 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/class-use/RpcThrottlingException.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/class-use/RpcThrottlingException.html
@@ -124,19 +124,25 @@
 
 
 void
-TimeBasedLimiter.checkQuota(long writeSize,
-  long readSize)
+TimeBasedLimiter.checkQuota(long writeReqs,
+  long estimateWriteSize,
+  long readReqs,
+  long estimateReadSize)
 
 
 void
-QuotaLimiter.checkQuota(long estimateWriteSize,
+QuotaLimiter.checkQuota(long writeReqs,
+  long estimateWriteSize,
+  long readReqs,
   long estimateReadSize)
 Checks if it is possible to execute the specified operation.
 
 
 
 void
-NoopQuotaLimiter.checkQuota(long estimateWriteSize,
+NoopQuotaLimiter.checkQuota(long writeReqs,
+  long estimateWriteSize,
+  long readReqs,
   long estimateReadSize)
 
 

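The limiter now receives request counts alongside the size estimates, so one call can enforce both a request-number throttle and a throughput throttle. A short sketch of a caller driving the widened four-argument signature; the helper and its parameters are illustrative, and only the checkQuota shape (and that it can surface an RpcThrottlingException) comes from this page.

import org.apache.hadoop.hbase.quotas.QuotaLimiter;
import org.apache.hadoop.hbase.quotas.RpcThrottlingException;

final class QuotaCheckSketch {
  /** Pre-flight admission check for a batch of reads and writes. */
  static void admit(QuotaLimiter limiter, long writeReqs, long readReqs,
      long avgWriteSize, long avgReadSize) throws RpcThrottlingException {
    // Counts and size estimates travel together, so request-count and
    // request-size quotas are checked in a single call.
    limiter.checkQuota(writeReqs, writeReqs * avgWriteSize,
        readReqs, readReqs * avgReadSize);
  }
}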
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bc57a1a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 4e25e3e..152e2b7 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -229,13 +229,13 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
-org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
+org.apache.hadoop.hbase.quotas.QuotaType
 org.apache.hadoop.hbase.quotas.ThrottleType
-org.apache.hadoop.hbase.quotas.QuotaScope
 org.apache.hadoop.hbase.quotas.RpcThrottlingException.Type
+org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
+org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
-org.apache.hadoop.hbase.quotas.QuotaType
+org.apache.hadoop.hbase.quotas.QuotaScope
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8bc57a1a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileInfo.html b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileInfo.html
index 1158ff7..df2be42 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileInfo.html
@@ -237,6 +237,15 @@ extends java.lang.Object
 
 
 
+StoreFileInfo(org.apache.hadoop.conf.Configuration conf,
+ org.apache.hadoop.fs.FileSystem fs,
+ org.apache.hadoop.fs.FileStatus fileStatus,
+ Reference reference,
+ HFileLink link)
+Create a Store File Info from an HFileLink and a Reference
+
+
+
 StoreFileInfo(org.apache.hadoop.conf.Configuration conf,
  org.apache.hadoop.fs.FileSystem fs,
  org.apache.hadoop.fs.Path initialPath)
@@ -622,39 +631,55 @@ extends java.lang.Object
 public StoreFileInfo(org.apache.hadoop.conf.Configuration conf,
  org.apache.hadoop.fs.FileSystem fs,
  org.apache.hadoop.fs.FileStatus fileStatus,
- HFileLink link)
-  throws java.io.IOException
+ HFileLink link)
 Create a Store File Info from an HFileLink
 
 Parameters:
-conf - the Configuration to use
-fs - The current file system to use.
+conf - The Configuration to use
+fs - The current file system to use
 fileStatus - The FileStatus of the file
-Throws:
-java.io.IOException
 
 
 
 
 
 
-
+
 
 StoreFileInfo
-public StoreFileInfo(org.apache.hadoop.conf.Configuration conf,
+public StoreFileInfo(org.apache.hadoop.conf.Configuration conf,
 

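The new five-argument constructor covers the case where a split reference points through an HFileLink, so both indirections can be described by one StoreFileInfo. A hedged sketch of a call site follows; everything except the parameter list shown in the summary above is illustrative, including the assumption that the overload is accessible to the caller.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;

final class StoreFileInfoSketch {
  /** Describes a store file that is a Reference resolved through an HFileLink. */
  static StoreFileInfo forReferenceOverLink(Configuration conf, FileSystem fs,
      FileStatus status, Reference ref, HFileLink link) {
    // Note the sibling change above: the HFileLink-based constructor no longer
    // declares IOException, so no try/catch is needed at call sites.
    return new StoreFileInfo(conf, fs, status, ref, link);
  }
}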
[15/22] hbase-site git commit: Published site at .

2017-11-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/34a201e5/hbase-build-configuration/hbase-archetypes/plugin-management.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/plugin-management.html b/hbase-build-configuration/hbase-archetypes/plugin-management.html
index 16b947d..b10b2f3 100644
--- a/hbase-build-configuration/hbase-archetypes/plugin-management.html
+++ b/hbase-build-configuration/hbase-archetypes/plugin-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
Apache HBase - Archetypes – Project Plugin Management
 
@@ -271,7 +271,7 @@
 https://www.apache.org/ The Apache Software Foundation.
 All rights reserved.
 
-  Last Published: 2017-11-21
+  Last Published: 2017-11-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/34a201e5/hbase-build-configuration/hbase-archetypes/plugins.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/plugins.html b/hbase-build-configuration/hbase-archetypes/plugins.html
index 7a2331f..9b14260 100644
--- a/hbase-build-configuration/hbase-archetypes/plugins.html
+++ b/hbase-build-configuration/hbase-archetypes/plugins.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
Apache HBase - Archetypes – Project Plugins
 
@@ -214,7 +214,7 @@
 https://www.apache.org/ The Apache Software Foundation.
 All rights reserved.
 
-  Last Published: 2017-11-21
+  Last Published: 2017-11-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/34a201e5/hbase-build-configuration/hbase-archetypes/project-info.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/project-info.html b/hbase-build-configuration/hbase-archetypes/project-info.html
index ed734e9..40e19ba 100644
--- a/hbase-build-configuration/hbase-archetypes/project-info.html
+++ b/hbase-build-configuration/hbase-archetypes/project-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
Apache HBase - Archetypes – Project Information
 
@@ -167,7 +167,7 @@
 https://www.apache.org/ The Apache Software Foundation.
 All rights reserved.
 
-  Last Published: 2017-11-21
+  Last Published: 2017-11-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/34a201e5/hbase-build-configuration/hbase-archetypes/project-summary.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/project-summary.html b/hbase-build-configuration/hbase-archetypes/project-summary.html
index d5a615e..4be54fc 100644
--- a/hbase-build-configuration/hbase-archetypes/project-summary.html
+++ b/hbase-build-configuration/hbase-archetypes/project-summary.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
Apache HBase - Archetypes – Project Summary
 
@@ -163,7 +163,7 @@
 https://www.apache.org/ The Apache Software Foundation.
 All rights reserved.
 
-  Last Published: 2017-11-21
+  Last Published: 2017-11-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/34a201e5/hbase-build-configuration/hbase-archetypes/source-repository.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/source-repository.html b/hbase-build-configuration/hbase-archetypes/source-repository.html
index a1f5b26..f698647 100644
--- a/hbase-build-configuration/hbase-archetypes/source-repository.html
+++ b/hbase-build-configuration/hbase-archetypes/source-repository.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
Apache HBase - Archetypes – Source Code Management
 
@@ -134,7 +134,7 @@
 https://www.apache.org/ The Apache Software Foundation.
 All rights reserved.
 
-  Last Published: 2017-11-21
+  Last Published: 2017-11-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/34a201e5/hbase-build-configuration/hbase-archetypes/team-list.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/team-list.html b/hbase-build-configuration/hbase-archetypes/team-list.html
index 116f633..99ed205 100644
--- a/hbase-build-configuration/hbase-archetypes/team-list.html
+++ 

[15/22] hbase-site git commit: Published site at .

2017-08-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6f0c8299/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CancelCommand.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CancelCommand.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CancelCommand.html
index df7d03c..59d8ee8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CancelCommand.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CancelCommand.html
@@ -40,994 +40,1004 @@
 032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
 033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
 034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-035
-036import java.io.IOException;
-037import java.net.URI;
-038import java.util.List;
-039
-040import org.apache.commons.cli.CommandLine;
-041import org.apache.commons.cli.HelpFormatter;
-042import org.apache.commons.cli.Options;
-043import org.apache.commons.lang.StringUtils;
-044import org.apache.hadoop.conf.Configuration;
-045import org.apache.hadoop.conf.Configured;
-046import org.apache.hadoop.fs.FileSystem;
-047import org.apache.hadoop.fs.Path;
-048import org.apache.hadoop.hbase.HBaseConfiguration;
-049import org.apache.hadoop.hbase.TableName;
-050import org.apache.hadoop.hbase.backup.BackupAdmin;
-051import org.apache.hadoop.hbase.backup.BackupInfo;
-052import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
-053import org.apache.hadoop.hbase.backup.BackupRequest;
-054import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
-055import org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
-056import org.apache.hadoop.hbase.backup.BackupType;
-057import org.apache.hadoop.hbase.backup.util.BackupSet;
-058import org.apache.hadoop.hbase.backup.util.BackupUtils;
-059import org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import org.apache.hadoop.hbase.client.Connection;
-061import org.apache.hadoop.hbase.client.ConnectionFactory;
-062import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-063import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-064
-065/**
-066 * General backup commands, options and usage messages
-067 */
-068
-069@InterfaceAudience.Private
-070public final class BackupCommands {
-071
-072  public final static String INCORRECT_USAGE = "Incorrect usage";
+035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+036import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
+037
+038import java.io.IOException;
+039import java.net.URI;
+040import java.util.List;
+041
+042import org.apache.commons.cli.CommandLine;
+043import org.apache.commons.cli.HelpFormatter;
+044import org.apache.commons.cli.Options;
+045import org.apache.commons.lang.StringUtils;
+046import org.apache.hadoop.conf.Configuration;
+047import org.apache.hadoop.conf.Configured;
+048import org.apache.hadoop.fs.FileSystem;
+049import org.apache.hadoop.fs.Path;
+050import org.apache.hadoop.hbase.HBaseConfiguration;
+051import org.apache.hadoop.hbase.TableName;
+052import org.apache.hadoop.hbase.backup.BackupAdmin;
+053import org.apache.hadoop.hbase.backup.BackupInfo;
+054import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
+055import org.apache.hadoop.hbase.backup.BackupRequest;
+056import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
+057import org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
+058import org.apache.hadoop.hbase.backup.BackupType;
+059import org.apache.hadoop.hbase.backup.util.BackupSet;
+060import org.apache.hadoop.hbase.backup.util.BackupUtils;
+061import org.apache.hadoop.hbase.classification.InterfaceAudience;
+062import org.apache.hadoop.hbase.client.Connection;
+063import org.apache.hadoop.hbase.client.ConnectionFactory;
+064import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+065import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+066
+067/**
+068 * General backup commands, options and usage messages
+069 */
+070
+071@InterfaceAudience.Private
+072public final class BackupCommands {
 073
-074  public static final String USAGE = "Usage: hbase backup COMMAND [command-specific arguments]\n"
-075      + "where COMMAND is one of:\n" + "  create     create a new backup image\n"
-076      + "  delete     delete an existing backup image\n"
-077      + "  describe   show the detailed information of a backup image\n"
-078      + "  history    show history of all successful backups\n"
-079      + "  progress   show the progress of the latest backup request\n"
-080      + "  set        backup set management\n"
-081      + "  

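The two new static imports are the only visible change in this excerpt: BackupCommands gains a yarn-queue option for the MapReduce-backed backup commands. Below is a sketch of how such an option is typically wired with the commons-cli classes already imported above; the constant values are assumptions (the real ones live in BackupRestoreConstants), and only the option names come from the imports.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;

final class QueueOptionSketch {
  // Assumed values; the real constants are defined in BackupRestoreConstants.
  static final String OPTION_YARN_QUEUE_NAME = "q";
  static final String OPTION_YARN_QUEUE_NAME_DESC =
      "Yarn queue name to run the backup job on";

  static void addQueueOption(Options opts) {
    // true = the option takes an argument (the queue name).
    opts.addOption(OPTION_YARN_QUEUE_NAME, true, OPTION_YARN_QUEUE_NAME_DESC);
  }

  static String queueOf(CommandLine cmd, String defaultQueue) {
    return cmd.hasOption(OPTION_YARN_QUEUE_NAME)
        ? cmd.getOptionValue(OPTION_YARN_QUEUE_NAME)
        : defaultQueue;
  }
}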
[15/22] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b9830530/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.ExecuteProceduresRemoteCall.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.ExecuteProceduresRemoteCall.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.ExecuteProceduresRemoteCall.html
index f0a9b50..13cebd8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.ExecuteProceduresRemoteCall.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.ExecuteProceduresRemoteCall.html
@@ -69,7 +69,7 @@
 061      "hbase.regionserver.rpc.startup.waittime";
 062  private static final int DEFAULT_RS_RPC_STARTUP_WAIT_TIME = 60000;
 063
-064  private static final int RS_VERSION_WITH_EXEC_PROCS = 0x0201000; // 2.1
+064  private static final int RS_VERSION_WITH_EXEC_PROCS = 0x0200000; // 2.0
 065
 066  protected final MasterServices master;
 067  protected final long rsStartupWaitTime;
@@ -267,286 +267,289 @@
 259      final MasterProcedureEnv env = master.getMasterProcedureExecutor().getEnvironment();
 260
 261      request = ExecuteProceduresRequest.newBuilder();
-262      splitAndResolveOperation(getServerName(), operations, this);
-263
-264      try {
-265        final ExecuteProceduresResponse response = sendRequest(getServerName(), request.build());
-266        remoteCallCompleted(env, response);
-267      } catch (IOException e) {
-268        e = unwrapException(e);
-269        // TODO: In the future some operation may want to bail out early.
-270        // TODO: How many times should we retry (use numberOfAttemptsSoFar)
-271        if (!scheduleForRetry(e)) {
-272          remoteCallFailed(env, e);
-273        }
-274      }
-275      return null;
-276    }
-277
-278    public void dispatchOpenRequests(final MasterProcedureEnv env,
-279        final List<RegionOpenOperation> operations) {
-280      request.addOpenRegion(buildOpenRegionRequest(env, getServerName(), operations));
-281    }
-282
-283    public void dispatchCloseRequests(final MasterProcedureEnv env,
-284        final List<RegionCloseOperation> operations) {
-285      for (RegionCloseOperation op: operations) {
-286        request.addCloseRegion(op.buildCloseRegionRequest(getServerName()));
-287      }
-288    }
-289
-290    protected ExecuteProceduresResponse sendRequest(final ServerName serverName,
-291        final ExecuteProceduresRequest request) throws IOException {
-292      try {
-293        return getRsAdmin().executeProcedures(null, request);
-294      } catch (ServiceException se) {
-295        throw ProtobufUtil.getRemoteException(se);
-296      }
-297    }
-298
-299
-300    private void remoteCallCompleted(final MasterProcedureEnv env,
-301        final ExecuteProceduresResponse response) {
-302      /*
-303      for (RemoteProcedure proc: operations) {
-304        proc.remoteCallCompleted(env, getServerName(), response);
-305      }*/
-306    }
-307
-308    private void remoteCallFailed(final MasterProcedureEnv env, final IOException e) {
-309      for (RemoteProcedure proc: operations) {
-310        proc.remoteCallFailed(env, getServerName(), e);
-311      }
-312    }
-313  }
-314
-315  // ==========================================================================
-316  //  Compatibility calls
-317  //  Since we don't have a "batch proc-exec" request on the target RS
-318  //  we have to chunk the requests by type and dispatch the specific request.
-319  // ==========================================================================
-320  private static OpenRegionRequest buildOpenRegionRequest(final MasterProcedureEnv env,
-321      final ServerName serverName, final List<RegionOpenOperation> operations) {
-322    final OpenRegionRequest.Builder builder = OpenRegionRequest.newBuilder();
-323    builder.setServerStartCode(serverName.getStartcode());
-324    builder.setMasterSystemTime(EnvironmentEdgeManager.currentTime());
-325    for (RegionOpenOperation op: operations) {
-326      builder.addOpenInfo(op.buildRegionOpenInfoRequest(env));
-327    }
-328    return builder.build();
-329  }
-330
-331  private final class OpenRegionRemoteCall extends AbstractRSRemoteCall {
-332    private final List<RegionOpenOperation> operations;
+262      if (LOG.isTraceEnabled()) {
+263        LOG.trace("Building request with operations count=" + operations.size());
+264      }
+265      splitAndResolveOperation(getServerName(), operations, this);
+266
+267      try {
+268        final ExecuteProceduresResponse response = sendRequest(getServerName(), request.build());
+269        remoteCallCompleted(env, response);
+270      } catch (IOException e) {
+271        e = unwrapException(e);
+272        // TODO: In the future some

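Beyond the version-constant fix and the new trace line, the hunk shows the dispatcher's send-and-retry skeleton: build the batched request, send it, unwrap any IOException, then either schedule a retry or fail the procedures. A distilled sketch of that control flow follows; every member here is a stand-in for the dispatcher's real fields and methods, only the structure mirrors the code above.

import java.io.IOException;

abstract class RemoteCallSketch<REQ, RESP> {
  /** Mirrors ExecuteProceduresRemoteCall.call(): one attempt, retry on failure. */
  final Void call(REQ request) {
    try {
      RESP response = sendRequest(request);
      remoteCallCompleted(response);
    } catch (IOException e) {
      e = unwrapException(e); // strip the RPC wrapper first
      if (!scheduleForRetry(e)) { // fail only when retries are exhausted
        remoteCallFailed(e);
      }
    }
    return null;
  }

  abstract RESP sendRequest(REQ request) throws IOException;
  abstract IOException unwrapException(IOException e);
  abstract boolean scheduleForRetry(IOException e);
  abstract void remoteCallCompleted(RESP response);
  abstract void remoteCallFailed(IOException e);
}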
[15/22] hbase-site git commit: Published site at da5fb27eabed4a4b4d251be973ee945fb52895bf.

2017-04-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7b1830cf/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
index 2a09edb..b6fb3b4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
@@ -37,131 +37,131 @@
 029import java.util.Comparator;
 030import java.util.LinkedList;
 031import java.util.List;
-032import java.util.Random;
-033
-034import org.apache.commons.cli.CommandLine;
-035import org.apache.commons.cli.Option;
-036import org.apache.commons.logging.Log;
-037import org.apache.commons.logging.LogFactory;
-038import org.apache.hadoop.hbase.classification.InterfaceAudience;
-039import org.apache.hadoop.conf.Configuration;
-040import org.apache.hadoop.fs.FSDataInputStream;
-041import org.apache.hadoop.fs.FSDataOutputStream;
-042import org.apache.hadoop.fs.FileChecksum;
-043import org.apache.hadoop.fs.FileStatus;
-044import org.apache.hadoop.fs.FileSystem;
-045import org.apache.hadoop.fs.FileUtil;
-046import org.apache.hadoop.fs.Path;
-047import org.apache.hadoop.fs.permission.FsPermission;
-048import org.apache.hadoop.hbase.TableName;
-049import org.apache.hadoop.hbase.HBaseConfiguration;
-050import org.apache.hadoop.hbase.HConstants;
-051import org.apache.hadoop.hbase.HRegionInfo;
-052import org.apache.hadoop.hbase.io.FileLink;
-053import org.apache.hadoop.hbase.io.HFileLink;
-054import org.apache.hadoop.hbase.io.WALLink;
-055import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-056import org.apache.hadoop.hbase.mob.MobUtils;
-057import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
-058import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
-059import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
-060import org.apache.hadoop.hbase.util.AbstractHBaseTool;
-061import org.apache.hadoop.hbase.util.FSUtils;
-062import org.apache.hadoop.hbase.util.HFileArchiveUtil;
-063import org.apache.hadoop.hbase.util.Pair;
-064import org.apache.hadoop.io.BytesWritable;
-065import org.apache.hadoop.io.IOUtils;
-066import org.apache.hadoop.io.NullWritable;
-067import org.apache.hadoop.io.Writable;
-068import org.apache.hadoop.mapreduce.Job;
-069import org.apache.hadoop.mapreduce.JobContext;
-070import org.apache.hadoop.mapreduce.Mapper;
-071import org.apache.hadoop.mapreduce.InputFormat;
-072import org.apache.hadoop.mapreduce.InputSplit;
-073import org.apache.hadoop.mapreduce.RecordReader;
-074import org.apache.hadoop.mapreduce.TaskAttemptContext;
-075import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
-076import org.apache.hadoop.mapreduce.security.TokenCache;
-077import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;
-078import org.apache.hadoop.util.StringUtils;
-079import org.apache.hadoop.util.Tool;
-080
-081/**
-082 * Export the specified snapshot to a given FileSystem.
-083 *
-084 * The .snapshot/name folder is copied to the destination cluster
-085 * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.
-086 * When everything is done, the second cluster can restore the snapshot.
-087 */
-088@InterfaceAudience.Public
-089public class ExportSnapshot extends AbstractHBaseTool implements Tool {
-090  public static final String NAME = "exportsnapshot";
-091  /** Configuration prefix for overrides for the source filesystem */
-092  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";
-093  /** Configuration prefix for overrides for the destination filesystem */
-094  public static final String CONF_DEST_PREFIX = NAME + ".to.";
-095
-096  private static final Log LOG = LogFactory.getLog(ExportSnapshot.class);
-097
-098  private static final String MR_NUM_MAPS = "mapreduce.job.maps";
-099  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";
-100  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";
-101  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";
-102  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";
-103  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";
-104  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";
-105  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";
-106  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";
-107  private static final String CONF_INPUT_ROOT =
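The class comment above describes the whole flow: the snapshot manifest folder is copied to the destination cluster, the hfiles/wals fan out through a MapReduce job into the archive location, and the second cluster can then restore the snapshot. A minimal driver using the tool's usual flags follows; the snapshot name and destination URI are placeholders.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
import org.apache.hadoop.util.ToolRunner;

public final class ExportOneSnapshot {
  public static void main(String[] args) throws Exception {
    // ExportSnapshot is a Hadoop Tool (see the class declaration above),
    // so ToolRunner also handles generic -D configuration overrides.
    int rc = ToolRunner.run(HBaseConfiguration.create(), new ExportSnapshot(),
        new String[] { "-snapshot", "MySnapshot",
            "-copy-to", "hdfs://dest-cluster:8020/hbase" });
    System.exit(rc);
  }
}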