[1/2] git commit: HBASE-11326 Use an InputFormat for ExportSnapshot

2014-08-22 Thread mbertozzi
Repository: hbase
Updated Branches:
  refs/heads/0.98 38deb4b82 -> 333ea48ae


HBASE-11326 Use an InputFormat for ExportSnapshot


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/333ea48a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/333ea48a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/333ea48a

Branch: refs/heads/0.98
Commit: 333ea48ae2fef96a9d55ba5f920e4db2274614f7
Parents: 488afeb
Author: Matteo Bertozzi matteo.berto...@cloudera.com
Authored: Thu Jun 12 09:06:00 2014 +0100
Committer: Matteo Bertozzi matteo.berto...@cloudera.com
Committed: Fri Aug 22 10:17:20 2014 +0100

--
 .../hadoop/hbase/snapshot/ExportSnapshot.java   | 326 ---
 .../hbase/snapshot/TestExportSnapshot.java  |  21 +-
 2 files changed, 215 insertions(+), 132 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/333ea48a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 0639218..4a25a3c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.snapshot;
 
 import java.io.BufferedInputStream;
 import java.io.FileNotFoundException;
+import java.io.DataInput;
+import java.io.DataOutput;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
@@ -63,8 +65,14 @@ import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.mapreduce.security.TokenCache;
@@ -84,6 +92,10 @@ import org.apache.hadoop.util.ToolRunner;
 public class ExportSnapshot extends Configured implements Tool {
   private static final Log LOG = LogFactory.getLog(ExportSnapshot.class);
 
+  private static final String MR_NUM_MAPS = "mapreduce.job.maps";
+  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";
+  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";
+  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";
   private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";
   private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";
   private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";
@@ -462,19 +474,23 @@ public class ExportSnapshot extends Configured implements Tool {
 }
   }
 
+  // ==
+  //  Input Format
+  // ==
+
   /**
* Extract the list of files (HFiles/HLogs) to copy using Map-Reduce.
* @return list of files referenced by the snapshot (pair of path and size)
*/
-  private List<Pair<SnapshotFileInfo, Long>> getSnapshotFiles(final FileSystem fs,
-      final Path snapshotDir) throws IOException {
+  private static List<Pair<SnapshotFileInfo, Long>> getSnapshotFiles(final Configuration conf,
+      final FileSystem fs, final Path snapshotDir) throws IOException {
     SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
 
     final List<Pair<SnapshotFileInfo, Long>> files = new ArrayList<Pair<SnapshotFileInfo, Long>>();
 final TableName table = TableName.valueOf(snapshotDesc.getTable());
-final Configuration conf = getConf();
 
 // Get snapshot files
+    LOG.info("Loading Snapshot '" + snapshotDesc.getName() + "' hfile list");
     SnapshotReferenceUtil.visitReferencedFiles(conf, fs, snapshotDir, snapshotDesc,
       new SnapshotReferenceUtil.SnapshotVisitor() {
         @Override
@@ -492,7 +508,12 @@ public class ExportSnapshot extends Configured implements Tool {
   .setHfile(path.toString())
   .build();
 
-long size = new HFileLink(conf, path).getFileStatus(fs).getLen();
+   
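
The diff above is truncated in this archive. As a rough illustration of the approach described
by the commit title, where each map task receives its share of the snapshot file list through a
custom InputFormat and InputSplit rather than a pre-generated SequenceFile, here is a minimal,
hedged sketch. The class name, field layout, and method bodies are illustrative only, not the
committed implementation:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/** Sketch: an InputFormat whose splits carry a pre-computed group of snapshot file entries. */
public class FileListInputFormat extends InputFormat<BytesWritable, NullWritable> {

  /** A split is just the serialized set of files assigned to one mapper. */
  public static class FileListSplit extends InputSplit implements Writable {
    private List<byte[]> files = new ArrayList<byte[]>();

    public FileListSplit() {}                         // required for deserialization
    public FileListSplit(List<byte[]> files) { this.files = files; }

    @Override public long getLength() { return files.size(); }
    @Override public String[] getLocations() { return new String[0]; }

    @Override public void write(DataOutput out) throws IOException {
      out.writeInt(files.size());
      for (byte[] f : files) { out.writeInt(f.length); out.write(f); }
    }

    @Override public void readFields(DataInput in) throws IOException {
      int n = in.readInt();
      files = new ArrayList<byte[]>(n);
      for (int i = 0; i < n; i++) { byte[] f = new byte[in.readInt()]; in.readFully(f); files.add(f); }
    }
  }

  @Override
  public List<InputSplit> getSplits(JobContext context) throws IOException {
    // The real tool would build the file list (see getSnapshotFiles above) and balance it by
    // size across the configured number of splits; this sketch emits a single empty split.
    List<InputSplit> splits = new ArrayList<InputSplit>();
    splits.add(new FileListSplit(new ArrayList<byte[]>()));
    return splits;
  }

  @Override
  public RecordReader<BytesWritable, NullWritable> createRecordReader(InputSplit split,
      TaskAttemptContext context) {
    final FileListSplit fileSplit = (FileListSplit) split;
    return new RecordReader<BytesWritable, NullWritable>() {
      private int index = -1;
      @Override public void initialize(InputSplit s, TaskAttemptContext c) {}
      @Override public boolean nextKeyValue() { return ++index < fileSplit.files.size(); }
      @Override public BytesWritable getCurrentKey() { return new BytesWritable(fileSplit.files.get(index)); }
      @Override public NullWritable getCurrentValue() { return NullWritable.get(); }
      @Override public float getProgress() {
        return fileSplit.files.isEmpty() ? 1.0f : (float) index / fileSplit.files.size();
      }
      @Override public void close() {}
    };
  }
}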

[2/2] git commit: HBASE-11450 Improve file size info in SnapshotInfo tool (addendum)

2014-08-22 Thread mbertozzi
HBASE-11450 Improve file size info in SnapshotInfo tool (addendum)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/488afeb4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/488afeb4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/488afeb4

Branch: refs/heads/0.98
Commit: 488afeb45cceaa51bc9302814c6661ec148b9d96
Parents: 38deb4b
Author: Matteo Bertozzi matteo.berto...@cloudera.com
Authored: Fri Aug 22 10:16:42 2014 +0100
Committer: Matteo Bertozzi matteo.berto...@cloudera.com
Committed: Fri Aug 22 10:17:20 2014 +0100

--
 .../hadoop/hbase/snapshot/SnapshotInfo.java | 36 
 1 file changed, 29 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/488afeb4/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index fd41edc..de25394 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -77,10 +77,12 @@ public final class SnapshotInfo extends Configured implements Tool {
   public static class SnapshotStats {
 /** Information about the file referenced by the snapshot */
 static class FileInfo {
+  private final boolean corrupted;
   private final boolean inArchive;
   private final long size;
 
-  FileInfo(final boolean inArchive, final long size) {
+      FileInfo(final boolean inArchive, final long size, final boolean corrupted) {
+this.corrupted = corrupted;
 this.inArchive = inArchive;
 this.size = size;
   }
@@ -90,6 +92,11 @@ public final class SnapshotInfo extends Configured implements Tool {
 return this.inArchive;
   }
 
+  /** @return true if the file is corrupted */
+  public boolean isCorrupted() {
+return this.corrupted;
+  }
+
   /** @return true if the file is missing */
   public boolean isMissing() {
        return this.size < 0;
@@ -101,6 +108,7 @@ public final class SnapshotInfo extends Configured implements Tool {
   }
 
   String getStateToString() {
+        if (isCorrupted()) return "CORRUPTED";
         if (isMissing()) return "NOT FOUND";
         if (inArchive()) return "archive";
         return null;
@@ -108,6 +116,7 @@ public final class SnapshotInfo extends Configured implements Tool {
 }
 
 private AtomicInteger hfileArchiveCount = new AtomicInteger();
+private AtomicInteger hfilesCorrupted = new AtomicInteger();
 private AtomicInteger hfilesMissing = new AtomicInteger();
 private AtomicInteger hfilesCount = new AtomicInteger();
 private AtomicInteger logsMissing = new AtomicInteger();
@@ -136,7 +145,9 @@ public final class SnapshotInfo extends Configured implements Tool {
 
 /** @return true if the snapshot is corrupted */
 public boolean isSnapshotCorrupted() {
-      return hfilesMissing.get() > 0 || logsMissing.get() > 0;
+      return hfilesMissing.get() > 0 ||
+             logsMissing.get() > 0 ||
+             hfilesCorrupted.get() > 0;
 }
 
 /** @return the number of available store files */
@@ -159,6 +170,11 @@ public final class SnapshotInfo extends Configured implements Tool {
   return hfilesMissing.get();
 }
 
+/** @return the number of corrupted store files */
+public int getCorruptedStoreFilesCount() {
+  return hfilesCorrupted.get();
+}
+
 /** @return the number of missing log files */
 public int getMissingLogsCount() {
   return logsMissing.get();
@@ -200,6 +216,7 @@ public final class SnapshotInfo extends Configured implements Tool {
         final SnapshotRegionManifest.StoreFile storeFile) throws IOException {
       HFileLink link = HFileLink.create(conf, snapshotTable, region.getEncodedName(),
         family, storeFile.getName());
+  boolean isCorrupted = false;
   boolean inArchive = false;
   long size = -1;
   try {
@@ -212,10 +229,12 @@ public final class SnapshotInfo extends Configured implements Tool {
   hfileSize.addAndGet(size);
   hfilesCount.incrementAndGet();
 }
+        isCorrupted = (storeFile.hasFileSize() && storeFile.getFileSize() != size);
+        if (isCorrupted) hfilesCorrupted.incrementAndGet();
   } catch (FileNotFoundException e) {
 hfilesMissing.incrementAndGet();
   }
-  return new FileInfo(inArchive, size);
+  return new FileInfo(inArchive, size, isCorrupted);
 }
 
 /**
@@ -234,7 +253,7 @@ public 
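
To show how the new corruption accounting surfaces to callers, here is a small, hedged example.
It assumes the getSnapshotStats(Configuration, SnapshotDescription) helper and the accessor
names exposed by SnapshotInfo at the time; the snapshot name is a placeholder:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.SnapshotInfo;

public class SnapshotHealthCheck {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // "mySnapshot" is illustrative; pass the name of an existing snapshot.
    SnapshotDescription snapshot = SnapshotDescription.newBuilder().setName("mySnapshot").build();
    SnapshotInfo.SnapshotStats stats = SnapshotInfo.getSnapshotStats(conf, snapshot);
    // With this addendum, isSnapshotCorrupted() also accounts for size-mismatched hfiles.
    if (stats.isSnapshotCorrupted()) {
      System.err.println("corrupted store files: " + stats.getCorruptedStoreFilesCount()
          + ", missing store files: " + stats.getMissingStoreFilesCount());
    }
  }
}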

git commit: HBASE-11802 Scan copy constructor doesn't copy reversed member variable

2014-08-22 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/master 3b57f28da -> 6f00f859a


HBASE-11802 Scan copy constructor doesn't copy reversed member variable


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6f00f859
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6f00f859
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6f00f859

Branch: refs/heads/master
Commit: 6f00f859a75d617924158b4b40aaa8618dd1a6ae
Parents: 3b57f28
Author: Ramkrishna ramkrishna.s.vasude...@intel.com
Authored: Fri Aug 22 22:03:57 2014 +0530
Committer: Ramkrishna ramkrishna.s.vasude...@intel.com
Committed: Fri Aug 22 22:03:57 2014 +0530

--
 .../org/apache/hadoop/hbase/client/Scan.java | 19 ++-
 1 file changed, 10 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6f00f859/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 74bf37f..d42433d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -19,6 +19,15 @@
 
 package org.apache.hadoop.hbase.client;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -29,15 +38,6 @@ import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableSet;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
 /**
  * Used to perform Scan operations.
 * <p>
@@ -210,6 +210,7 @@ public class Scan extends Query {
 filter = scan.getFilter(); // clone?
 loadColumnFamiliesOnDemand = scan.getLoadColumnFamiliesOnDemandValue();
 consistency = scan.getConsistency();
+reversed = scan.isReversed();
 TimeRange ctr = scan.getTimeRange();
 tr = new TimeRange(ctr.getMin(), ctr.getMax());
     Map<byte[], NavigableSet<byte[]>> fams = scan.getFamilyMap();
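
A small example of the behavior this patch fixes follows; the column family name is
illustrative. Before the patch the copy constructor silently dropped the reversed flag, so the
copy scanned forward even when the original was a reverse scan:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ReversedScanCopy {
  public static void main(String[] args) throws Exception {
    Scan original = new Scan();
    original.setReversed(true);
    original.addFamily(Bytes.toBytes("cf"));

    // The copy constructor patched by HBASE-11802.
    Scan copy = new Scan(original);
    // With the fix, the copy preserves the reversed flag of the original scan.
    System.out.println("reversed preserved: " + copy.isReversed());
  }
}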



git commit: HBASE-11802 Scan copy constructor doesn't copy reversed member variable

2014-08-22 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/branch-1 600bee44b -> 6aba7cf40


HBASE-11802 Scan copy constructor doesn't copy reversed member variable


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6aba7cf4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6aba7cf4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6aba7cf4

Branch: refs/heads/branch-1
Commit: 6aba7cf40ee57e13dec68773bb5565183332b448
Parents: 600bee4
Author: Ramkrishna ramkrishna.s.vasude...@intel.com
Authored: Fri Aug 22 22:05:37 2014 +0530
Committer: Ramkrishna ramkrishna.s.vasude...@intel.com
Committed: Fri Aug 22 22:05:37 2014 +0530

--
 .../org/apache/hadoop/hbase/client/Scan.java | 19 ++-
 1 file changed, 10 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6aba7cf4/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 74bf37f..d42433d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -19,6 +19,15 @@
 
 package org.apache.hadoop.hbase.client;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -29,15 +38,6 @@ import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableSet;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
 /**
  * Used to perform Scan operations.
 * <p>
@@ -210,6 +210,7 @@ public class Scan extends Query {
 filter = scan.getFilter(); // clone?
 loadColumnFamiliesOnDemand = scan.getLoadColumnFamiliesOnDemandValue();
 consistency = scan.getConsistency();
+reversed = scan.isReversed();
 TimeRange ctr = scan.getTimeRange();
 tr = new TimeRange(ctr.getMin(), ctr.getMax());
     Map<byte[], NavigableSet<byte[]>> fams = scan.getFamilyMap();



git commit: HBASE-11802 - Scan copy constructor doesn't copy reversed member variable

2014-08-22 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/0.98 333ea48ae -> b21dcbf32


HBASE-11802 - Scan copy constructor doesn't copy reversed member variable


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b21dcbf3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b21dcbf3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b21dcbf3

Branch: refs/heads/0.98
Commit: b21dcbf3216b69c192b499a090ee71552226a3f8
Parents: 333ea48
Author: Ramkrishna ramkrishna.s.vasude...@intel.com
Authored: Fri Aug 22 22:09:58 2014 +0530
Committer: Ramkrishna ramkrishna.s.vasude...@intel.com
Committed: Fri Aug 22 22:09:58 2014 +0530

--
 .../org/apache/hadoop/hbase/client/Scan.java | 19 ++-
 1 file changed, 10 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b21dcbf3/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index bf968df..4527d56 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -19,6 +19,15 @@
 
 package org.apache.hadoop.hbase.client;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -29,15 +38,6 @@ import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableSet;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
 /**
  * Used to perform Scan operations.
 * <p>
@@ -211,6 +211,7 @@ public class Scan extends Query {
 loadColumnFamiliesOnDemand = scan.getLoadColumnFamiliesOnDemandValue();
 TimeRange ctr = scan.getTimeRange();
 tr = new TimeRange(ctr.getMin(), ctr.getMax());
+reversed = scan.isReversed();
     Map<byte[], NavigableSet<byte[]>> fams = scan.getFamilyMap();
     for (Map.Entry<byte[],NavigableSet<byte[]>> entry : fams.entrySet()) {
   byte [] fam = entry.getKey();



git commit: HBASE-11779 Document the new requirement to set JAVA_HOME before starting HBase (Misty Stanley-Jones)

2014-08-22 Thread ndimiduk
Repository: hbase
Updated Branches:
  refs/heads/master 6f00f859a -> a2fc3efeb


HBASE-11779 Document the new requirement to set JAVA_HOME before starting HBase (Misty Stanley-Jones)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a2fc3efe
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a2fc3efe
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a2fc3efe

Branch: refs/heads/master
Commit: a2fc3efebfb277d2e57712a4c5b210a01fd7d5c8
Parents: 6f00f85
Author: Nick Dimiduk ndimi...@apache.org
Authored: Fri Aug 22 10:28:47 2014 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Fri Aug 22 10:29:02 2014 -0700

--
 src/main/docbkx/configuration.xml   | 37 
 src/main/docbkx/getting_started.xml | 26 +++---
 2 files changed, 42 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a2fc3efe/src/main/docbkx/configuration.xml
--
diff --git a/src/main/docbkx/configuration.xml b/src/main/docbkx/configuration.xml
index 5949b0a..898aa85 100644
--- a/src/main/docbkx/configuration.xml
+++ b/src/main/docbkx/configuration.xml
@@ -62,6 +62,11 @@
         <para>Script for Windows and Linux / Unix environments to set up the working environment for
           HBase, including the location of Java, Java options, and other environment variables. The
           file contains many commented-out examples to provide guidance.</para>
+        <note>
+          <para>In HBase 0.98.5 and newer, you must set <envar>JAVA_HOME</envar> on each node of
+            your cluster. <filename>hbase-env.sh</filename> provides a handy mechanism to do
+            this.</para>
+        </note>
       </listitem>
     </varlistentry>
     <varlistentry>
@@ -177,6 +182,12 @@
       </tgroup>
     </table>
 
+    <note>
+      <para>In HBase 0.98.5 and newer, you must set <envar>JAVA_HOME</envar> on each node of
+        your cluster. <filename>hbase-env.sh</filename> provides a handy mechanism to do
+        this.</para>
+    </note>
+
     <variablelist
       xml:id="os">
       <title>Operating System Utilities</title>
@@ -1187,27 +1198,17 @@ example9
     xml:id="hbase_env">
     <title><filename>hbase-env.sh</filename></title>
 
-    <para>Below we use a <command>diff</command> to show the differences from default in the
-        <filename>hbase-env.sh</filename> file. Here we are setting the HBase heap to be 4G
-      instead of the default 1G.</para>
+    <para>The following lines in the <filename>hbase-env.sh</filename> file show how to set the
+        <envar>JAVA_HOME</envar> environment variable (required for HBase 0.98.5 and newer) and
+      set the heap to 4 GB (rather than the default value of 1 GB). If you copy and paste this
+      example, be sure to adjust the <envar>JAVA_HOME</envar> to suit your environment.</para>
 
     <screen language="bourne">
+# The java implementation to use.
+export JAVA_HOME=/usr/java/jdk1.7.0/
 
-$ git diff hbase-env.sh
-diff --git a/conf/hbase-env.sh b/conf/hbase-env.sh
-index e70ebc6..96f8c27 100644
---- a/conf/hbase-env.sh
-+++ b/conf/hbase-env.sh
-@@ -31,7 +31,7 @@ export JAVA_HOME=/usr/lib//jvm/java-6-sun/
- # export HBASE_CLASSPATH=
-
- # The maximum amount of heap to use, in MB. Default is 1000.
--# export HBASE_HEAPSIZE=1000
-+export HBASE_HEAPSIZE=4096
-
- # Extra Java runtime options.
- # Below are what we set by default.  May only work with SUN JVM.
-
+# The maximum amount of heap to use, in MB. Default is 1000.
+export HBASE_HEAPSIZE=4096
 </screen>
 
     <para>Use <command>rsync</command> to copy the content of the <filename>conf</filename>

http://git-wip-us.apache.org/repos/asf/hbase/blob/a2fc3efe/src/main/docbkx/getting_started.xml
--
diff --git a/src/main/docbkx/getting_started.xml b/src/main/docbkx/getting_started.xml
index b1ff0ea..2e0ee4e 100644
--- a/src/main/docbkx/getting_started.xml
+++ b/src/main/docbkx/getting_started.xml
@@ -33,9 +33,9 @@
   <section>
     <title>Introduction</title>
 
-    <para><xref
-        linkend="quickstart" /> will get you up and running on a single-node, standalone instance of
-      HBase. </para>
+    <para><xref linkend="quickstart"/> will get you up and running on a single-node, standalone
+      instance of HBase, followed by a pseudo-distributed single-machine instance, and finally a
+      fully-distributed cluster. </para>
   </section>
 
   <section>
@@ -117,6 +117,26 @@ $ cd hbase-<![CDATA[<?eval ${project.version}?>]]>-hadoop2/
 </screen>
         </step>
         <step>
+          <para>For HBase 0.98.5 and later, you are required to set the <envar>JAVA_HOME</envar>
+            environment variable before starting HBase. Prior to 0.98.5, HBase attempted to 

git commit: HBASE-11800 Make HTableInterface coprocessorService methods public

2014-08-22 Thread garyh
Repository: hbase
Updated Branches:
  refs/heads/master a2fc3efeb -> db520b94c


HBASE-11800 Make HTableInterface coprocessorService methods public


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/db520b94
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/db520b94
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/db520b94

Branch: refs/heads/master
Commit: db520b94cbf961408e606040763da8b45616c70f
Parents: a2fc3ef
Author: Gary Helmling ga...@apache.org
Authored: Fri Aug 22 11:04:28 2014 -0700
Committer: Gary Helmling ga...@apache.org
Committed: Fri Aug 22 11:06:08 2014 -0700

--
 .../apache/hadoop/hbase/client/HTableInterface.java  |  5 -
 .../hadoop/hbase/client/coprocessor/Batch.java   | 11 +--
 .../hadoop/hbase/ipc/CoprocessorRpcChannel.java  | 15 +++
 3 files changed, 16 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/db520b94/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
index c02fcee..46b13b1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
@@ -462,7 +462,6 @@ public interface HTableInterface extends Closeable {
* @param row The row key used to identify the remote region location
* @return A CoprocessorRpcChannel instance
*/
-  @InterfaceAudience.Private // TODO add coproc audience level  
   CoprocessorRpcChannel coprocessorService(byte[] row);
 
   /**
@@ -486,7 +485,6 @@ public interface HTableInterface extends Closeable {
* {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call} method
* @return a map of result values keyed by region name
*/
-  @InterfaceAudience.Private // TODO add coproc audience level
   <T extends Service, R> Map<byte[],R> coprocessorService(final Class<T> service,
       byte[] startKey, byte[] endKey, final Batch.Call<T,R> callable)
       throws ServiceException, Throwable;
@@ -518,7 +516,6 @@ public interface HTableInterface extends Closeable {
 * @param <R> Return type for the {@code callable} parameter's
* {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call} method
*/
-  @InterfaceAudience.Private // TODO add coproc audience level
   <T extends Service, R> void coprocessorService(final Class<T> service,
       byte[] startKey, byte[] endKey, final Batch.Call<T,R> callable,
       final Batch.Callback<R> callback) throws ServiceException, Throwable;
@@ -622,7 +619,6 @@ public interface HTableInterface extends Closeable {
* @throws Throwable
* @return a map of result values keyed by region name
*/
-  @InterfaceAudience.Private
   <R extends Message> Map<byte[], R> batchCoprocessorService(
       Descriptors.MethodDescriptor methodDescriptor, Message request,
       byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable;
@@ -658,7 +654,6 @@ public interface HTableInterface extends Closeable {
* @throws ServiceException
* @throws Throwable
*/
-  @InterfaceAudience.Private
   <R extends Message> void batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor,
       Message request, byte[] startKey, byte[] endKey, R responsePrototype,
       Batch.Callback<R> callback) throws ServiceException, Throwable;

http://git-wip-us.apache.org/repos/asf/hbase/blob/db520b94/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
index 41d7b65..3b175a8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
@@ -19,16 +19,17 @@
 
 package org.apache.hadoop.hbase.client.coprocessor;
 
-import java.io.IOException;
-
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 
+import java.io.IOException;
 
 /**
 * A collection of interfaces and utilities used for interacting with custom RPC
  * interfaces exposed by Coprocessors.
  */
-@InterfaceAudience.Private
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class Batch {
   /**
* Defines a unit of work to be executed.
@@ -37,9 +38,7 @@ public abstract class Batch 
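
With these methods now part of the public client API, a usage sketch may help. This hedged
example borrows the RowCountService endpoint from the hbase-examples module (ExampleProtos);
the table name is illustrative and the endpoint coprocessor must already be deployed on the
table's regions:

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;

public class RowCountClient {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, TableName.valueOf("mytable"));  // "mytable" is illustrative
    try {
      final ExampleProtos.CountRequest request = ExampleProtos.CountRequest.getDefaultInstance();
      // One RPC per region between the (null, null) start/end keys; results are keyed by region name.
      Map<byte[], Long> results = table.coprocessorService(ExampleProtos.RowCountService.class,
          null, null,
          new Batch.Call<ExampleProtos.RowCountService, Long>() {
            @Override
            public Long call(ExampleProtos.RowCountService counter) throws IOException {
              BlockingRpcCallback<ExampleProtos.CountResponse> rpcCallback =
                  new BlockingRpcCallback<ExampleProtos.CountResponse>();
              counter.getRowCount(null, request, rpcCallback);
              ExampleProtos.CountResponse response = rpcCallback.get();
              return (response != null && response.hasCount()) ? response.getCount() : 0L;
            }
          });
      long total = 0;
      for (Long regionCount : results.values()) {
        total += regionCount;
      }
      System.out.println("row count: " + total);
    } finally {
      table.close();
    }
  }
}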

git commit: Add note that 0.96 is EOL'd

2014-08-22 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master db520b94c -> f8b4b2290


Add note that 0.96 is EOL'd


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f8b4b229
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f8b4b229
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f8b4b229

Branch: refs/heads/master
Commit: f8b4b2290d4cfbdd06d60904f11ad4a8ba538fc8
Parents: db520b9
Author: stack st...@apache.org
Authored: Fri Aug 22 15:28:10 2014 -0700
Committer: stack st...@apache.org
Committed: Fri Aug 22 15:28:10 2014 -0700

--
 src/main/docbkx/upgrading.xml | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f8b4b229/src/main/docbkx/upgrading.xml
--
diff --git a/src/main/docbkx/upgrading.xml b/src/main/docbkx/upgrading.xml
--- a/src/main/docbkx/upgrading.xml
+++ b/src/main/docbkx/upgrading.xml
@@ -145,6 +145,10 @@
         xml:id="upgrade0.96">
         <title>Upgrading from 0.94.x to 0.96.x</title>
         <subtitle>The Singularity</subtitle>
+        <note><title>HBase 0.96.x was EOL'd, September 1st, 2014</title><para>
+        Do not deploy 0.96.x. Deploy a 0.98.x at least.
+        See <link xlink:href="https://issues.apache.org/jira/browse/HBASE-11642">EOL 0.96</link>.
+        </para></note>
         <para>You will have to stop your old 0.94.x cluster completely to upgrade. If you are
             replicating between clusters, both clusters will have to go down to upgrade. Make sure
             it is a clean shutdown. The less WAL files around, the faster the upgrade will run (the



git commit: HBASE-11323 Add MultiRowMutation tests. (Liu Shaohui)

2014-08-22 Thread larsh
Repository: hbase
Updated Branches:
  refs/heads/0.94 817c2aac1 -> 44492624d


HBASE-11323 Add MultiRowMutation tests. (Liu Shaohui)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/44492624
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/44492624
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/44492624

Branch: refs/heads/0.94
Commit: 44492624d4b8a6cf1ce1c7ba595f3a3447f9f536
Parents: 817c2aa
Author: Lars Hofhansl la...@apache.org
Authored: Fri Aug 22 21:42:33 2014 -0700
Committer: Lars Hofhansl la...@apache.org
Committed: Fri Aug 22 21:42:33 2014 -0700

--
 .../TestMultiRowMutationProtocol.java   | 211 +++
 1 file changed, 211 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/44492624/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMultiRowMutationProtocol.java
--
diff --git a/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMultiRowMutationProtocol.java b/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMultiRowMutationProtocol.java
new file mode 100644
index 0000000..8afdb07
--- /dev/null
+++ b/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMultiRowMutationProtocol.java
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2011 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.coprocessor;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * A test class to cover multi row mutations protocol
+ */
+@Category(MediumTests.class)
+public class TestMultiRowMutationProtocol {
+
+  private static final byte[] TEST_TABLE = Bytes.toBytes("TestTable");
+  private static final byte[] TEST_FAMILY = Bytes.toBytes("TestFamily");
+  private static final byte[] INVALID_FAMILY = Bytes.toBytes("InvalidFamily");
+  private static final byte[] TEST_QUALIFIER = Bytes.toBytes("TestQualifier");
+  private static byte[] ROW = Bytes.toBytes("testRow");
+  
+  private static final int ROWSIZE = 20;
+  private static final int rowSeperator1 = 5;
+  private static final int rowSeperator2 = 12;
+  private static byte[][] ROWS = makeN(ROW, ROWSIZE);
+
+  private static HBaseTestingUtility util = new HBaseTestingUtility();
+  private static MiniHBaseCluster cluster = null;
+  
+  private HTable table = null;
+  
+  @BeforeClass
+  public static void setupBeforeClass() throws Exception {
+// set configure to indicate which cp should be loaded
+Configuration conf = util.getConfiguration();
+    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+      "org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint");
+
+util.startMiniCluster(2);
+cluster = util.getMiniHBaseCluster();
+
+HTable table = util.createTable(TEST_TABLE, TEST_FAMILY);
+util.createMultiRegions(util.getConfiguration(), table, TEST_FAMILY,
+new byte[][] { HConstants.EMPTY_BYTE_ARRAY,
+ROWS[rowSeperator1], ROWS[rowSeperator2] });
+
+    for (int i = 0; i < ROWSIZE; i++) {
+  Put put = new Put(ROWS[i]);
+  put.setWriteToWAL(false);
+  put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
+  table.put(put);
+