Repository: hbase
Updated Branches:
  refs/heads/branch-2 76199a338 -> e294dcd80


Revert "HBASE-16478 Rename WALKey in PB to WALEdit This is a rebase of Enis's 
original patch"
Not worth the difference it introduces; it means hbase-protocol can no
longer parse a WAL entry.

This reverts commit 9a2e680caeb8c6627357ff5a0963170f09e65414.
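For context on the parse-compatibility point above: the PB message restored by this
revert is the one that WAL readers and the replication sink reference by its generated
name (see the ProtobufLogReader and ReplicationSink hunks below), and, as the commit
message notes, the unshaded hbase-protocol module carries the same WALKey definition.
The sketch below is illustrative only and not part of the commit; the class name and
the standalone main() are invented, but the builder and parse calls mirror the ones
visible in this diff. It shows the build/parse round trip that keeps working when the
message keeps its WALKey name.

    // Illustrative sketch only -- not part of this commit. Class name and main() are
    // invented; the builder/parse calls mirror those in the diff below.
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey;

    public class WALKeyRoundTrip {
      public static void main(String[] args) throws Exception {
        // Build a WALKey much like TestReplicationSink#createWALEntryBuilder does,
        // filling the required fields from WAL.proto.
        WALKey key = WALKey.newBuilder()
            .setEncodedRegionName(UnsafeByteOperations.unsafeWrap(HConstants.EMPTY_BYTE_ARRAY))
            .setTableName(UnsafeByteOperations.unsafeWrap(HConstants.EMPTY_BYTE_ARRAY))
            .setLogSequenceNumber(-1)
            .setWriteTime(System.currentTimeMillis())
            .build();
        // Readers such as ProtobufLogReader rebuild the key through the same generated
        // WALKey type; keeping the message named WALKey keeps the shaded and unshaded
        // proto definitions aligned with what is already on disk.
        WALKey parsed = WALKey.parseFrom(key.toByteArray());
        System.out.println(parsed.getWriteTime());
      }
    }

In other words, the revert trades the arguably better WALEdit name for keeping every
existing consumer of the serialized WALKey, inside and outside hbase-server, able to
read WAL entries unchanged.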


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e294dcd8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e294dcd8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e294dcd8

Branch: refs/heads/branch-2
Commit: e294dcd80d2f833b6b8d2ebb088c1ccd3545212a
Parents: 76199a3
Author: Michael Stack <st...@apache.org>
Authored: Wed Sep 20 15:27:37 2017 -0700
Committer: Michael Stack <st...@apache.org>
Committed: Wed Sep 20 15:36:47 2017 -0700

----------------------------------------------------------------------
 .../src/main/protobuf/Admin.proto               |  2 +-
 .../src/main/protobuf/WAL.proto                 | 13 +++++--
 .../hbase/protobuf/ReplicationProtbufUtil.java  |  2 +-
 .../hbase/regionserver/RSRpcServices.java       | 30 +++++++++-------
 .../regionserver/wal/ProtobufLogReader.java     |  9 +++--
 .../regionserver/ReplicationSink.java           |  8 ++---
 .../org/apache/hadoop/hbase/wal/WALKey.java     | 36 ++++++++++----------
 .../apache/hadoop/hbase/wal/WALSplitter.java    | 16 ++++-----
 .../regionserver/TestReplicationSink.java       |  5 +--
 9 files changed, 67 insertions(+), 54 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/e294dcd8/hbase-protocol-shaded/src/main/protobuf/Admin.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/protobuf/Admin.proto b/hbase-protocol-shaded/src/main/protobuf/Admin.proto
index db5a3be..62aac9a 100644
--- a/hbase-protocol-shaded/src/main/protobuf/Admin.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/Admin.proto
@@ -172,7 +172,7 @@ message UpdateFavoredNodesResponse {
 
 // Protocol buffer version of WAL for replication
 message WALEntry {
-  required WALEdit edit = 1;
+  required WALKey key = 1;
   // Following may be null if the KVs/Cells are carried along the side in a cellblock (See
   // RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
   // and associated_cell_count has count of Cells associated w/ this WALEntry

http://git-wip-us.apache.org/repos/asf/hbase/blob/e294dcd8/hbase-protocol-shaded/src/main/protobuf/WAL.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/protobuf/WAL.proto b/hbase-protocol-shaded/src/main/protobuf/WAL.proto
index 4c9a171..81e5650 100644
--- a/hbase-protocol-shaded/src/main/protobuf/WAL.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/WAL.proto
@@ -34,9 +34,10 @@ message WALHeader {
 }
 
 /*
- * Protocol buffer version of WALEdit;
+ * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
+ * for some KVs
  */
-message WALEdit {
+message WALKey {
   required bytes encoded_region_name = 1;
   required bytes table_name = 2;
   required uint64 log_sequence_number = 3;
@@ -61,6 +62,14 @@ message WALEdit {
   optional uint64 nonceGroup = 9;
   optional uint64 nonce = 10;
   optional uint64 orig_sequence_number = 11;
+
+/*
+  optional CustomEntryType custom_entry_type = 9;
+
+  enum CustomEntryType {
+    COMPACTION = 0;
+  }
+*/
 }
 
 enum ScopeType {

http://git-wip-us.apache.org/repos/asf/hbase/blob/e294dcd8/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
index 96272c7..0967e94 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
@@ -109,7 +109,7 @@ public class ReplicationProtbufUtil {
     for (Entry entry: entries) {
       entryBuilder.clear();
       // TODO: this duplicates a lot in WALKey#getBuilder
-      WALProtos.WALEdit.Builder keyBuilder = entryBuilder.getEditBuilder();
+      WALProtos.WALKey.Builder keyBuilder = entryBuilder.getKeyBuilder();
       WALKey key = entry.getKey();
       keyBuilder.setEncodedRegionName(
           UnsafeByteOperations.unsafeWrap(encodedRegionName == null

http://git-wip-us.apache.org/repos/asf/hbase/blob/e294dcd8/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 61c725b..24281c7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -597,13 +597,13 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
    * @param row
    * @param family
    * @param qualifier
-   * @param op
+   * @param compareOp
    * @param comparator @throws IOException
    */
-  private boolean checkAndRowMutate(final Region region, final List<ClientProtos.Action> actions,
-      final CellScanner cellScanner, byte[] row, byte[] family, byte[] qualifier,
-      CompareOperator op, ByteArrayComparable comparator, RegionActionResult.Builder builder,
-      ActivePolicyEnforcement spaceQuotaEnforcement) throws IOException {
+                                    final CellScanner cellScanner, byte[] row, byte[] family, byte[] qualifier,
+                                    CompareOperator op, ByteArrayComparable comparator, RegionActionResult.Builder builder,
+                                    ActivePolicyEnforcement spaceQuotaEnforcement) throws IOException {
     if (!region.getRegionInfo().isMetaTable()) {
       regionServer.cacheFlusher.reclaimMemStoreMemory();
     }
@@ -648,6 +648,10 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 
   /**
    * Execute an append mutation.
+   *
+   * @param region
+   * @param m
+   * @param cellScanner
    * @return result to return to client if default operation should be
    * bypassed as indicated by RegionObserver, null otherwise
    * @throws IOException
@@ -2073,7 +2077,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
         // empty input
         return ReplicateWALEntryResponse.newBuilder().build();
       }
-      ByteString regionName = entries.get(0).getEdit().getEncodedRegionName();
+      ByteString regionName = entries.get(0).getKey().getEncodedRegionName();
       Region region = regionServer.getRegionByEncodedName(regionName.toStringUtf8());
       RegionCoprocessorHost coprocessorHost =
           ServerRegionReplicaUtil.isDefaultReplica(region.getRegionInfo())
@@ -2086,19 +2090,19 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
       Durability durability = isPrimary ? Durability.USE_DEFAULT : Durability.SKIP_WAL;
 
       for (WALEntry entry : entries) {
-        if (!regionName.equals(entry.getEdit().getEncodedRegionName())) {
+        if (!regionName.equals(entry.getKey().getEncodedRegionName())) {
           throw new NotServingRegionException("Replay request contains entries from multiple " +
               "regions. First region:" + regionName.toStringUtf8() + " , other region:"
-              + entry.getEdit().getEncodedRegionName());
+              + entry.getKey().getEncodedRegionName());
         }
         if (regionServer.nonceManager != null && isPrimary) {
-          long nonceGroup = entry.getEdit().hasNonceGroup()
-            ? entry.getEdit().getNonceGroup() : HConstants.NO_NONCE;
-          long nonce = entry.getEdit().hasNonce() ? entry.getEdit().getNonce() : HConstants.NO_NONCE;
+          long nonceGroup = entry.getKey().hasNonceGroup()
+            ? entry.getKey().getNonceGroup() : HConstants.NO_NONCE;
+          long nonce = entry.getKey().hasNonce() ? entry.getKey().getNonce() : HConstants.NO_NONCE;
           regionServer.nonceManager.reportOperationFromWal(
               nonceGroup,
               nonce,
-              entry.getEdit().getWriteTime());
+              entry.getKey().getWriteTime());
         }
         Pair<WALKey, WALEdit> walEntry = (coprocessorHost == null) ? null : new Pair<>();
         List<WALSplitter.MutationReplay> edits = WALSplitter.getMutationsFromWALEntry(entry,
@@ -2117,8 +2121,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
           // HBASE-17924
           // sort to improve lock efficiency
           Collections.sort(edits);
-          long replaySeqId = (entry.getEdit().hasOrigSequenceNumber()) ?
-            entry.getEdit().getOrigSequenceNumber() : entry.getEdit().getLogSequenceNumber();
+          long replaySeqId = (entry.getKey().hasOrigSequenceNumber()) ?
+            entry.getKey().getOrigSequenceNumber() : entry.getKey().getLogSequenceNumber();
           OperationStatus[] result = doReplayBatchOp(region, edits, replaySeqId);
           // check if it's a partial success
           for (int i = 0; result != null && i < result.length; i++) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/e294dcd8/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index 679e515..78c055e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.Builder;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALEdit;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
@@ -333,7 +333,7 @@ public class ProtobufLogReader extends ReaderBase {
         }
         return false;
       }
-      WALEdit.Builder builder = WALEdit.newBuilder();
+      WALKey.Builder builder = WALKey.newBuilder();
       long size = 0;
       try {
         long available = -1;
@@ -363,12 +363,11 @@ public class ProtobufLogReader extends ReaderBase {
           throw new EOFException("Partial PB while reading WAL, " +
               "probably an unexpected EOF, ignoring. current offset=" + 
this.inputStream.getPos());
         }
-        WALEdit walKey = builder.build();
+        WALKey walKey = builder.build();
         entry.getKey().readFieldsFromPb(walKey, this.byteStringUncompressor);
         if (!walKey.hasFollowingKvCount() || 0 == walKey.getFollowingKvCount()) {
           if (LOG.isTraceEnabled()) {
-            LOG.trace("WALKey has no KVs that follow it; trying the next one. 
current offset=" +
-              this.inputStream.getPos());
+            LOG.trace("WALKey has no KVs that follow it; trying the next one. 
current offset=" + this.inputStream.getPos());
           }
           continue;
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/e294dcd8/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
index 116f50f..0fbc74f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
@@ -159,7 +159,7 @@ public class ReplicationSink {
 
       for (WALEntry entry : entries) {
         TableName table =
-            TableName.valueOf(entry.getEdit().getTableName().toByteArray());
+            TableName.valueOf(entry.getKey().getTableName().toByteArray());
         Cell previousCell = null;
         Mutation m = null;
         int count = entry.getAssociatedCellCount();
@@ -183,8 +183,8 @@ public class ReplicationSink {
                   CellUtil.isDelete(cell) ? new Delete(cell.getRowArray(), cell.getRowOffset(),
                       cell.getRowLength()) : new Put(cell.getRowArray(), cell.getRowOffset(),
                       cell.getRowLength());
-              List<UUID> clusterIds = new ArrayList<>(entry.getEdit().getClusterIdsList().size());
-              for (HBaseProtos.UUID clusterId : entry.getEdit().getClusterIdsList()) {
+              List<UUID> clusterIds = new ArrayList<>(entry.getKey().getClusterIdsList().size());
+              for (HBaseProtos.UUID clusterId : entry.getKey().getClusterIdsList()) {
                 clusterIds.add(toUUID(clusterId));
               }
               m.setClusterIds(clusterIds);
@@ -221,7 +221,7 @@ public class ReplicationSink {
       }
 
       int size = entries.size();
-      this.metrics.setAgeOfLastAppliedOp(entries.get(size - 1).getEdit().getWriteTime());
+      this.metrics.setAgeOfLastAppliedOp(entries.get(size - 1).getKey().getWriteTime());
       this.metrics.applyBatch(size + hfilesReplicated, hfilesReplicated);
       this.totalReplicatedEdits.addAndGet(totalReplicated);
     } catch (IOException ex) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/e294dcd8/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java
index 9c696af..fd40ec4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java
@@ -539,9 +539,9 @@ public class WALKey implements SequenceId, Comparable<WALKey> {
     this.encodedRegionName = encodedRegionName;
   }
 
-  public WALProtos.WALEdit.Builder getBuilder(
+  public WALProtos.WALKey.Builder getBuilder(
       WALCellCodec.ByteStringCompressor compressor) throws IOException {
-    WALProtos.WALEdit.Builder builder = WALProtos.WALEdit.newBuilder();
+    WALProtos.WALKey.Builder builder = WALProtos.WALKey.newBuilder();
     if (compressionContext == null) {
       builder.setEncodedRegionName(UnsafeByteOperations.unsafeWrap(this.encodedRegionName));
       builder.setTableName(UnsafeByteOperations.unsafeWrap(this.tablename.getName()));
@@ -580,42 +580,42 @@ public class WALKey implements SequenceId, Comparable<WALKey> {
     return builder;
   }
 
-  public void readFieldsFromPb(WALProtos.WALEdit walEdit,
+  public void readFieldsFromPb(WALProtos.WALKey walKey,
                                WALCellCodec.ByteStringUncompressor uncompressor)
       throws IOException {
     if (this.compressionContext != null) {
       this.encodedRegionName = uncompressor.uncompress(
-          walEdit.getEncodedRegionName(), compressionContext.regionDict);
+          walKey.getEncodedRegionName(), compressionContext.regionDict);
       byte[] tablenameBytes = uncompressor.uncompress(
-      walEdit.getTableName(), compressionContext.tableDict);
+          walKey.getTableName(), compressionContext.tableDict);
       this.tablename = TableName.valueOf(tablenameBytes);
     } else {
-      this.encodedRegionName = walEdit.getEncodedRegionName().toByteArray();
-      this.tablename = TableName.valueOf(walEdit.getTableName().toByteArray());
+      this.encodedRegionName = walKey.getEncodedRegionName().toByteArray();
+      this.tablename = TableName.valueOf(walKey.getTableName().toByteArray());
     }
     clusterIds.clear();
-    for (HBaseProtos.UUID clusterId : walEdit.getClusterIdsList()) {
+    for (HBaseProtos.UUID clusterId : walKey.getClusterIdsList()) {
       clusterIds.add(new UUID(clusterId.getMostSigBits(), clusterId.getLeastSigBits()));
     }
-    if (walEdit.hasNonceGroup()) {
-      this.nonceGroup = walEdit.getNonceGroup();
+    if (walKey.hasNonceGroup()) {
+      this.nonceGroup = walKey.getNonceGroup();
     }
-    if (walEdit.hasNonce()) {
-      this.nonce = walEdit.getNonce();
+    if (walKey.hasNonce()) {
+      this.nonce = walKey.getNonce();
     }
     this.replicationScope = null;
-    if (walEdit.getScopesCount() > 0) {
+    if (walKey.getScopesCount() > 0) {
       this.replicationScope = new TreeMap<>(Bytes.BYTES_COMPARATOR);
-      for (FamilyScope scope : walEdit.getScopesList()) {
+      for (FamilyScope scope : walKey.getScopesList()) {
         byte[] family = (compressionContext == null) ? scope.getFamily().toByteArray() :
           uncompressor.uncompress(scope.getFamily(), compressionContext.familyDict);
         this.replicationScope.put(family, scope.getScopeType().getNumber());
       }
     }
-    setSequenceId(walEdit.getLogSequenceNumber());
-    this.writeTime = walEdit.getWriteTime();
-    if(walEdit.hasOrigSequenceNumber()) {
-      this.origLogSeqNum = walEdit.getOrigSequenceNumber();
+    setSequenceId(walKey.getLogSequenceNumber());
+    this.writeTime = walKey.getWriteTime();
+    if(walKey.hasOrigSequenceNumber()) {
+      this.origLogSeqNum = walKey.getOrigSequenceNumber();
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/e294dcd8/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index 6a7d896..aeacd9d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -2337,8 +2337,8 @@ public class WALSplitter {
       return new ArrayList<>();
     }
 
-    long replaySeqId = (entry.getEdit().hasOrigSequenceNumber()) ?
-      entry.getEdit().getOrigSequenceNumber() : entry.getEdit().getLogSequenceNumber();
+    long replaySeqId = (entry.getKey().hasOrigSequenceNumber()) ?
+      entry.getKey().getOrigSequenceNumber() : entry.getKey().getLogSequenceNumber();
     int count = entry.getAssociatedCellCount();
     List<MutationReplay> mutations = new ArrayList<>();
     Cell previousCell = null;
@@ -2368,9 +2368,9 @@ public class WALSplitter {
         } else {
           m = new Put(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
           // Puts might come from increment or append, thus we need nonces.
-          long nonceGroup = entry.getEdit().hasNonceGroup()
-              ? entry.getEdit().getNonceGroup() : HConstants.NO_NONCE;
-          long nonce = entry.getEdit().hasNonce() ? entry.getEdit().getNonce() : HConstants.NO_NONCE;
+          long nonceGroup = entry.getKey().hasNonceGroup()
+              ? entry.getKey().getNonceGroup() : HConstants.NO_NONCE;
+          long nonce = entry.getKey().hasNonce() ? entry.getKey().getNonce() : HConstants.NO_NONCE;
           mutations.add(new MutationReplay(MutationType.PUT, m, nonceGroup, nonce));
         }
       }
@@ -2385,10 +2385,10 @@ public class WALSplitter {
 
     // reconstruct WALKey
     if (logEntry != null) {
-      org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALEdit walKeyProto =
-          entry.getEdit();
+      org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey walKeyProto =
+          entry.getKey();
       List<UUID> clusterIds = new ArrayList<>(walKeyProto.getClusterIdsCount());
-      for (HBaseProtos.UUID uuid : entry.getEdit().getClusterIdsList()) {
+      for (HBaseProtos.UUID uuid : entry.getKey().getClusterIdsList()) {
         clusterIds.add(new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits()));
       }
       key = new WALKey(walKeyProto.getEncodedRegionName().toByteArray(), TableName.valueOf(

http://git-wip-us.apache.org/repos/asf/hbase/blob/e294dcd8/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
index 3ce59a4..dc0ca08 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.ReplicationTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -375,7 +376,7 @@ public class TestReplicationSink {
   private WALEntry.Builder createWALEntryBuilder(TableName table) {
     WALEntry.Builder builder = WALEntry.newBuilder();
     builder.setAssociatedCellCount(1);
-    WALProtos.WALEdit.Builder keyBuilder = WALProtos.WALEdit.newBuilder();
+    WALKey.Builder keyBuilder = WALKey.newBuilder();
     UUID.Builder uuidBuilder = UUID.newBuilder();
     uuidBuilder.setLeastSigBits(HConstants.DEFAULT_CLUSTER_ID.getLeastSignificantBits());
     uuidBuilder.setMostSigBits(HConstants.DEFAULT_CLUSTER_ID.getMostSignificantBits());
@@ -384,7 +385,7 @@ public class TestReplicationSink {
     keyBuilder.setWriteTime(System.currentTimeMillis());
     keyBuilder.setEncodedRegionName(UnsafeByteOperations.unsafeWrap(HConstants.EMPTY_BYTE_ARRAY));
     keyBuilder.setLogSequenceNumber(-1);
-    builder.setEdit(keyBuilder.build());
+    builder.setKey(keyBuilder.build());
     return builder;
   }
 }
