http://git-wip-us.apache.org/repos/asf/hbase/blob/70f330dc/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
index 4deab19..d7b8461 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
@@ -21048,6 +21048,35 @@ public final class ClientProtos {
 * <code>optional bool assign_seq_num = 3;</code> */ boolean getAssignSeqNum(); + + // optional .hbase.pb.DelegationToken fs_token = 4; + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + boolean hasFsToken(); + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken(); + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder(); + + // optional string bulk_token = 5; + /** + * <code>optional string bulk_token = 5;</code> + */ + boolean hasBulkToken(); + /** + * <code>optional string bulk_token = 5;</code> + */ + java.lang.String getBulkToken(); + /** + * <code>optional string bulk_token = 5;</code> + */ + com.google.protobuf.ByteString + getBulkTokenBytes(); } /** * Protobuf type {@code hbase.pb.BulkLoadHFileRequest} */
@@ -21132,6 +21161,24 @@ public final class ClientProtos {
 assignSeqNum_ = input.readBool(); break; } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + subBuilder = fsToken_.toBuilder(); + } + fsToken_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(fsToken_); + fsToken_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000004; + break; + } + case 42: { + bitField0_ |= 0x00000008; + bulkToken_ = input.readBytes(); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -21867,10 +21914,77 @@ public final class ClientProtos {
 return assignSeqNum_; } + // optional .hbase.pb.DelegationToken fs_token = 4; + public static final int FS_TOKEN_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_; + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + public boolean hasFsToken() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() { + return fsToken_; + } + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { + return fsToken_; + } + + // optional string bulk_token = 5; + public static final int BULK_TOKEN_FIELD_NUMBER = 5; + private java.lang.Object bulkToken_; + /** + * <code>optional string bulk_token = 5;</code> + */ + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000008) ==
0x00000008); + } + /** + * <code>optional string bulk_token = 5;</code> + */ + public java.lang.String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + bulkToken_ = s; + } + return s; + } + } + /** + * <code>optional string bulk_token = 5;</code> + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); familyPath_ = java.util.Collections.emptyList(); assignSeqNum_ = false; + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + bulkToken_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -21907,6 +22021,12 @@ public final class ClientProtos { if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(3, assignSeqNum_); } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(4, fsToken_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(5, getBulkTokenBytes()); + } getUnknownFields().writeTo(output); } @@ -21928,6 +22048,14 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, assignSeqNum_); } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, fsToken_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(5, getBulkTokenBytes()); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -21963,6 +22091,16 @@ public final class ClientProtos { result = result && (getAssignSeqNum() == other.getAssignSeqNum()); } + result = result && (hasFsToken() == other.hasFsToken()); + if (hasFsToken()) { + result = result && getFsToken() + .equals(other.getFsToken()); + } + result = result && (hasBulkToken() == other.hasBulkToken()); + if (hasBulkToken()) { + result = result && getBulkToken() + .equals(other.getBulkToken()); + } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -21988,6 +22126,14 @@ public final class ClientProtos { hash = (37 * hash) + ASSIGN_SEQ_NUM_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getAssignSeqNum()); } + if (hasFsToken()) { + hash = (37 * hash) + FS_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getFsToken().hashCode(); + } + if (hasBulkToken()) { + hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getBulkToken().hashCode(); + } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; @@ -22097,6 +22243,7 @@ public final class ClientProtos { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionFieldBuilder(); getFamilyPathFieldBuilder(); + getFsTokenFieldBuilder(); } } private static Builder create() { @@ -22119,6 +22266,14 @@ public final class ClientProtos { } assignSeqNum_ = false; bitField0_ = (bitField0_ & ~0x00000004); + if 
(fsTokenBuilder_ == null) { + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + } else { + fsTokenBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + bulkToken_ = ""; + bitField0_ = (bitField0_ & ~0x00000010); return this; } @@ -22168,6 +22323,18 @@ public final class ClientProtos { to_bitField0_ |= 0x00000002; } result.assignSeqNum_ = assignSeqNum_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000004; + } + if (fsTokenBuilder_ == null) { + result.fsToken_ = fsToken_; + } else { + result.fsToken_ = fsTokenBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000008; + } + result.bulkToken_ = bulkToken_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -22216,6 +22383,14 @@ public final class ClientProtos { if (other.hasAssignSeqNum()) { setAssignSeqNum(other.getAssignSeqNum()); } + if (other.hasFsToken()) { + mergeFsToken(other.getFsToken()); + } + if (other.hasBulkToken()) { + bitField0_ |= 0x00000010; + bulkToken_ = other.bulkToken_; + onChanged(); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -22647,6 +22822,197 @@ public final class ClientProtos { return this; } + // optional .hbase.pb.DelegationToken fs_token = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> fsTokenBuilder_; + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + public boolean hasFsToken() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() { + if (fsTokenBuilder_ == null) { + return fsToken_; + } else { + return fsTokenBuilder_.getMessage(); + } + } + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) { + if (fsTokenBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + fsToken_ = value; + onChanged(); + } else { + fsTokenBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + return this; + } + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + public Builder setFsToken( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder builderForValue) { + if (fsTokenBuilder_ == null) { + fsToken_ = builderForValue.build(); + onChanged(); + } else { + fsTokenBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + return this; + } + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) { + if (fsTokenBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008) && + fsToken_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) { + fsToken_ = + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial(); + } else { + fsToken_ = value; + } + onChanged(); + } else { + fsTokenBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + return this; + } + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + public Builder clearFsToken() { + if (fsTokenBuilder_ == null) { + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + onChanged(); + } else { + fsTokenBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder getFsTokenBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getFsTokenFieldBuilder().getBuilder(); + } + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { + if (fsTokenBuilder_ != null) { + return fsTokenBuilder_.getMessageOrBuilder(); + } else { + return fsToken_; + } + } + /** + * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> + getFsTokenFieldBuilder() { + if (fsTokenBuilder_ == null) { + fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>( + fsToken_, + getParentForChildren(), + isClean()); + fsToken_ = null; + } + return fsTokenBuilder_; + } + + // optional string bulk_token = 5; + private java.lang.Object bulkToken_ = ""; + /** + * <code>optional string bulk_token = 5;</code> + */ + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * <code>optional string bulk_token = 5;</code> + */ + public java.lang.String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + bulkToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * <code>optional string bulk_token = 5;</code> + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * <code>optional string bulk_token = 5;</code> + */ + public Builder setBulkToken( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + bulkToken_ = value; + onChanged(); + return this; + } + /** + * <code>optional string bulk_token = 5;</code> + */ + public Builder clearBulkToken() { + bitField0_ = (bitField0_ & ~0x00000010); + bulkToken_ = getDefaultInstance().getBulkToken(); + onChanged(); + return this; + } + /** + * 
<code>optional string bulk_token = 5;</code> + */ + public Builder setBulkTokenBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + bulkToken_ = value; + onChanged(); + return this; + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileRequest) } @@ -22784,17 +23150,3302 @@ public final class ClientProtos { } private void initFields() { - loaded_ = false; + loaded_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLoaded()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, loaded_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, loaded_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj; + + boolean result = true; + result = result && (hasLoaded() == other.hasLoaded()); + if (hasLoaded()) { + result = result && (getLoaded() + == other.getLoaded()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLoaded()) { + hash = (37 * hash) + LOADED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getLoaded()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); 
+ } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.BulkLoadHFileResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + loaded_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.loaded_ = loaded_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this; + if (other.hasLoaded()) { + setLoaded(other.getLoaded()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLoaded()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bool loaded = 1; + private boolean loaded_ ; + /** + * <code>required bool loaded = 1;</code> + */ + public boolean hasLoaded() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>required bool loaded = 1;</code> + 
*/ + public boolean getLoaded() { + return loaded_; + } + /** + * <code>required bool loaded = 1;</code> + */ + public Builder setLoaded(boolean value) { + bitField0_ |= 0x00000001; + loaded_ = value; + onChanged(); + return this; + } + /** + * <code>required bool loaded = 1;</code> + */ + public Builder clearLoaded() { + bitField0_ = (bitField0_ & ~0x00000001); + loaded_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileResponse) + } + + static { + defaultInstance = new BulkLoadHFileResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileResponse) + } + + public interface DelegationTokenOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes identifier = 1; + /** + * <code>optional bytes identifier = 1;</code> + */ + boolean hasIdentifier(); + /** + * <code>optional bytes identifier = 1;</code> + */ + com.google.protobuf.ByteString getIdentifier(); + + // optional bytes password = 2; + /** + * <code>optional bytes password = 2;</code> + */ + boolean hasPassword(); + /** + * <code>optional bytes password = 2;</code> + */ + com.google.protobuf.ByteString getPassword(); + + // optional string kind = 3; + /** + * <code>optional string kind = 3;</code> + */ + boolean hasKind(); + /** + * <code>optional string kind = 3;</code> + */ + java.lang.String getKind(); + /** + * <code>optional string kind = 3;</code> + */ + com.google.protobuf.ByteString + getKindBytes(); + + // optional string service = 4; + /** + * <code>optional string service = 4;</code> + */ + boolean hasService(); + /** + * <code>optional string service = 4;</code> + */ + java.lang.String getService(); + /** + * <code>optional string service = 4;</code> + */ + com.google.protobuf.ByteString + getServiceBytes(); + } + /** + * Protobuf type {@code hbase.pb.DelegationToken} + */ + public static final class DelegationToken extends + com.google.protobuf.GeneratedMessage + implements DelegationTokenOrBuilder { + // Use DelegationToken.newBuilder() to construct. 
+ private DelegationToken(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private DelegationToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final DelegationToken defaultInstance; + public static DelegationToken getDefaultInstance() { + return defaultInstance; + } + + public DelegationToken getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DelegationToken( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + identifier_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + password_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + kind_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + service_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder.class); + } + + public static com.google.protobuf.Parser<DelegationToken> PARSER = + new com.google.protobuf.AbstractParser<DelegationToken>() { + public DelegationToken parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DelegationToken(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<DelegationToken> getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional bytes identifier = 1; + public static final int IDENTIFIER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString identifier_; + /** + * <code>optional bytes identifier = 1;</code> + */ + public boolean hasIdentifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>optional bytes 
identifier = 1;</code> + */ + public com.google.protobuf.ByteString getIdentifier() { + return identifier_; + } + + // optional bytes password = 2; + public static final int PASSWORD_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString password_; + /** + * <code>optional bytes password = 2;</code> + */ + public boolean hasPassword() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * <code>optional bytes password = 2;</code> + */ + public com.google.protobuf.ByteString getPassword() { + return password_; + } + + // optional string kind = 3; + public static final int KIND_FIELD_NUMBER = 3; + private java.lang.Object kind_; + /** + * <code>optional string kind = 3;</code> + */ + public boolean hasKind() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * <code>optional string kind = 3;</code> + */ + public java.lang.String getKind() { + java.lang.Object ref = kind_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + kind_ = s; + } + return s; + } + } + /** + * <code>optional string kind = 3;</code> + */ + public com.google.protobuf.ByteString + getKindBytes() { + java.lang.Object ref = kind_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + kind_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string service = 4; + public static final int SERVICE_FIELD_NUMBER = 4; + private java.lang.Object service_; + /** + * <code>optional string service = 4;</code> + */ + public boolean hasService() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * <code>optional string service = 4;</code> + */ + public java.lang.String getService() { + java.lang.Object ref = service_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + service_ = s; + } + return s; + } + } + /** + * <code>optional string service = 4;</code> + */ + public com.google.protobuf.ByteString + getServiceBytes() { + java.lang.Object ref = service_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + service_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + identifier_ = com.google.protobuf.ByteString.EMPTY; + password_ = com.google.protobuf.ByteString.EMPTY; + kind_ = ""; + service_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, identifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, password_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getKindBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, getServiceBytes()); + } + 
getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, identifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, password_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getKindBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getServiceBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken) obj; + + boolean result = true; + result = result && (hasIdentifier() == other.hasIdentifier()); + if (hasIdentifier()) { + result = result && getIdentifier() + .equals(other.getIdentifier()); + } + result = result && (hasPassword() == other.hasPassword()); + if (hasPassword()) { + result = result && getPassword() + .equals(other.getPassword()); + } + result = result && (hasKind() == other.hasKind()); + if (hasKind()) { + result = result && getKind() + .equals(other.getKind()); + } + result = result && (hasService() == other.hasService()); + if (hasService()) { + result = result && getService() + .equals(other.getService()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasIdentifier()) { + hash = (37 * hash) + IDENTIFIER_FIELD_NUMBER; + hash = (53 * hash) + getIdentifier().hashCode(); + } + if (hasPassword()) { + hash = (37 * hash) + PASSWORD_FIELD_NUMBER; + hash = (53 * hash) + getPassword().hashCode(); + } + if (hasKind()) { + hash = (37 * hash) + KIND_FIELD_NUMBER; + hash = (53 * hash) + getKind().hashCode(); + } + if (hasService()) { + hash = (37 * hash) + SERVICE_FIELD_NUMBER; + hash = (53 * hash) + getService().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, 
extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.DelegationToken} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder.class); + } + + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + identifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + password_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + kind_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + service_ = ""; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.identifier_ = identifier_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.password_ = password_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.kind_ = kind_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.service_ = service_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) return this; + if (other.hasIdentifier()) { + setIdentifier(other.getIdentifier()); + } + if (other.hasPassword()) { + setPassword(other.getPassword()); + } + if (other.hasKind()) { + bitField0_ |= 0x00000004; + kind_ = other.kind_; + onChanged(); + } + if (other.hasService()) { + bitField0_ |= 0x00000008; + service_ = other.service_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { 
+ return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional bytes identifier = 1; + private com.google.protobuf.ByteString identifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * <code>optional bytes identifier = 1;</code> + */ + public boolean hasIdentifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>optional bytes identifier = 1;</code> + */ + public com.google.protobuf.ByteString getIdentifier() { + return identifier_; + } + /** + * <code>optional bytes identifier = 1;</code> + */ + public Builder setIdentifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + identifier_ = value; + onChanged(); + return this; + } + /** + * <code>optional bytes identifier = 1;</code> + */ + public Builder clearIdentifier() { + bitField0_ = (bitField0_ & ~0x00000001); + identifier_ = getDefaultInstance().getIdentifier(); + onChanged(); + return this; + } + + // optional bytes password = 2; + private com.google.protobuf.ByteString password_ = com.google.protobuf.ByteString.EMPTY; + /** + * <code>optional bytes password = 2;</code> + */ + public boolean hasPassword() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * <code>optional bytes password = 2;</code> + */ + public com.google.protobuf.ByteString getPassword() { + return password_; + } + /** + * <code>optional bytes password = 2;</code> + */ + public Builder setPassword(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + password_ = value; + onChanged(); + return this; + } + /** + * <code>optional bytes password = 2;</code> + */ + public Builder clearPassword() { + bitField0_ = (bitField0_ & ~0x00000002); + password_ = getDefaultInstance().getPassword(); + onChanged(); + return this; + } + + // optional string kind = 3; + private java.lang.Object kind_ = ""; + /** + * <code>optional string kind = 3;</code> + */ + public boolean hasKind() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * <code>optional string kind = 3;</code> + */ + public java.lang.String getKind() { + java.lang.Object ref = kind_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + kind_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * <code>optional string kind = 3;</code> + */ + public com.google.protobuf.ByteString + getKindBytes() { + java.lang.Object ref = kind_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + kind_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * <code>optional string kind = 3;</code> + */ + public Builder setKind( + java.lang.String value) { + if (value == 
null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + kind_ = value; + onChanged(); + return this; + } + /** + * <code>optional string kind = 3;</code> + */ + public Builder clearKind() { + bitField0_ = (bitField0_ & ~0x00000004); + kind_ = getDefaultInstance().getKind(); + onChanged(); + return this; + } + /** + * <code>optional string kind = 3;</code> + */ + public Builder setKindBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + kind_ = value; + onChanged(); + return this; + } + + // optional string service = 4; + private java.lang.Object service_ = ""; + /** + * <code>optional string service = 4;</code> + */ + public boolean hasService() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * <code>optional string service = 4;</code> + */ + public java.lang.String getService() { + java.lang.Object ref = service_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + service_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * <code>optional string service = 4;</code> + */ + public com.google.protobuf.ByteString + getServiceBytes() { + java.lang.Object ref = service_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + service_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * <code>optional string service = 4;</code> + */ + public Builder setService( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + service_ = value; + onChanged(); + return this; + } + /** + * <code>optional string service = 4;</code> + */ + public Builder clearService() { + bitField0_ = (bitField0_ & ~0x00000008); + service_ = getDefaultInstance().getService(); + onChanged(); + return this; + } + /** + * <code>optional string service = 4;</code> + */ + public Builder setServiceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + service_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.DelegationToken) + } + + static { + defaultInstance = new DelegationToken(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.DelegationToken) + } + + public interface PrepareBulkLoadRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .hbase.pb.TableName table_name = 1; + /** + * <code>required .hbase.pb.TableName table_name = 1;</code> + */ + boolean hasTableName(); + /** + * <code>required .hbase.pb.TableName table_name = 1;</code> + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); + /** + * <code>required .hbase.pb.TableName table_name = 1;</code> + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); + + // optional .hbase.pb.RegionSpecifier region = 2; + /** + * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> + */ + boolean hasRegion(); + /** + * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> + */ + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.PrepareBulkLoadRequest}
+   */
+  public static final class PrepareBulkLoadRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements PrepareBulkLoadRequestOrBuilder {
+    // Use PrepareBulkLoadRequest.newBuilder() to construct.
+    private PrepareBulkLoadRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private PrepareBulkLoadRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final PrepareBulkLoadRequest defaultInstance;
+    public static PrepareBulkLoadRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public PrepareBulkLoadRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private PrepareBulkLoadRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000001) == 0x00000001)) {
+                subBuilder = tableName_.toBuilder();
+              }
+              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(tableName_);
+                tableName_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000001;
+              break;
+            }
+            case 18: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000002) == 0x00000002)) {
+                subBuilder = region_.toBuilder();
+              }
+              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(region_);
+                region_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000002;
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<PrepareBulkLoadRequest> PARSER =
+        new com.google.protobuf.AbstractParser<PrepareBulkLoadRequest>() {
+      public PrepareBulkLoadRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new PrepareBulkLoadRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<PrepareBulkLoadRequest> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required .hbase.pb.TableName table_name = 1;
+    public static final int TABLE_NAME_FIELD_NUMBER = 1;
+    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+    /**
+     * <code>required .hbase.pb.TableName table_name = 1;</code>
+     */
+    public boolean hasTableName() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required .hbase.pb.TableName table_name = 1;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+      return tableName_;
+    }
+    /**
+     * <code>required .hbase.pb.TableName table_name = 1;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+      return tableName_;
+    }
+
+    // optional .hbase.pb.RegionSpecifier region = 2;
+    public static final int REGION_FIELD_NUMBER = 2;
+    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
+    /**
+     * <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
+     */
+    public boolean hasRegion() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+      return region_;
+    }
+    /**
+     * <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
+      return region_;
+    }
+
+    private void initFields() {
+      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasTableName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!getTableName().isInitialized()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (hasRegion()) {
+        if (!getRegion().isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeMessage(1, tableName_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeMessage(2, region_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+            .computeMessageSize(1, tableName_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+            .computeMessageSize(2, region_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) obj;
+
+      boolean result = true;
+      result = result && (hasTableName() == other.hasTableName());
+      if (hasTableName()) {
+        result = result && getTableName()
+            .equals(other.getTableName());
+      }
+      result = result && (hasRegion() == other.hasRegion());
+      if (hasRegion()) {
+        result = result && getRegion()
+            .equals(other.getRegion());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasTableName()) {
+        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+        hash = (53 * hash) + getTableName().hashCode();
+      }
+      if (hasRegion()) {
+        hash = (37 * hash) + REGION_FIELD_NUMBER;
+        hash = (53 * hash) + getRegion().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.PrepareBulkLoadRequest}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequestOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getTableNameFieldBuilder();
+          getRegionFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        if (tableNameBuilder_ == null) {
+          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+        } else {
+          tableNameBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000001);
+        if (regionBuilder_ == null) {
+          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+        } else {
+          regionBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest build() {
+        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        if (tableNameBuilder_ == null) {
+          result.tableName_ = tableName_;
+        } else {
+          result.tableName_ = tableNameBuilder_.build();
+        }
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        if (regionBuilder_ == null) {
+          result.region_ = region_;
+        } else {
+          result.region_ = regionBuilder_.build();
+        }
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance()) return this;
+        if (other.hasTableName()) {
+          mergeTableName(other.getTableName());
+        }
+        if (other.hasRegion()) {
+          mergeRegion(other.getRegion());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasTableName()) {
+
+          return false;
+        }
+        if (!getTableName().isInitialized()) {
+
+          return false;
+        }
+        if (hasRegion()) {
+          if (!getRegion().isInitialized()) {
+
+            return false;
+          }
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required .hbase.pb.TableName table_name = 1;
+      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+      /**
+       * <code>required .hbase.pb.TableName table_name = 1;</code>
+       */
+      public boolean hasTableName() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required .hbase.pb.TableName table_name = 1;</code>
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+        if (tableNameBuilder_ == null) {
+          return tableName_;
+        } else {
+          return tableNameBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>required .hbase.pb.TableName table_name = 1;</code>
+       */
+      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+        if (tableNameBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          tableName_ = value;
+          onChanged();
+        } else {
+          tableNameBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000001;
+        return this;
+      }
+      /**
+       * <code>required .hbase.pb.TableName table_name = 1;</code>
+       */
+      public Builder setTableName(
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+        if (tableNameBuilder_ == null) {
+          tableName_ = builderForValue.build();
+          onChanged();
+        } else {
+          tableNameBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000001;
+        return this;
+      }
+      /**
+       * <code>required .hbase.pb.TableName table_name = 1;</code>
+       */
+      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+        if (tableNameBuilder_ == null) {
+          if (((bitField0_ & 0x00000001) == 0x00000001) &&
+              tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+            tableName_ =
+                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+          } else {
+            tableName_ = value;
+          }
+          onChanged();
+        } else {
+          tableNameBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000001;
+        return this;
+      }
+      /**
+       * <code>required .hbase.pb.TableName table_name = 1;</code>
+       */
+      public Builder clearTableName() {
+        if (tableNameBuilder_ == null) {
+          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          onChanged();
+        } else {
+          tableNameBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+      /**
+       * <code>required .hbase.pb.TableName table_name = 1;</code>
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+        bitField0_ |= 0x00000001;
+        onChanged();
+        return getTableNameFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>required .hbase.pb.TableName table_name = 1;</code>
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+        if (tableNameBuilder_ != null) {
+          return tableNameBuilder_.getMessageOrBuilder();
+        } else {
+          return tableName_;
+        }
+      }
+      /**
+       * <code>required .hbase.pb.TableName table_name = 1;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+          getTableNameFieldBuilder() {
+        if (tableNameBuilder_ == null) {
+          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+                  tableName_,
+                  getParentForChildren(),
+                  isClean());
+          tableName_ = null;
+        }
+        return tableNameBuilder_;
+      }
+
+      // optional .hbase.pb.RegionSpecifier region = 2;
+      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
+      /**
+       * <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
+       */
+      public boolean hasRegion() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
+        if (regionBuilder_ == null) {
+          return region_;
+        } else {
+          return regionBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
+       */
+      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+        if (regionBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          region_ = value;
+          onChanged();
+        } else {
+          regionBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000002;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
+       */
+      public Builder setRegion(
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
+        if (regionBuilder_ == null) {
+          region_ = builderForValue.build();
+          onChanged();
+        } else {
+          regionBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000002;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
+       */
+      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
+        if (regionBuilder_ == null) {
+          if (((bitField0_ & 0x00000002) == 0x00000002) &&
+              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
+            region_ =
+                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).
<TRUNCATED>
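
The generated PrepareBulkLoadRequest class above follows the standard protobuf 2.5 builder pattern: table_name (field 1) is the only required field, region (field 2) is optional, and build() throws if a required field is left unset. Below is a minimal sketch of how a caller might construct and round-trip this message; the table name is hypothetical, and ProtobufUtil.toProtoTableName (from hbase-client) is assumed to be available for converting an HBase TableName to its protobuf form.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;

public class PrepareBulkLoadRequestExample {
  public static void main(String[] args) throws Exception {
    // table_name (field 1) is required; region (field 2) is left unset here.
    PrepareBulkLoadRequest request = PrepareBulkLoadRequest.newBuilder()
        .setTableName(ProtobufUtil.toProtoTableName(
            TableName.valueOf("example_table")))  // hypothetical table name
        .build();  // would throw UninitializedMessageException if table_name were missing

    // Round-trip through the wire format using the generated parse methods.
    byte[] wire = request.toByteArray();
    PrepareBulkLoadRequest parsed = PrepareBulkLoadRequest.parseFrom(wire);
    System.out.println(parsed.hasTableName());  // true
    System.out.println(parsed.hasRegion());     // false
  }
}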