http://git-wip-us.apache.org/repos/asf/hbase/blob/401aa064/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Empty.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Empty.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Empty.java new file mode 100644 index 0000000..de6d523 --- /dev/null +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Empty.java @@ -0,0 +1,386 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/protobuf/empty.proto + +package org.apache.hadoop.hbase.shaded.com.google.protobuf; + +/** + * <pre> + * A generic empty message that you can re-use to avoid defining duplicated + * empty messages in your APIs. A typical example is to use it as the request + * or the response type of an API method. For instance: + * service Foo { + * rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + * } + * The JSON representation for `Empty` is empty JSON object `{}`. + * </pre> + * + * Protobuf type {@code google.protobuf.Empty} + */ +public final class Empty extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.protobuf.Empty) + EmptyOrBuilder { + // Use Empty.newBuilder() to construct. 
+ private Empty(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { + super(builder); + } + private Empty() { + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private Empty( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyProto.internal_static_google_protobuf_Empty_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyProto.internal_static_google_protobuf_Empty_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.Builder.class); + } + + private byte memoizedIsInitialized = -1; + public final boolean 
isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty) obj; + + boolean result = true; + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom(byte[] 
data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + 
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * <pre> + * A generic empty message that you can re-use to avoid defining duplicated + * empty messages in your APIs. A typical example is to use it as the request + * or the response type of an API method. For instance: + * service Foo { + * rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + * } + * The JSON representation for `Empty` is empty JSON object `{}`. 
+ * </pre> + * + * Protobuf type {@code google.protobuf.Empty} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements + // @@protoc_insertion_point(builder_implements:google.protobuf.Empty) + org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyProto.internal_static_google_protobuf_Empty_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyProto.internal_static_google_protobuf_Empty_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyProto.internal_static_google_protobuf_Empty_descriptor; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty build() { + org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty buildPartial() { + org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty(this); + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty) { + return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty other) { + if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.getDefaultInstance()) return this; + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + public final Builder setUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:google.protobuf.Empty) + } + + // @@protoc_insertion_point(class_scope:google.protobuf.Empty) + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty(); + } + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Empty> + PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Empty>() { + public Empty parsePartialFrom( + 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return new Empty(input, extensionRegistry); + } + }; + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Empty> parser() { + return PARSER; + } + + @java.lang.Override + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Empty> getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} +
http://git-wip-us.apache.org/repos/asf/hbase/blob/401aa064/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EmptyOrBuilder.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EmptyOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EmptyOrBuilder.java new file mode 100644 index 0000000..15befc2 --- /dev/null +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EmptyOrBuilder.java @@ -0,0 +1,9 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/protobuf/empty.proto + +package org.apache.hadoop.hbase.shaded.com.google.protobuf; + +public interface EmptyOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.protobuf.Empty) + org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { +} http://git-wip-us.apache.org/repos/asf/hbase/blob/401aa064/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EmptyProto.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EmptyProto.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EmptyProto.java new file mode 100644 index 0000000..5d4cb2a --- /dev/null +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EmptyProto.java @@ -0,0 +1,58 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/protobuf/empty.proto + +package org.apache.hadoop.hbase.shaded.com.google.protobuf; + +public final class EmptyProto { + private EmptyProto() {} + public static void registerAllExtensions( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry); + } + static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + internal_static_google_protobuf_Empty_descriptor; + static final + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_protobuf_Empty_fieldAccessorTable; + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\033google/protobuf/empty.proto\022\017google.pr" + + "otobuf\"\007\n\005EmptyBv\n\023com.google.protobufB\n" + + "EmptyProtoP\001Z\'github.com/golang/protobuf" + + "/ptypes/empty\370\001\001\242\002\003GPB\252\002\036Google.Protobuf" + + ".WellKnownTypesb\006proto3" + }; + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + internal_static_google_protobuf_Empty_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_google_protobuf_Empty_fieldAccessorTable = new + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_protobuf_Empty_descriptor, + new java.lang.String[] { }); + } + + // @@protoc_insertion_point(outer_class_scope) +} http://git-wip-us.apache.org/repos/asf/hbase/blob/401aa064/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Enum.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Enum.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Enum.java new file mode 100644 index 0000000..6355bb3 --- /dev/null +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Enum.java @@ -0,0 +1,1745 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/protobuf/type.proto + +package org.apache.hadoop.hbase.shaded.com.google.protobuf; + +/** + * <pre> + * Enum type definition. 
+ * </pre> + * + * Protobuf type {@code google.protobuf.Enum} + */ +public final class Enum extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.protobuf.Enum) + EnumOrBuilder { + // Use Enum.newBuilder() to construct. + private Enum(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { + super(builder); + } + private Enum() { + name_ = ""; + enumvalue_ = java.util.Collections.emptyList(); + options_ = java.util.Collections.emptyList(); + syntax_ = 0; + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private Enum( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + enumvalue_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue>(); + mutable_bitField0_ |= 0x00000002; + } + enumvalue_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.parser(), extensionRegistry)); + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + options_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option>(); + mutable_bitField0_ |= 
0x00000004; + } + options_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.parser(), extensionRegistry)); + break; + } + case 34: { + org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder subBuilder = null; + if (sourceContext_ != null) { + subBuilder = sourceContext_.toBuilder(); + } + sourceContext_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(sourceContext_); + sourceContext_ = subBuilder.buildPartial(); + } + + break; + } + case 40: { + int rawValue = input.readEnum(); + + syntax_ = rawValue; + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + enumvalue_ = java.util.Collections.unmodifiableList(enumvalue_); + } + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + options_ = java.util.Collections.unmodifiableList(options_); + } + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.TypeProto.internal_static_google_protobuf_Enum_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.TypeProto.internal_static_google_protobuf_Enum_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum.Builder.class); + } + + private 
int bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * <pre> + * Enum type name. + * </pre> + * + * <code>optional string name = 1;</code> + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = + (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * <pre> + * Enum type name. + * </pre> + * + * <code>optional string name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + } + } + + public static final int ENUMVALUE_FIELD_NUMBER = 2; + private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue> enumvalue_; + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue> getEnumvalueList() { + return enumvalue_; + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValueOrBuilder> + getEnumvalueOrBuilderList() { + return enumvalue_; + } + /** + * <pre> + * Enum value definitions. 
+ * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public int getEnumvalueCount() { + return enumvalue_.size(); + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue getEnumvalue(int index) { + return enumvalue_.get(index); + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValueOrBuilder getEnumvalueOrBuilder( + int index) { + return enumvalue_.get(index); + } + + public static final int OPTIONS_FIELD_NUMBER = 3; + private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option> options_; + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option> getOptionsList() { + return options_; + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder> + getOptionsOrBuilderList() { + return options_; + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public int getOptionsCount() { + return options_.size(); + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Option getOptions(int index) { + return options_.get(index); + } + /** + * <pre> + * Protocol buffer options. 
+ * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder getOptionsOrBuilder( + int index) { + return options_.get(index); + } + + public static final int SOURCE_CONTEXT_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext sourceContext_; + /** + * <pre> + * The source context. + * </pre> + * + * <code>optional .google.protobuf.SourceContext source_context = 4;</code> + */ + public boolean hasSourceContext() { + return sourceContext_ != null; + } + /** + * <pre> + * The source context. + * </pre> + * + * <code>optional .google.protobuf.SourceContext source_context = 4;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext getSourceContext() { + return sourceContext_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.getDefaultInstance() : sourceContext_; + } + /** + * <pre> + * The source context. + * </pre> + * + * <code>optional .google.protobuf.SourceContext source_context = 4;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder getSourceContextOrBuilder() { + return getSourceContext(); + } + + public static final int SYNTAX_FIELD_NUMBER = 5; + private int syntax_; + /** + * <pre> + * The source syntax. + * </pre> + * + * <code>optional .google.protobuf.Syntax syntax = 5;</code> + */ + public int getSyntaxValue() { + return syntax_; + } + /** + * <pre> + * The source syntax. + * </pre> + * + * <code>optional .google.protobuf.Syntax syntax = 5;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax getSyntax() { + org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax result = org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.valueOf(syntax_); + return result == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + for (int i = 0; i < enumvalue_.size(); i++) { + output.writeMessage(2, enumvalue_.get(i)); + } + for (int i = 0; i < options_.size(); i++) { + output.writeMessage(3, options_.get(i)); + } + if (sourceContext_ != null) { + output.writeMessage(4, getSourceContext()); + } + if (syntax_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.SYNTAX_PROTO2.getNumber()) { + output.writeEnum(5, syntax_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + for (int i = 0; i < enumvalue_.size(); i++) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeMessageSize(2, enumvalue_.get(i)); + } + for (int i = 0; i < options_.size(); i++) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeMessageSize(3, options_.get(i)); + } + if (sourceContext_ != null) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getSourceContext()); + } + if (syntax_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.SYNTAX_PROTO2.getNumber()) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + 
.computeEnumSize(5, syntax_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && getEnumvalueList() + .equals(other.getEnumvalueList()); + result = result && getOptionsList() + .equals(other.getOptionsList()); + result = result && (hasSourceContext() == other.hasSourceContext()); + if (hasSourceContext()) { + result = result && getSourceContext() + .equals(other.getSourceContext()); + } + result = result && syntax_ == other.syntax_; + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (getEnumvalueCount() > 0) { + hash = (37 * hash) + ENUMVALUE_FIELD_NUMBER; + hash = (53 * hash) + getEnumvalueList().hashCode(); + } + if (getOptionsCount() > 0) { + hash = (37 * hash) + OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + getOptionsList().hashCode(); + } + if (hasSourceContext()) { + hash = (37 * hash) + SOURCE_CONTEXT_FIELD_NUMBER; + hash = (53 * hash) + getSourceContext().hashCode(); + } + hash = (37 * hash) + SYNTAX_FIELD_NUMBER; + hash = (53 * hash) + syntax_; + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, 
input); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * <pre> + * Enum type definition. 
+ * </pre> + * + * Protobuf type {@code google.protobuf.Enum} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements + // @@protoc_insertion_point(builder_implements:google.protobuf.Enum) + org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.TypeProto.internal_static_google_protobuf_Enum_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.TypeProto.internal_static_google_protobuf_Enum_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getEnumvalueFieldBuilder(); + getOptionsFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + name_ = ""; + + if (enumvalueBuilder_ == null) { + enumvalue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + enumvalueBuilder_.clear(); + } + if (optionsBuilder_ == null) { + options_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + optionsBuilder_.clear(); + } + if 
(sourceContextBuilder_ == null) { + sourceContext_ = null; + } else { + sourceContext_ = null; + sourceContextBuilder_ = null; + } + syntax_ = 0; + + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.TypeProto.internal_static_google_protobuf_Enum_descriptor; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum build() { + org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum buildPartial() { + org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.name_ = name_; + if (enumvalueBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + enumvalue_ = java.util.Collections.unmodifiableList(enumvalue_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.enumvalue_ = enumvalue_; + } else { + result.enumvalue_ = enumvalueBuilder_.build(); + } + if (optionsBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + options_ = java.util.Collections.unmodifiableList(options_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.options_ = options_; + } else { + result.options_ = optionsBuilder_.build(); + } + if (sourceContextBuilder_ == null) { + result.sourceContext_ = sourceContext_; + } else { + result.sourceContext_ = sourceContextBuilder_.build(); + } + result.syntax_ = syntax_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + 
public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum) { + return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum other) { + if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (enumvalueBuilder_ == null) { + if (!other.enumvalue_.isEmpty()) { + if (enumvalue_.isEmpty()) { + enumvalue_ = other.enumvalue_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureEnumvalueIsMutable(); + enumvalue_.addAll(other.enumvalue_); + } + onChanged(); + } + } else { + if (!other.enumvalue_.isEmpty()) { + if (enumvalueBuilder_.isEmpty()) { + 
enumvalueBuilder_.dispose(); + enumvalueBuilder_ = null; + enumvalue_ = other.enumvalue_; + bitField0_ = (bitField0_ & ~0x00000002); + enumvalueBuilder_ = + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getEnumvalueFieldBuilder() : null; + } else { + enumvalueBuilder_.addAllMessages(other.enumvalue_); + } + } + } + if (optionsBuilder_ == null) { + if (!other.options_.isEmpty()) { + if (options_.isEmpty()) { + options_ = other.options_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureOptionsIsMutable(); + options_.addAll(other.options_); + } + onChanged(); + } + } else { + if (!other.options_.isEmpty()) { + if (optionsBuilder_.isEmpty()) { + optionsBuilder_.dispose(); + optionsBuilder_ = null; + options_ = other.options_; + bitField0_ = (bitField0_ & ~0x00000004); + optionsBuilder_ = + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getOptionsFieldBuilder() : null; + } else { + optionsBuilder_.addAllMessages(other.options_); + } + } + } + if (other.hasSourceContext()) { + mergeSourceContext(other.getSourceContext()); + } + if (other.syntax_ != 0) { + setSyntaxValue(other.getSyntaxValue()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + 
mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * <pre> + * Enum type name. + * </pre> + * + * <code>optional string name = 1;</code> + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = + (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * <pre> + * Enum type name. + * </pre> + * + * <code>optional string name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + } + } + /** + * <pre> + * Enum type name. + * </pre> + * + * <code>optional string name = 1;</code> + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * <pre> + * Enum type name. + * </pre> + * + * <code>optional string name = 1;</code> + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * <pre> + * Enum type name. 
+ * </pre> + * + * <code>optional string name = 1;</code> + */ + public Builder setNameBytes( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue> enumvalue_ = + java.util.Collections.emptyList(); + private void ensureEnumvalueIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + enumvalue_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue>(enumvalue_); + bitField0_ |= 0x00000002; + } + } + + private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue, org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValueOrBuilder> enumvalueBuilder_; + + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue> getEnumvalueList() { + if (enumvalueBuilder_ == null) { + return java.util.Collections.unmodifiableList(enumvalue_); + } else { + return enumvalueBuilder_.getMessageList(); + } + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public int getEnumvalueCount() { + if (enumvalueBuilder_ == null) { + return enumvalue_.size(); + } else { + return enumvalueBuilder_.getCount(); + } + } + /** + * <pre> + * Enum value definitions. 
+ * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue getEnumvalue(int index) { + if (enumvalueBuilder_ == null) { + return enumvalue_.get(index); + } else { + return enumvalueBuilder_.getMessage(index); + } + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public Builder setEnumvalue( + int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue value) { + if (enumvalueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureEnumvalueIsMutable(); + enumvalue_.set(index, value); + onChanged(); + } else { + enumvalueBuilder_.setMessage(index, value); + } + return this; + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public Builder setEnumvalue( + int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.Builder builderForValue) { + if (enumvalueBuilder_ == null) { + ensureEnumvalueIsMutable(); + enumvalue_.set(index, builderForValue.build()); + onChanged(); + } else { + enumvalueBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public Builder addEnumvalue(org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue value) { + if (enumvalueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureEnumvalueIsMutable(); + enumvalue_.add(value); + onChanged(); + } else { + enumvalueBuilder_.addMessage(value); + } + return this; + } + /** + * <pre> + * Enum value definitions. 
+ * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public Builder addEnumvalue( + int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue value) { + if (enumvalueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureEnumvalueIsMutable(); + enumvalue_.add(index, value); + onChanged(); + } else { + enumvalueBuilder_.addMessage(index, value); + } + return this; + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public Builder addEnumvalue( + org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.Builder builderForValue) { + if (enumvalueBuilder_ == null) { + ensureEnumvalueIsMutable(); + enumvalue_.add(builderForValue.build()); + onChanged(); + } else { + enumvalueBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public Builder addEnumvalue( + int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.Builder builderForValue) { + if (enumvalueBuilder_ == null) { + ensureEnumvalueIsMutable(); + enumvalue_.add(index, builderForValue.build()); + onChanged(); + } else { + enumvalueBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public Builder addAllEnumvalue( + java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue> values) { + if (enumvalueBuilder_ == null) { + ensureEnumvalueIsMutable(); + org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, enumvalue_); + onChanged(); + } else { + enumvalueBuilder_.addAllMessages(values); + } + return this; + } + /** + * <pre> + * Enum value definitions. 
+ * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public Builder clearEnumvalue() { + if (enumvalueBuilder_ == null) { + enumvalue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + enumvalueBuilder_.clear(); + } + return this; + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public Builder removeEnumvalue(int index) { + if (enumvalueBuilder_ == null) { + ensureEnumvalueIsMutable(); + enumvalue_.remove(index); + onChanged(); + } else { + enumvalueBuilder_.remove(index); + } + return this; + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.Builder getEnumvalueBuilder( + int index) { + return getEnumvalueFieldBuilder().getBuilder(index); + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValueOrBuilder getEnumvalueOrBuilder( + int index) { + if (enumvalueBuilder_ == null) { + return enumvalue_.get(index); } else { + return enumvalueBuilder_.getMessageOrBuilder(index); + } + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValueOrBuilder> + getEnumvalueOrBuilderList() { + if (enumvalueBuilder_ != null) { + return enumvalueBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(enumvalue_); + } + } + /** + * <pre> + * Enum value definitions. 
+ * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.Builder addEnumvalueBuilder() { + return getEnumvalueFieldBuilder().addBuilder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.getDefaultInstance()); + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.Builder addEnumvalueBuilder( + int index) { + return getEnumvalueFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.getDefaultInstance()); + } + /** + * <pre> + * Enum value definitions. + * </pre> + * + * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code> + */ + public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.Builder> + getEnumvalueBuilderList() { + return getEnumvalueFieldBuilder().getBuilderList(); + } + private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue, org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValueOrBuilder> + getEnumvalueFieldBuilder() { + if (enumvalueBuilder_ == null) { + enumvalueBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue, org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValueOrBuilder>( + enumvalue_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + enumvalue_ = null; + } + return enumvalueBuilder_; + } + + private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option> options_ = + java.util.Collections.emptyList(); + private 
void ensureOptionsIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + options_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option>(options_); + bitField0_ |= 0x00000004; + } + } + + private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.com.google.protobuf.Option, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder> optionsBuilder_; + + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option> getOptionsList() { + if (optionsBuilder_ == null) { + return java.util.Collections.unmodifiableList(options_); + } else { + return optionsBuilder_.getMessageList(); + } + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public int getOptionsCount() { + if (optionsBuilder_ == null) { + return options_.size(); + } else { + return optionsBuilder_.getCount(); + } + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Option getOptions(int index) { + if (optionsBuilder_ == null) { + return options_.get(index); + } else { + return optionsBuilder_.getMessage(index); + } + } + /** + * <pre> + * Protocol buffer options. 
+ * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public Builder setOptions( + int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option value) { + if (optionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOptionsIsMutable(); + options_.set(index, value); + onChanged(); + } else { + optionsBuilder_.setMessage(index, value); + } + return this; + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public Builder setOptions( + int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder builderForValue) { + if (optionsBuilder_ == null) { + ensureOptionsIsMutable(); + options_.set(index, builderForValue.build()); + onChanged(); + } else { + optionsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public Builder addOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.Option value) { + if (optionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOptionsIsMutable(); + options_.add(value); + onChanged(); + } else { + optionsBuilder_.addMessage(value); + } + return this; + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public Builder addOptions( + int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option value) { + if (optionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOptionsIsMutable(); + options_.add(index, value); + onChanged(); + } else { + optionsBuilder_.addMessage(index, value); + } + return this; + } + /** + * <pre> + * Protocol buffer options. 
+ * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public Builder addOptions( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder builderForValue) { + if (optionsBuilder_ == null) { + ensureOptionsIsMutable(); + options_.add(builderForValue.build()); + onChanged(); + } else { + optionsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public Builder addOptions( + int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder builderForValue) { + if (optionsBuilder_ == null) { + ensureOptionsIsMutable(); + options_.add(index, builderForValue.build()); + onChanged(); + } else { + optionsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public Builder addAllOptions( + java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.Option> values) { + if (optionsBuilder_ == null) { + ensureOptionsIsMutable(); + org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, options_); + onChanged(); + } else { + optionsBuilder_.addAllMessages(values); + } + return this; + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public Builder clearOptions() { + if (optionsBuilder_ == null) { + options_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + optionsBuilder_.clear(); + } + return this; + } + /** + * <pre> + * Protocol buffer options. 
+ * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public Builder removeOptions(int index) { + if (optionsBuilder_ == null) { + ensureOptionsIsMutable(); + options_.remove(index); + onChanged(); + } else { + optionsBuilder_.remove(index); + } + return this; + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder getOptionsBuilder( + int index) { + return getOptionsFieldBuilder().getBuilder(index); + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder getOptionsOrBuilder( + int index) { + if (optionsBuilder_ == null) { + return options_.get(index); } else { + return optionsBuilder_.getMessageOrBuilder(index); + } + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder> + getOptionsOrBuilderList() { + if (optionsBuilder_ != null) { + return optionsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(options_); + } + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder addOptionsBuilder() { + return getOptionsFieldBuilder().addBuilder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.getDefaultInstance()); + } + /** + * <pre> + * Protocol buffer options. 
+ * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder addOptionsBuilder( + int index) { + return getOptionsFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.getDefaultInstance()); + } + /** + * <pre> + * Protocol buffer options. + * </pre> + * + * <code>repeated .google.protobuf.Option options = 3;</code> + */ + public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder> + getOptionsBuilderList() { + return getOptionsFieldBuilder().getBuilderList(); + } + private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.com.google.protobuf.Option, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder> + getOptionsFieldBuilder() { + if (optionsBuilder_ == null) { + optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.com.google.protobuf.Option, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder>( + options_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + options_ = null; + } + return optionsBuilder_; + } + + private org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext sourceContext_ = null; + private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< + org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder> sourceContextBuilder_; + /** + * <pre> + * The source context. 
     * </pre>
     *
     * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
     */
    public boolean hasSourceContext() {
      // Present if either the inline field or its field-builder delegate exists.
      return sourceContextBuilder_ != null || sourceContext_ != null;
    }
    // NOTE(review): protoc-generated code ("DO NOT EDIT") — regenerate rather than
    // hand-edit; comments here are for readers only.
    /**
     * Returns the source context, or the default instance when unset
     * (never {@code null}).
     *
     * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext getSourceContext() {
      if (sourceContextBuilder_ == null) {
        return sourceContext_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.getDefaultInstance() : sourceContext_;
      } else {
        return sourceContextBuilder_.getMessage();
      }
    }
    /**
     * Replaces the source context.
     *
     * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
     *
     * @throws NullPointerException if {@code value} is null (only checked on the
     *         inline path; the delegate performs its own check)
     */
    public Builder setSourceContext(org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext value) {
      if (sourceContextBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        sourceContext_ = value;
        onChanged();
      } else {
        sourceContextBuilder_.setMessage(value);
      }

      return this;
    }
    /**
     * Replaces the source context with the built value of {@code builderForValue}.
     *
     * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
     */
    public Builder setSourceContext(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder builderForValue) {
      if (sourceContextBuilder_ == null) {
        sourceContext_ = builderForValue.build();
        onChanged();
      } else {
        sourceContextBuilder_.setMessage(builderForValue.build());
      }

      return this;
    }
    /**
     * Merges {@code value} into the current source context (proto merge
     * semantics); if currently unset, simply adopts {@code value}.
     *
     * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
     */
    public Builder mergeSourceContext(org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext value) {
      if (sourceContextBuilder_ == null) {
        if (sourceContext_ != null) {
          sourceContext_ =
            org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.newBuilder(sourceContext_).mergeFrom(value).buildPartial();
        } else {
          sourceContext_ = value;
        }
        onChanged();
      } else {
        sourceContextBuilder_.mergeFrom(value);
      }

      return this;
    }
    /**
     * Clears the source context back to unset.
     *
     * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
     */
    public Builder clearSourceContext() {
      if (sourceContextBuilder_ == null) {
        sourceContext_ = null;
        onChanged();
      } else {
        // Dropping the delegate too, so a later set/get goes back through the
        // inline field until a builder is requested again.
        sourceContext_ = null;
        sourceContextBuilder_ = null;
      }

      return this;
    }
    /**
     * Returns a mutable builder for the source context, creating the
     * field-builder delegate on first use and marking this builder changed.
     *
     * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder getSourceContextBuilder() {

      onChanged();
      return getSourceContextFieldBuilder().getBuilder();
    }
    /**
     * Returns a read-only message-or-builder view of the source context
     * (default instance when unset).
     *
     * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder getSourceContextOrBuilder() {
      if (sourceContextBuilder_ != null) {
        return sourceContextBuilder_.getMessageOrBuilder();
      } else {
        return sourceContext_ == null ?
            org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.getDefaultInstance() : sourceContext_;
      }
    }
    /**
     * <pre>
     * The source context.
     * </pre>
     *
     * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
     */
    // Lazily creates the single-field delegate for 'source_context'. Ownership of
    // the value transfers to the delegate (seeded via getSourceContext(), which
    // substitutes the default instance when unset) and sourceContext_ is nulled.
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder>
        getSourceContextFieldBuilder() {
      if (sourceContextBuilder_ == null) {
        sourceContextBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder>(
                getSourceContext(),
                getParentForChildren(),
                isClean());
        sourceContext_ = null;
      }
      return sourceContextBuilder_;
    }

    // Enum field 'syntax = 5' stored as its raw wire number so unknown enum
    // values survive round-trips (proto3 open-enum behavior; see getSyntax()).
    private int syntax_ = 0;
    /**
     * Returns the raw enum number for {@code syntax}, including values not
     * known to this runtime.
     *
     * <code>optional .google.protobuf.Syntax syntax = 5;</code>
     */
    public int getSyntaxValue() {
      return syntax_;
    }
    /**
     * Sets {@code syntax} by raw enum number; no validation is performed.
     *
     * <code>optional .google.protobuf.Syntax syntax = 5;</code>
     */
    public Builder setSyntaxValue(int value) {
      syntax_ = value;
      onChanged();
      return this;
    }
    /**
     * Returns {@code syntax} as an enum constant, mapping numbers unknown to
     * this runtime to {@code Syntax.UNRECOGNIZED}.
     *
     * <code>optional .google.protobuf.Syntax syntax = 5;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax getSyntax() {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax result = org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.valueOf(syntax_);
      return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.UNRECOGNIZED : result;
    }
    /**
     * Sets {@code syntax} from an enum constant.
     *
     * <code>optional .google.protobuf.Syntax syntax = 5;</code>
     *
     * @throws NullPointerException if {@code value} is null
     */
    public Builder setSyntax(org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax value) {
      if (value == null) {
        throw new NullPointerException();
      }

      syntax_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * Resets {@code syntax} to its default (0).
     *
     * <code>optional .google.protobuf.Syntax syntax = 5;</code>
     */
    public Builder clearSyntax() {

      syntax_ = 0;
      onChanged();
      return this;
    }
    // Intentional no-op: this generated message discards unknown fields
    // (matches the getUnknownFields() override returning the empty set).
    public final Builder setUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    // Intentional no-op, same unknown-field policy as setUnknownFields above.
    public final Builder mergeUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:google.protobuf.Enum)
  }

  // @@protoc_insertion_point(class_scope:google.protobuf.Enum)
  // Shared immutable default instance; returned by getDefaultInstance[ForType]().
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum();
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless singleton parser delegating to the parsing constructor.
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Enum>
      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Enum>() {
    public Enum parsePartialFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
        return new Enum(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Enum> parser() {
    return PARSER;
  }

  @java.lang.Override
  public
org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Enum> getParserForType() {
    return PARSER;
  }

  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

http://git-wip-us.apache.org/repos/asf/hbase/blob/401aa064/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EnumOrBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EnumOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EnumOrBuilder.java
new file mode 100644
index 0000000..af5e01e
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EnumOrBuilder.java
@@ -0,0 +1,157 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/type.proto
// NOTE(review): generated read-only accessor interface for google.protobuf.Enum,
// implemented by both the Enum message and Enum.Builder; regenerate from the
// .proto instead of hand-editing.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface EnumOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.Enum)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * Enum type name.
   *
   * <code>optional string name = 1;</code>
   */
  java.lang.String getName();
  /**
   * Enum type name, as UTF-8 bytes.
   *
   * <code>optional string name = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getNameBytes();

  /**
   * Enum value definitions.
   *
   * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue>
      getEnumvalueList();
  /**
   * Enum value definition at {@code index}.
   *
   * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue getEnumvalue(int index);
  /**
   * Number of enum value definitions.
   *
   * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code>
   */
  int getEnumvalueCount();
  /**
   * Read-only (message-or-builder) views of all enum value definitions.
   *
   * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValueOrBuilder>
      getEnumvalueOrBuilderList();
  /**
   * Read-only view of the enum value definition at {@code index}.
   *
   * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValueOrBuilder getEnumvalueOrBuilder(
      int index);

  /**
   * Protocol buffer options.
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option>
      getOptionsList();
  /**
   * Option at {@code index}.
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Option getOptions(int index);
  /**
   * Number of options.
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  int getOptionsCount();
  /**
   * Read-only (message-or-builder) views of all options.
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder>
      getOptionsOrBuilderList();
  /**
   * Read-only view of the option at {@code index}.
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder getOptionsOrBuilder(
      int index);

  /**
   * Whether the source context field is set.
   *
   * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
   */
  boolean hasSourceContext();
  /**
   * The source context (the default instance when unset).
   *
   * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext getSourceContext();
  /**
   * Read-only view of the source context.
   *
   * <code>optional .google.protobuf.SourceContext source_context = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder getSourceContextOrBuilder();

  /**
   * Raw enum number of the source syntax (supports values unknown to this
   * runtime).
   *
   * <code>optional .google.protobuf.Syntax syntax = 5;</code>
   */
  int getSyntaxValue();
  /**
   * The source syntax as an enum constant.
   *
   * <code>optional .google.protobuf.Syntax syntax = 5;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax getSyntax();
}
