Repository: hbase
Updated Branches:
  refs/heads/0.98 e41209514 -> 3c46a46dd
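The diff below reverts per-column-family time range support on the 0.98 branch: it deletes the generated ColumnFamilyTimeRange protobuf message, the cf_time_range fields in Get and Scan, and the Store-based shouldUseScanner/shouldSeek signatures, restoring the older signatures that take the sorted set of columns. As a minimal sketch of the client-facing difference — assuming a build that still carries the feature, with an illustrative column family name — the per-family override being removed contrasts with the scan-wide time range that remains:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class TimeRangeScanSketch {
  public static void main(String[] args) throws Exception {
    byte[] family = Bytes.toBytes("fam"); // illustrative family name

    // Retained API: a single time range applied to every column family in the scan.
    Scan scanWideRange = new Scan();
    scanWideRange.setTimeRange(0L, 100L);

    // Removed by this revert: a per-column-family range that overrides the scan-wide
    // one for the named family (compare the deleted cf_time_range fields in
    // Client.proto and the reverted setColumnFamilyTimeRange calls in the test diffs
    // below). This call only compiles against a build that still includes the feature.
    Scan perFamilyRange = new Scan();
    perFamilyRange.setTimeRange(27L, 50L);
    perFamilyRange.setColumnFamilyTimeRange(family, 7L, 50L);
  }
}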
http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java ---------------------------------------------------------------------- diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java index 0fe5d3e..9c0447e 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java @@ -6593,668 +6593,6 @@ public final class HBaseProtos { // @@protoc_insertion_point(class_scope:TimeRange) } - public interface ColumnFamilyTimeRangeOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes column_family = 1; - /** - * <code>required bytes column_family = 1;</code> - */ - boolean hasColumnFamily(); - /** - * <code>required bytes column_family = 1;</code> - */ - com.google.protobuf.ByteString getColumnFamily(); - - // required .TimeRange time_range = 2; - /** - * <code>required .TimeRange time_range = 2;</code> - */ - boolean hasTimeRange(); - /** - * <code>required .TimeRange time_range = 2;</code> - */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); - /** - * <code>required .TimeRange time_range = 2;</code> - */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - } - /** - * Protobuf type {@code ColumnFamilyTimeRange} - * - * <pre> - * ColumnFamily Specific TimeRange - * </pre> - */ - public static final class ColumnFamilyTimeRange extends - com.google.protobuf.GeneratedMessage - implements ColumnFamilyTimeRangeOrBuilder { - // Use ColumnFamilyTimeRange.newBuilder() to construct. 
- private ColumnFamilyTimeRange(com.google.protobuf.GeneratedMessage.Builder<?> builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ColumnFamilyTimeRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnFamilyTimeRange defaultInstance; - public static ColumnFamilyTimeRange getDefaultInstance() { - return defaultInstance; - } - - public ColumnFamilyTimeRange getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ColumnFamilyTimeRange( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - columnFamily_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; - if (((bitField0_ & 0x00000002) == 0x00000002)) { - subBuilder = timeRange_.toBuilder(); - } - timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(timeRange_); - timeRange_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000002; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilyTimeRange_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilyTimeRange_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder.class); - } - - public static com.google.protobuf.Parser<ColumnFamilyTimeRange> PARSER = - new com.google.protobuf.AbstractParser<ColumnFamilyTimeRange>() { - public ColumnFamilyTimeRange parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnFamilyTimeRange(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser<ColumnFamilyTimeRange> getParserForType() { - return PARSER; - } - - private int bitField0_; - // required bytes column_family = 
1; - public static final int COLUMN_FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString columnFamily_; - /** - * <code>required bytes column_family = 1;</code> - */ - public boolean hasColumnFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * <code>required bytes column_family = 1;</code> - */ - public com.google.protobuf.ByteString getColumnFamily() { - return columnFamily_; - } - - // required .TimeRange time_range = 2; - public static final int TIME_RANGE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; - /** - * <code>required .TimeRange time_range = 2;</code> - */ - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * <code>required .TimeRange time_range = 2;</code> - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - return timeRange_; - } - /** - * <code>required .TimeRange time_range = 2;</code> - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - return timeRange_; - } - - private void initFields() { - columnFamily_ = com.google.protobuf.ByteString.EMPTY; - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasColumnFamily()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasTimeRange()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, columnFamily_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, timeRange_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, columnFamily_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, timeRange_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) obj; - - boolean result = true; - result = result && (hasColumnFamily() == other.hasColumnFamily()); - if (hasColumnFamily()) { - result = result && getColumnFamily() - .equals(other.getColumnFamily()); - } - result = result && (hasTimeRange() == other.hasTimeRange()); - if (hasTimeRange()) { - result = result && 
getTimeRange() - .equals(other.getTimeRange()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasColumnFamily()) { - hash = (37 * hash) + COLUMN_FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getColumnFamily().hashCode(); - } - if (hasTimeRange()) { - hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER; - hash = (53 * hash) + getTimeRange().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code ColumnFamilyTimeRange} - * - * <pre> - * ColumnFamily Specific TimeRange - * </pre> - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder<Builder> - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilyTimeRange_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilyTimeRange_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getTimeRangeFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - columnFamily_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilyTimeRange_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - 
result.columnFamily_ = columnFamily_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (timeRangeBuilder_ == null) { - result.timeRange_ = timeRange_; - } else { - result.timeRange_ = timeRangeBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()) return this; - if (other.hasColumnFamily()) { - setColumnFamily(other.getColumnFamily()); - } - if (other.hasTimeRange()) { - mergeTimeRange(other.getTimeRange()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasColumnFamily()) { - - return false; - } - if (!hasTimeRange()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required bytes column_family = 1; - private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; - /** - * <code>required bytes column_family = 1;</code> - */ - public boolean hasColumnFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * <code>required bytes column_family = 1;</code> - */ - public com.google.protobuf.ByteString getColumnFamily() { - return columnFamily_; - } - /** - * <code>required bytes column_family = 1;</code> - */ - public Builder setColumnFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - columnFamily_ = value; - onChanged(); - return this; - } - /** - * <code>required bytes column_family = 1;</code> - */ - public Builder clearColumnFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - columnFamily_ = getDefaultInstance().getColumnFamily(); - onChanged(); - return this; - } - - // required .TimeRange time_range = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; - /** - * <code>required .TimeRange time_range = 2;</code> - */ - public boolean hasTimeRange() { - return 
((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * <code>required .TimeRange time_range = 2;</code> - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - if (timeRangeBuilder_ == null) { - return timeRange_; - } else { - return timeRangeBuilder_.getMessage(); - } - } - /** - * <code>required .TimeRange time_range = 2;</code> - */ - public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - timeRange_ = value; - onChanged(); - } else { - timeRangeBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * <code>required .TimeRange time_range = 2;</code> - */ - public Builder setTimeRange( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { - if (timeRangeBuilder_ == null) { - timeRange_ = builderForValue.build(); - onChanged(); - } else { - timeRangeBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * <code>required .TimeRange time_range = 2;</code> - */ - public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { - timeRange_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); - } else { - timeRange_ = value; - } - onChanged(); - } else { - timeRangeBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * <code>required .TimeRange time_range = 2;</code> - */ - public Builder clearTimeRange() { - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - onChanged(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - /** - * <code>required .TimeRange time_range = 2;</code> - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getTimeRangeFieldBuilder().getBuilder(); - } - /** - * <code>required .TimeRange time_range = 2;</code> - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - if (timeRangeBuilder_ != null) { - return timeRangeBuilder_.getMessageOrBuilder(); - } else { - return timeRange_; - } - } - /** - * <code>required .TimeRange time_range = 2;</code> - */ - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> - getTimeRangeFieldBuilder() { - if (timeRangeBuilder_ == null) { - timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( - timeRange_, - getParentForChildren(), - isClean()); - timeRange_ = null; - } - return timeRangeBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ColumnFamilyTimeRange) 
- } - - static { - defaultInstance = new ColumnFamilyTimeRange(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ColumnFamilyTimeRange) - } - public interface ServerNameOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -16804,11 +16142,6 @@ public final class HBaseProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_TimeRange_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_ColumnFamilyTimeRange_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ColumnFamilyTimeRange_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor internal_static_ServerName_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -16910,10 +16243,8 @@ public final class HBaseProtos { "gionSpecifierType\022\r\n\005value\030\002 \002(\014\"?\n\023Regi" + "onSpecifierType\022\017\n\013REGION_NAME\020\001\022\027\n\023ENCO" + "DED_REGION_NAME\020\002\"%\n\tTimeRange\022\014\n\004from\030\001" + - " \001(\004\022\n\n\002to\030\002 \001(\004\"N\n\025ColumnFamilyTimeRang" + - "e\022\025\n\rcolumn_family\030\001 \002(\014\022\036\n\ntime_range\030\002", - " \002(\0132\n.TimeRange\"A\n\nServerName\022\021\n\thost_n" + - "ame\030\001 \002(\t\022\014\n\004port\030\002 \001(\r\022\022\n\nstart_code\030\003 " + + " \001(\004\022\n\n\002to\030\002 \001(\004\"A\n\nServerName\022\021\n\thost_n" + + "ame\030\001 \002(\t\022\014\n\004port\030\002 \001(\r\022\022\n\nstart_code\030\003 ", "\001(\004\"\033\n\013Coprocessor\022\014\n\004name\030\001 \002(\t\"-\n\016Name" + "StringPair\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\"" + ",\n\rNameBytesPair\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030" + @@ -16921,9 +16252,9 @@ public final class HBaseProtos { "\n\006second\030\002 \002(\014\",\n\rNameInt64Pair\022\014\n\004name\030" + "\001 \001(\t\022\r\n\005value\030\002 \001(\003\"\275\001\n\023SnapshotDescrip" + "tion\022\014\n\004name\030\001 \002(\t\022\r\n\005table\030\002 \001(\t\022\030\n\rcre" + - "ation_time\030\003 \001(\003:\0010\022.\n\004type\030\004 \001(\0162\031.Snap", + "ation_time\030\003 \001(\003:\0010\022.\n\004type\030\004 \001(\0162\031.Snap" + "shotDescription.Type:\005FLUSH\022\017\n\007version\030\005" + - " \001(\005\".\n\004Type\022\014\n\010DISABLED\020\000\022\t\n\005FLUSH\020\001\022\r\n" + + " \001(\005\".\n\004Type\022\014\n\010DISABLED\020\000\022\t\n\005FLUSH\020\001\022\r\n", "\tSKIPFLUSH\020\002\"}\n\024ProcedureDescription\022\021\n\t" + "signature\030\001 \002(\t\022\020\n\010instance\030\002 \001(\t\022\030\n\rcre" + "ation_time\030\003 \001(\003:\0010\022&\n\rconfiguration\030\004 \003" + @@ -16931,9 +16262,9 @@ public final class HBaseProtos { "sg\022\020\n\010long_msg\030\001 \002(\003\"\037\n\tDoubleMsg\022\022\n\ndou" + "ble_msg\030\001 \002(\001\"\'\n\rBigDecimalMsg\022\026\n\016bigdec" + "imal_msg\030\001 \002(\014\"5\n\004UUID\022\026\n\016least_sig_bits" + - "\030\001 \002(\004\022\025\n\rmost_sig_bits\030\002 \002(\004\"K\n\023Namespa", + "\030\001 \002(\004\022\025\n\rmost_sig_bits\030\002 \002(\004\"K\n\023Namespa" + "ceDescriptor\022\014\n\004name\030\001 \002(\014\022&\n\rconfigurat" + - "ion\030\002 \003(\0132\017.NameStringPair\"$\n\020RegionServ" + + "ion\030\002 \003(\0132\017.NameStringPair\"$\n\020RegionServ", "erInfo\022\020\n\010infoPort\030\001 \001(\005*r\n\013CompareType\022" + 
"\010\n\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022" + "\r\n\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007" + @@ -16988,98 +16319,92 @@ public final class HBaseProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TimeRange_descriptor, new java.lang.String[] { "From", "To", }); - internal_static_ColumnFamilyTimeRange_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_ColumnFamilyTimeRange_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ColumnFamilyTimeRange_descriptor, - new java.lang.String[] { "ColumnFamily", "TimeRange", }); internal_static_ServerName_descriptor = - getDescriptor().getMessageTypes().get(8); + getDescriptor().getMessageTypes().get(7); internal_static_ServerName_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ServerName_descriptor, new java.lang.String[] { "HostName", "Port", "StartCode", }); internal_static_Coprocessor_descriptor = - getDescriptor().getMessageTypes().get(9); + getDescriptor().getMessageTypes().get(8); internal_static_Coprocessor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Coprocessor_descriptor, new java.lang.String[] { "Name", }); internal_static_NameStringPair_descriptor = - getDescriptor().getMessageTypes().get(10); + getDescriptor().getMessageTypes().get(9); internal_static_NameStringPair_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_NameStringPair_descriptor, new java.lang.String[] { "Name", "Value", }); internal_static_NameBytesPair_descriptor = - getDescriptor().getMessageTypes().get(11); + getDescriptor().getMessageTypes().get(10); internal_static_NameBytesPair_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_NameBytesPair_descriptor, new java.lang.String[] { "Name", "Value", }); internal_static_BytesBytesPair_descriptor = - getDescriptor().getMessageTypes().get(12); + getDescriptor().getMessageTypes().get(11); internal_static_BytesBytesPair_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BytesBytesPair_descriptor, new java.lang.String[] { "First", "Second", }); internal_static_NameInt64Pair_descriptor = - getDescriptor().getMessageTypes().get(13); + getDescriptor().getMessageTypes().get(12); internal_static_NameInt64Pair_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_NameInt64Pair_descriptor, new java.lang.String[] { "Name", "Value", }); internal_static_SnapshotDescription_descriptor = - getDescriptor().getMessageTypes().get(14); + getDescriptor().getMessageTypes().get(13); internal_static_SnapshotDescription_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SnapshotDescription_descriptor, new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", }); internal_static_ProcedureDescription_descriptor = - getDescriptor().getMessageTypes().get(15); + getDescriptor().getMessageTypes().get(14); internal_static_ProcedureDescription_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ProcedureDescription_descriptor, new java.lang.String[] { "Signature", "Instance", "CreationTime", "Configuration", }); internal_static_EmptyMsg_descriptor = - 
getDescriptor().getMessageTypes().get(16); + getDescriptor().getMessageTypes().get(15); internal_static_EmptyMsg_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EmptyMsg_descriptor, new java.lang.String[] { }); internal_static_LongMsg_descriptor = - getDescriptor().getMessageTypes().get(17); + getDescriptor().getMessageTypes().get(16); internal_static_LongMsg_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_LongMsg_descriptor, new java.lang.String[] { "LongMsg", }); internal_static_DoubleMsg_descriptor = - getDescriptor().getMessageTypes().get(18); + getDescriptor().getMessageTypes().get(17); internal_static_DoubleMsg_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DoubleMsg_descriptor, new java.lang.String[] { "DoubleMsg", }); internal_static_BigDecimalMsg_descriptor = - getDescriptor().getMessageTypes().get(19); + getDescriptor().getMessageTypes().get(18); internal_static_BigDecimalMsg_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BigDecimalMsg_descriptor, new java.lang.String[] { "BigdecimalMsg", }); internal_static_UUID_descriptor = - getDescriptor().getMessageTypes().get(20); + getDescriptor().getMessageTypes().get(19); internal_static_UUID_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UUID_descriptor, new java.lang.String[] { "LeastSigBits", "MostSigBits", }); internal_static_NamespaceDescriptor_descriptor = - getDescriptor().getMessageTypes().get(21); + getDescriptor().getMessageTypes().get(20); internal_static_NamespaceDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_NamespaceDescriptor_descriptor, new java.lang.String[] { "Name", "Configuration", }); internal_static_RegionServerInfo_descriptor = - getDescriptor().getMessageTypes().get(22); + getDescriptor().getMessageTypes().get(21); internal_static_RegionServerInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionServerInfo_descriptor, http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-protocol/src/main/protobuf/Client.proto ---------------------------------------------------------------------- diff --git a/hbase-protocol/src/main/protobuf/Client.proto b/hbase-protocol/src/main/protobuf/Client.proto index 54fdfa5..0526d6c 100644 --- a/hbase-protocol/src/main/protobuf/Client.proto +++ b/hbase-protocol/src/main/protobuf/Client.proto @@ -75,7 +75,6 @@ message Get { // If the row to get doesn't exist, return the // closest row before. 
optional bool closest_row_before = 11 [default = false]; - repeated ColumnFamilyTimeRange cf_time_range = 13; } message Result { @@ -235,7 +234,6 @@ message Scan { optional bool small = 14; optional bool reversed = 15 [default = false]; optional uint32 caching = 17; - repeated ColumnFamilyTimeRange cf_time_range = 19; } /** http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-protocol/src/main/protobuf/HBase.proto ---------------------------------------------------------------------- diff --git a/hbase-protocol/src/main/protobuf/HBase.proto b/hbase-protocol/src/main/protobuf/HBase.proto index 994c7a2..3e3d570 100644 --- a/hbase-protocol/src/main/protobuf/HBase.proto +++ b/hbase-protocol/src/main/protobuf/HBase.proto @@ -103,12 +103,6 @@ message TimeRange { optional uint64 to = 2; } -/* ColumnFamily Specific TimeRange */ -message ColumnFamilyTimeRange { - required bytes column_family = 1; - required TimeRange time_range = 2; -} - /* Comparison operators */ enum CompareType { LESS = 0; http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java index de61fa7..90bfd11 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; +import java.util.SortedSet; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.KeyValue; @@ -75,14 +76,14 @@ public interface KeyValueScanner { * Allows to filter out scanners (both StoreFile and memstore) that we don't * want to use based on criteria such as Bloom filters and timestamp ranges. 
* @param scan the scan that we are selecting scanners for - * @param store the set of columns in the current column family, or null if + * @param columns the set of columns in the current column family, or null if * not specified by the scan * @param oldestUnexpiredTS the oldest timestamp we are interested in for * this query, based on TTL * @return true if the scanner should be included in the query */ boolean shouldUseScanner( - Scan scan, Store store, long oldestUnexpiredTS + Scan scan, SortedSet<byte[]> columns, long oldestUnexpiredTS ); // "Lazy scanner" optimizations http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java index d9a0c7c..549f15e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java @@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.HeapSize; -import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.regionserver.MemStoreLAB.Allocation; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; @@ -658,17 +657,11 @@ public class MemStore implements HeapSize { /** * Check if this memstore may contain the required keys * @param scan - * @param store * @return False if the key definitely does not exist in this Memstore */ - public boolean shouldSeek(Scan scan, Store store, long oldestUnexpiredTS) { - byte[] cf = store.getFamily().getName(); - TimeRange timeRange = scan.getColumnFamilyTimeRange().get(cf); - if (timeRange == null) { - timeRange = scan.getTimeRange(); - } - return (timeRangeTracker.includesTimeRange(timeRange) || - snapshotTimeRangeTracker.includesTimeRange(timeRange)) + public boolean shouldSeek(Scan scan, long oldestUnexpiredTS) { + return (timeRangeTracker.includesTimeRange(scan.getTimeRange()) || + snapshotTimeRangeTracker.includesTimeRange(scan.getTimeRange())) && (Math.max(timeRangeTracker.getMaximumTimestamp(), snapshotTimeRangeTracker.getMaximumTimestamp()) >= oldestUnexpiredTS); @@ -941,9 +934,9 @@ public class MemStore implements HeapSize { } @Override - public boolean shouldUseScanner(Scan scan, Store store, + public boolean shouldUseScanner(Scan scan, SortedSet<byte[]> columns, long oldestUnexpiredTS) { - return shouldSeek(scan, store, oldestUnexpiredTS); + return shouldSeek(scan, oldestUnexpiredTS); } /** http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java index 2e2f0d9..5e7cecb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; 
+import java.util.SortedSet; import org.apache.commons.lang.NotImplementedException; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -55,7 +56,7 @@ public abstract class NonLazyKeyValueScanner implements KeyValueScanner { } @Override - public boolean shouldUseScanner(Scan scan, Store store, + public boolean shouldUseScanner(Scan scan, SortedSet<byte[]> columns, long oldestUnexpiredTS) { // No optimizations implemented by default. return true; http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java index 534704b..bdd3913 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java @@ -159,12 +159,7 @@ public class ScanQueryMatcher { public ScanQueryMatcher(Scan scan, ScanInfo scanInfo, NavigableSet<byte[]> columns, ScanType scanType, long readPointToUse, long earliestPutTs, long oldestUnexpiredTS, long now, RegionCoprocessorHost regionCoprocessorHost) throws IOException { - TimeRange timeRange = scan.getColumnFamilyTimeRange().get(scanInfo.getFamily()); - if (timeRange == null) { - this.tr = scan.getTimeRange(); - } else { - this.tr = timeRange; - } + this.tr = scan.getTimeRange(); this.rowComparator = scanInfo.getComparator(); this.regionCoprocessorHost = regionCoprocessorHost; this.deletes = instantiateDeleteTracker(); http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java index eb23570..5ba7140 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java @@ -43,7 +43,6 @@ import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; -import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.CacheConfig; @@ -1163,16 +1162,16 @@ public class StoreFile { /** * Check if this storeFile may contain keys within the TimeRange that * have not expired (i.e. not older than oldestUnexpiredTS). 
- * @param timeRange the timeRange to restrict + * @param scan the current scan * @param oldestUnexpiredTS the oldest timestamp that is not expired, as * determined by the column family's TTL * @return false if queried keys definitely don't exist in this StoreFile */ - boolean passesTimerangeFilter(TimeRange timeRange, long oldestUnexpiredTS) { + boolean passesTimerangeFilter(Scan scan, long oldestUnexpiredTS) { if (timeRangeTracker == null) { return true; } else { - return timeRangeTracker.includesTimeRange(timeRange) && + return timeRangeTracker.includesTimeRange(scan.getTimeRange()) && timeRangeTracker.getMaximumTimestamp() >= oldestUnexpiredTS; } } http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java index 372f963..0156637 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java @@ -24,7 +24,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; -import java.util.NavigableSet; +import java.util.SortedSet; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.logging.Log; @@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.io.hfile.HFileScanner; import org.apache.hadoop.hbase.regionserver.StoreFile.Reader; @@ -430,16 +429,9 @@ public class StoreFileScanner implements KeyValueScanner { } @Override - public boolean shouldUseScanner(Scan scan, Store store, long oldestUnexpiredTS) { - byte[] columnFamily = store.getFamily().getName(); - TimeRange timeRange = scan.getColumnFamilyTimeRange().get(columnFamily); - if (timeRange == null) { - timeRange = scan.getTimeRange(); - } - - NavigableSet<byte[]> columns = scan.getFamilyMap().get(columnFamily); - return reader.passesTimerangeFilter(timeRange, oldestUnexpiredTS) && reader - .passesKeyRangeFilter(scan) && reader.passesBloomFilter(scan, columns); + public boolean shouldUseScanner(Scan scan, SortedSet<byte[]> columns, long oldestUnexpiredTS) { + return reader.passesTimerangeFilter(scan, oldestUnexpiredTS) + && reader.passesKeyRangeFilter(scan) && reader.passesBloomFilter(scan, columns); } @Override http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java index d905000..fc034ed 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java @@ -372,7 +372,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner continue; } - if (kvs.shouldUseScanner(scan, store, expiredTimestampCutoff)) { + if 
(kvs.shouldUseScanner(scan, columns, expiredTimestampCutoff)) { scanners.add(kvs); } } http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java index bdc5a8f0..19b460e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java @@ -260,7 +260,7 @@ public class TestHFileWriterV2 { // Static stuff used by various HFile v2 unit tests - public static final String COLUMN_FAMILY_NAME = "_-myColumnFamily-_"; + private static final String COLUMN_FAMILY_NAME = "_-myColumnFamily-_"; private static final int MIN_ROW_OR_QUALIFIER_LENGTH = 64; private static final int MAX_ROW_OR_QUALIFIER_LENGTH = 128; http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java index 739175c..9628623 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java @@ -23,8 +23,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; import java.io.IOException; import java.util.ArrayList; @@ -39,7 +37,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; -import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.client.Scan; @@ -285,12 +282,9 @@ public class TestCompoundBloomFilter { private boolean isInBloom(StoreFileScanner scanner, byte[] row, byte[] qualifier) { Scan scan = new Scan(row, row); - scan.addColumn(Bytes.toBytes(TestHFileWriterV2.COLUMN_FAMILY_NAME), qualifier); - Store store = mock(Store.class); - HColumnDescriptor hcd = mock(HColumnDescriptor.class); - when(hcd.getName()).thenReturn(Bytes.toBytes(TestHFileWriterV2.COLUMN_FAMILY_NAME)); - when(store.getFamily()).thenReturn(hcd); - return scanner.shouldUseScanner(scan, store, Long.MIN_VALUE); + TreeSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR); + columns.add(qualifier); + return scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE); } private Path writeStoreFile(int t, BloomType bt, List<KeyValue> kvs) http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java index 8a365f9..7fdade9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java @@ -53,9 +53,6 @@ import com.google.common.base.Joiner; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - /** memstore test case */ @Category(MediumTests.class) public class TestMemStore extends TestCase { @@ -736,31 +733,28 @@ public class TestMemStore extends TestCase { * Test to ensure correctness when using Memstore with multiple timestamps */ public void testMultipleTimestamps() throws IOException { - long[] timestamps = new long[] { 20, 10, 5, 1 }; + long[] timestamps = new long[] {20,10,5,1}; Scan scan = new Scan(); - for (long timestamp : timestamps) - addRows(memstore, timestamp); + for (long timestamp: timestamps) + addRows(memstore,timestamp); - byte[] fam = Bytes.toBytes("fam"); - HColumnDescriptor hcd = mock(HColumnDescriptor.class); - when(hcd.getName()).thenReturn(fam); - Store store = mock(Store.class); - when(store.getFamily()).thenReturn(hcd); - scan.setColumnFamilyTimeRange(fam, 0, 2); - assertTrue(memstore.shouldSeek(scan, store, Long.MIN_VALUE)); + scan.setTimeRange(0, 2); + assertTrue(memstore.shouldSeek(scan, Long.MIN_VALUE)); - scan.setColumnFamilyTimeRange(fam, 20, 82); - assertTrue(memstore.shouldSeek(scan, store, Long.MIN_VALUE)); + scan.setTimeRange(20, 82); + assertTrue(memstore.shouldSeek(scan, Long.MIN_VALUE)); - scan.setColumnFamilyTimeRange(fam, 10, 20); - assertTrue(memstore.shouldSeek(scan, store, Long.MIN_VALUE)); + scan.setTimeRange(10, 20); + assertTrue(memstore.shouldSeek(scan, Long.MIN_VALUE)); - scan.setColumnFamilyTimeRange(fam, 8, 12); - assertTrue(memstore.shouldSeek(scan, store, Long.MIN_VALUE)); + scan.setTimeRange(8, 12); + assertTrue(memstore.shouldSeek(scan, Long.MIN_VALUE)); - scan.setColumnFamilyTimeRange(fam, 28, 42); - assertTrue(!memstore.shouldSeek(scan, store, Long.MIN_VALUE)); + /*This test is not required for correctness but it should pass when + * timestamp range optimization is on*/ + //scan.setTimeRange(28, 42); + //assertTrue(!memstore.shouldSeek(scan)); } //////////////////////////////////// http://git-wip-us.apache.org/repos/asf/hbase/blob/3c46a46d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java index c27e6bc..d302180 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java @@ -33,7 +33,12 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.HBaseTestCase; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Scan; import 
org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; @@ -50,7 +55,6 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.FSUtils; -import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; @@ -58,9 +62,6 @@ import com.google.common.base.Joiner; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - /** * Test HStoreFile */ @@ -471,13 +472,8 @@ public class TestStoreFile extends HBaseTestCase { columns.add("family:col".getBytes()); Scan scan = new Scan(row.getBytes(),row.getBytes()); - byte[] family = "family".getBytes(); - scan.addColumn(family, "family:col".getBytes()); - Store store = mock(Store.class); - HColumnDescriptor hcd = mock(HColumnDescriptor.class); - when(hcd.getName()).thenReturn(family); - when(store.getFamily()).thenReturn(hcd); - boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE); + scan.addColumn("family".getBytes(), "family:col".getBytes()); + boolean exists = scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE); if (i % 2 == 0) { if (!exists) falseNeg++; } else { @@ -666,14 +662,9 @@ public class TestStoreFile extends HBaseTestCase { columns.add(("col" + col).getBytes()); Scan scan = new Scan(row.getBytes(),row.getBytes()); - byte[] family = "family".getBytes(); - scan.addColumn(family, ("col" + col).getBytes()); - Store store = mock(Store.class); - HColumnDescriptor hcd = mock(HColumnDescriptor.class); - when(hcd.getName()).thenReturn(family); - when(store.getFamily()).thenReturn(hcd); + scan.addColumn("family".getBytes(), ("col"+col).getBytes()); boolean exists = - scanner.shouldUseScanner(scan, store, Long.MIN_VALUE); + scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE); boolean shouldRowExist = i % 2 == 0; boolean shouldColExist = j % 2 == 0; shouldColExist = shouldColExist || bt[x] == BloomType.ROW; @@ -780,7 +771,7 @@ public class TestStoreFile extends HBaseTestCase { Scan scan = new Scan(); // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname. - Path storedir = new Path(new Path(this.testDir, "7e0102"), Bytes.toString(family)); + Path storedir = new Path(new Path(this.testDir, "7e0102"), "familyname"); Path dir = new Path(storedir, "1234567890"); HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build(); // Make a store file and write data to it. 
@@ -790,7 +781,7 @@ public class TestStoreFile extends HBaseTestCase { .build(); List<KeyValue> kvList = getKeyValueSet(timestamps,numRows, - qualifier, family); + family, qualifier); for (KeyValue kv : kvList) { writer.append(kv); @@ -805,34 +796,21 @@ public class TestStoreFile extends HBaseTestCase { TreeSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR); columns.add(qualifier); - Store store = mock(Store.class); - HColumnDescriptor hcd = mock(HColumnDescriptor.class); - when(hcd.getName()).thenReturn(family); - when(store.getFamily()).thenReturn(hcd); - scan.setTimeRange(20, 100); - assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE)); + assertTrue(scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE)); scan.setTimeRange(1, 2); - assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE)); + assertTrue(scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE)); scan.setTimeRange(8, 10); - assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE)); + assertTrue(scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE)); - // lets make sure it still works with column family time ranges - scan.setColumnFamilyTimeRange(family, 7, 50); - assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE)); + scan.setTimeRange(7, 50); + assertTrue(scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE)); // This test relies on the timestamp range optimization - scan = new Scan(); - scan.setTimeRange(27, 50); - assertTrue(!scanner.shouldUseScanner(scan, store, Long.MIN_VALUE)); - - // should still use the scanner because we override the family time range - scan = new Scan(); scan.setTimeRange(27, 50); - scan.setColumnFamilyTimeRange(family, 7, 50); - assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE)); + assertTrue(!scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE)); } public void testCacheOnWriteEvictOnClose() throws Exception {
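For reference, the restored KeyValueScanner.shouldUseScanner signature takes the SortedSet of columns for the current family instead of the Store. A small sketch of how a caller exercises it, mirroring the isInBloom helper in the TestCompoundBloomFilter diff above (row and qualifier values are whatever the caller supplies; Long.MIN_VALUE disables the TTL cutoff):

import java.util.TreeSet;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
import org.apache.hadoop.hbase.util.Bytes;

public final class ScannerSelectionSketch {
  private ScannerSelectionSketch() {}

  /** Returns true if the store file might contain the row/qualifier, per the Bloom
   *  filter, key range, and time range checks behind shouldUseScanner. */
  static boolean mightContain(StoreFileScanner scanner, byte[] row, byte[] qualifier) {
    Scan scan = new Scan(row, row);
    TreeSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
    columns.add(qualifier);
    // Long.MIN_VALUE: no oldest-unexpired-timestamp cutoff, as in the test above.
    return scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE);
  }
}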
