Author: mbautin
Date: Thu Feb 2 19:32:05 2012
New Revision: 1239780
URL: http://svn.apache.org/viewvc?rev=1239780&view=rev
Log:
[jira] [HBASE-4542] [89-fb] Add filter info to slow query logging
Summary:
Slow operation log does not provide enough information when a filter is
present. The following was done to add the filter info:
1) Added toString() method for filters inheriting FilterBase; this
affects 22 filters and their subclasses. The info added includes the
filter's name and its members. For example, for TimestampsFilter, we'll
output its class name as well as the defined timestamps.
2) Added a field 'filter' in Get::toMap() and
Scan::toMap() to enable the logging of filter info.
Test Plan:
1. Run and passed unit-tests to make sure it does not break things
2. Run kannan's script to trigger the slow operation logging, checked
for each filter to make sure the filter info was logged. To be more
detailed, the output logs are as follows (only the 'filter' field is put
here for ease of reading):
* "filter":"TimestampsFilter (3/3): [2, 3, 5]"
* "filter":"TimestampsFilter (5/6): [2, 3, 5, 7, 11]"
* "filter":"ColumnPrefixFilter col2"
* "filter":"ColumnRangeFilter [col2a, col2b]"
* "filter":"ColumnCountGetFilter 8"
* "filter":"ColumnPaginationFilter (4, 4)"
* "filter":"InclusiveStopFilter row"
* "filter":"PrefixFilter row"
* "filter":"PageFilter 1"
* "filter":"SkipFilter TimestampsFilter (1/1): [1000]"
* "filter":"WhileMatchFilter TimestampsFilter (3/3): [2, 3, 5]"
* "filter":"KeyOnlyFilter"
* "filter":"FirstKeyOnlyFilter"
* "filter":"MultipleColumnPrefixFilter (3/3): [a, b, c]"
* "filter":"DependentColumnFilter (family, qualifier, true, LESS, value)"
* "filter":"FamilyFilter (LESS, value)"
* "filter":"QualifierFilter (LESS, value)"
* "filter":"RowFilter (LESS, value)"
* "filter":"ValueFilter (LESS, value)"
* "filter":"KeyOnlyFilter"
* "filter":"FirstKeyOnlyFilter"
* "filter":"SingleColumnValueFilter (family, qualifier, EQUAL, value)"
* "filter":"SingleColumnValueExcludeFilter (family, qualifier, EQUAL,
value)"
* "filter":"FilterList AND (2/2): [KeyOnlyFilter, FirstKeyOnlyFilter]"
Please check ~zhiqiu/Codes/scripts/testFilter.rb for the testing script.
3. Added unit test cases to TestOperation to verify the filters'
toString() method works well.
Revert Plan:
Tags:
Reviewers: Kannan, madhuvaidya, mbautin, JIRA
Reviewed By: mbautin
CC: Kannan, mbautin, zhiqiu
Differential Revision: https://reviews.facebook.net/D1263
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/client/Get.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/client/Scan.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/client/Get.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/client/Get.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/client/Get.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/client/Get.java
Thu Feb 2 19:32:05 2012
@@ -367,7 +367,7 @@ public class Get extends Operation imple
}
map.put("totalColumns", colCount);
if (this.filter != null) {
- map.put("Filter", this.filter.getClass().getName());
+ map.put("filter", this.filter.toString());
}
return map;
}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/client/Scan.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/client/Scan.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/client/Scan.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/client/Scan.java
Thu Feb 2 19:32:05 2012
@@ -291,11 +291,11 @@ public class Scan extends Operation impl
* @param batch the maximum number of values
*/
public void setBatch(int batch) {
- if(this.hasFilter() && this.filter.hasFilterRow()) {
- throw new IncompatibleFilterException(
+ if (this.hasFilter() && this.filter.hasFilterRow()) {
+ throw new IncompatibleFilterException(
"Cannot set batch on a scan using a filter" +
" that returns true for filter.hasFilterRow");
- }
+ }
this.batch = batch;
}
@@ -532,7 +532,7 @@ public class Scan extends Operation impl
}
map.put("totalColumns", colCount);
if (this.filter != null) {
- map.put("filter", this.filter.getClass().getName());
+ map.put("filter", this.filter.toString());
}
return map;
}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
Thu Feb 2 19:32:05 2012
@@ -87,4 +87,9 @@ public class ColumnCountGetFilter extend
public void write(DataOutput out) throws IOException {
out.writeInt(this.limit);
}
-}
\ No newline at end of file
+
+ @Override
+ public String toString() {
+ return this.getClass().getSimpleName() + " " + this.limit;
+ }
+}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
Thu Feb 2 19:32:05 2012
@@ -101,4 +101,10 @@ public class ColumnPaginationFilter exte
out.writeInt(this.limit);
out.writeInt(this.offset);
}
-}
\ No newline at end of file
+
+ @Override
+ public String toString() {
+ return String.format("%s (%d, %d)", this.getClass().getSimpleName(),
+ this.limit, this.offset);
+ }
+}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
Thu Feb 2 19:32:05 2012
@@ -101,4 +101,9 @@ public class ColumnPrefixFilter extends
kv.getBuffer(), kv.getRowOffset(), kv.getRowLength(), kv.getBuffer(),
kv.getFamilyOffset(), kv.getFamilyLength(), prefix, 0, prefix.length);
}
-}
\ No newline at end of file
+
+ @Override
+ public String toString() {
+ return this.getClass().getSimpleName() + " " +
Bytes.toStringBinary(this.prefix);
+ }
+}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
Thu Feb 2 19:32:05 2012
@@ -188,4 +188,12 @@ public class ColumnRangeFilter extends F
.getFamilyLength(), this.minColumn, 0, this.minColumn == null ? 0
: this.minColumn.length);
}
+
+ @Override
+ public String toString() {
+ return this.getClass().getSimpleName() + " "
+ + (this.minColumnInclusive ? "[" : "(") +
Bytes.toStringBinary(this.minColumn)
+ + ", " + Bytes.toStringBinary(this.maxColumn)
+ + (this.maxColumnInclusive ? "]" : ")");
+ }
}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
Thu Feb 2 19:32:05 2012
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.filter;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.HbaseObjectWritable;
+import org.apache.hadoop.hbase.util.Bytes;
import java.io.DataInput;
import java.io.DataOutput;
@@ -103,9 +104,9 @@ public abstract class CompareFilter exte
protected boolean doCompare(final CompareOp compareOp,
final WritableByteArrayComparable comparator, final byte [] data,
final int offset, final int length) {
- if (compareOp == CompareOp.NO_OP) {
- return true;
- }
+ if (compareOp == CompareOp.NO_OP) {
+ return true;
+ }
int compareResult =
comparator.compareTo(Arrays.copyOfRange(data, offset,
offset + length));
@@ -160,4 +161,12 @@ public abstract class CompareFilter exte
HbaseObjectWritable.writeObject(out, comparator,
WritableByteArrayComparable.class, null);
}
+
+ @Override
+ public String toString() {
+ return String.format("%s (%s, %s)",
+ this.getClass().getSimpleName(),
+ this.compareOp.name(),
+ Bytes.toStringBinary(this.comparator.getValue()));
+ }
}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
Thu Feb 2 19:32:05 2012
@@ -223,4 +223,14 @@ public class DependentColumnFilter exten
out.writeBoolean(this.dropDependentColumn);
}
+ @Override
+ public String toString() {
+ return String.format("%s (%s, %s, %s, %s, %s)",
+ this.getClass().getSimpleName(),
+ Bytes.toStringBinary(this.columnFamily),
+ Bytes.toStringBinary(this.columnQualifier),
+ this.dropDependentColumn,
+ this.compareOp.name(),
+ Bytes.toStringBinary(this.comparator.getValue()));
+ }
}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
Thu Feb 2 19:32:05 2012
@@ -131,4 +131,11 @@ public abstract class FilterBase impleme
public static Filter createFilterFromArguments (ArrayList<byte []>
filterArguments) {
throw new IllegalArgumentException("Method not implemented");
}
+
+ /**
+ * Return filter's info for debugging and logging purpose.
+ */
+ public String toString() {
+ return this.getClass().getSimpleName();
+ }
}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
Thu Feb 2 19:32:05 2012
@@ -50,6 +50,7 @@ public class FilterList implements Filte
}
private static final Configuration conf = HBaseConfiguration.create();
+ private static final int MAX_LOG_FILTERS = 5;
private Operator operator = Operator.MUST_PASS_ALL;
private List<Filter> filters = new ArrayList<Filter>();
@@ -253,4 +254,20 @@ public class FilterList implements Filte
public KeyValue getNextKeyHint(KeyValue currentKV) {
return null;
}
-}
+
+ @Override
+ public String toString() {
+ return toString(MAX_LOG_FILTERS);
+ }
+
+ protected String toString(int maxFilters) {
+ int endIndex = this.filters.size() < maxFilters
+ ? this.filters.size() : maxFilters;
+ return String.format("%s %s (%d/%d): %s",
+ this.getClass().getSimpleName(),
+ this.operator == Operator.MUST_PASS_ALL ? "AND" : "OR",
+ endIndex,
+ this.filters.size(),
+ this.filters.subList(0, endIndex).toString());
+ }
+}
\ No newline at end of file
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
Thu Feb 2 19:32:05 2012
@@ -89,4 +89,9 @@ public class InclusiveStopFilter extends
public void readFields(DataInput in) throws IOException {
this.stopRowKey = Bytes.readByteArray(in);
}
-}
\ No newline at end of file
+
+ @Override
+ public String toString() {
+ return this.getClass().getSimpleName() + " " +
Bytes.toStringBinary(this.stopRowKey);
+ }
+}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
Thu Feb 2 19:32:05 2012
@@ -39,6 +39,7 @@ import java.util.ArrayList;
public class MultipleColumnPrefixFilter extends FilterBase {
protected byte [] hint = null;
protected TreeSet<byte []> sortedPrefixes = createTreeSet();
+ private final static int MAX_LOG_PREFIXES = 5;
public MultipleColumnPrefixFilter() {
super();
@@ -139,4 +140,28 @@ public class MultipleColumnPrefixFilter
}
});
}
+
+ @Override
+ public String toString() {
+ return toString(MAX_LOG_PREFIXES);
+ }
+
+ protected String toString(int maxPrefixes) {
+ StringBuilder prefixes = new StringBuilder();
+
+ int count = 0;
+ for (byte[] ba : this.sortedPrefixes) {
+ if (count >= maxPrefixes) {
+ break;
+ }
+ ++count;
+ prefixes.append(Bytes.toStringBinary(ba));
+ if (count < this.sortedPrefixes.size() && count < maxPrefixes) {
+ prefixes.append(", ");
+ }
+ }
+
+ return String.format("%s (%d/%d): [%s]", this.getClass().getSimpleName(),
+ count, this.sortedPrefixes.size(), prefixes.toString());
+ }
}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
Thu Feb 2 19:32:05 2012
@@ -88,4 +88,9 @@ public class PageFilter extends FilterBa
public void write(final DataOutput out) throws IOException {
out.writeLong(pageSize);
}
-}
\ No newline at end of file
+
+ @Override
+ public String toString() {
+ return this.getClass().getSimpleName() + " " + this.pageSize;
+ }
+}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
Thu Feb 2 19:32:05 2012
@@ -84,4 +84,9 @@ public class PrefixFilter extends Filter
public void readFields(DataInput in) throws IOException {
this.prefix = Bytes.readByteArray(in);
}
-}
\ No newline at end of file
+
+ @Override
+ public String toString() {
+ return this.getClass().getSimpleName() + " " +
Bytes.toStringBinary(this.prefix);
+ }
+}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
Thu Feb 2 19:32:05 2012
@@ -305,4 +305,12 @@ public class SingleColumnValueFilter ext
out.writeBoolean(filterIfMissing);
out.writeBoolean(latestVersionOnly);
}
+
+ @Override
+ public String toString() {
+ return String.format("%s (%s, %s, %s, %s)",
+ this.getClass().getSimpleName(),
Bytes.toStringBinary(this.columnFamily),
+ Bytes.toStringBinary(this.columnQualifier), this.compareOp.name(),
+ Bytes.toStringBinary(this.comparator.getValue()));
+ }
}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
Thu Feb 2 19:32:05 2012
@@ -98,4 +98,9 @@ public class SkipFilter extends FilterBa
throw new RuntimeException("Failed deserialize.", e);
}
}
+
+ @Override
+ public String toString() {
+ return this.getClass().getSimpleName() + " " + this.filter.toString();
+ }
}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
Thu Feb 2 19:32:05 2012
@@ -22,6 +22,7 @@ import com.google.common.base.Preconditi
public class TimestampsFilter extends FilterBase {
TreeSet<Long> timestamps;
+ private static final int MAX_LOG_TIMESTAMPS = 5;
// Used during scans to hint the scan to stop early
// once the timestamps fall below the minTimeStamp.
@@ -116,4 +117,28 @@ public class TimestampsFilter extends Fi
.getFamilyLength(), kv.getBuffer(), kv.getQualifierOffset(), kv
.getQualifierLength(), nextTimestamp);
}
+
+ @Override
+ public String toString() {
+ return toString(MAX_LOG_TIMESTAMPS);
+ }
+
+ protected String toString(int maxTimestamps) {
+ StringBuilder tsList = new StringBuilder();
+
+ int count = 0;
+ for (Long ts : this.timestamps) {
+ if (count >= maxTimestamps) {
+ break;
+ }
+ ++count;
+ tsList.append(ts.toString());
+ if (count < this.timestamps.size() && count < maxTimestamps) {
+ tsList.append(", ");
+ }
+ }
+
+ return String.format("%s (%d/%d): [%s]", this.getClass().getSimpleName(),
+ count, this.timestamps.size(), tsList.toString());
+ }
}
Modified:
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
(original)
+++
hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
Thu Feb 2 19:32:05 2012
@@ -99,4 +99,9 @@ public class WhileMatchFilter extends Fi
throw new RuntimeException("Failed deserialize.", e);
}
}
+
+ @Override
+ public String toString() {
+ return this.getClass().getSimpleName() + " " + this.filter.toString();
+ }
}
Modified:
hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
URL:
http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java?rev=1239780&r1=1239779&r2=1239780&view=diff
==============================================================================
---
hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
(original)
+++
hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
Thu Feb 2 19:32:05 2012
@@ -24,10 +24,36 @@ import static org.junit.Assert.assertNot
import org.junit.Test;
import java.io.IOException;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
+import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;
+import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
+import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.DependentColumnFilter;
+import org.apache.hadoop.hbase.filter.FamilyFilter;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
+import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
+import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
+import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
+import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter;
+import org.apache.hadoop.hbase.filter.PageFilter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.QualifierFilter;
+import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
+import org.apache.hadoop.hbase.filter.SkipFilter;
+import org.apache.hadoop.hbase.filter.TimestampsFilter;
+import org.apache.hadoop.hbase.filter.ValueFilter;
+import org.apache.hadoop.hbase.filter.WhileMatchFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.codehaus.jackson.map.ObjectMapper;
@@ -44,6 +70,217 @@ public class TestOperation {
private static ObjectMapper mapper = new ObjectMapper();
+ private static List<Long> TS_LIST = Arrays.asList(2L, 3L, 5L);
+ private static TimestampsFilter TS_FILTER = new TimestampsFilter(TS_LIST);
+ private static String STR_TS_FILTER =
+ TS_FILTER.getClass().getSimpleName() + " (3/3): [2, 3, 5]";
+
+ private static List<Long> L_TS_LIST =
+ Arrays.asList(0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L);
+ private static TimestampsFilter L_TS_FILTER =
+ new TimestampsFilter(L_TS_LIST);
+ private static String STR_L_TS_FILTER =
+ L_TS_FILTER.getClass().getSimpleName() + " (5/11): [0, 1, 2, 3, 4]";
+
+ private static String COL_NAME_1 = "col1";
+ private static ColumnPrefixFilter COL_PRE_FILTER =
+ new ColumnPrefixFilter(COL_NAME_1.getBytes());
+ private static String STR_COL_PRE_FILTER =
+ COL_PRE_FILTER.getClass().getSimpleName() + " " + COL_NAME_1;
+
+ private static String COL_NAME_2 = "col2";
+ private static ColumnRangeFilter CR_FILTER = new ColumnRangeFilter(
+ COL_NAME_1.getBytes(), true, COL_NAME_2.getBytes(), false);
+ private static String STR_CR_FILTER = CR_FILTER.getClass().getSimpleName()
+ + " [" + COL_NAME_1 + ", " + COL_NAME_2 + ")";
+
+ private static int COL_COUNT = 9;
+ private static ColumnCountGetFilter CCG_FILTER =
+ new ColumnCountGetFilter(COL_COUNT);
+ private static String STR_CCG_FILTER =
+ CCG_FILTER.getClass().getSimpleName() + " " + COL_COUNT;
+
+ private static int LIMIT = 3;
+ private static int OFFSET = 4;
+ private static ColumnPaginationFilter CP_FILTER =
+ new ColumnPaginationFilter(LIMIT, OFFSET);
+ private static String STR_CP_FILTER = CP_FILTER.getClass().getSimpleName()
+ + " (" + LIMIT + ", " + OFFSET + ")";
+
+ private static String STOP_ROW_KEY = "stop";
+ private static InclusiveStopFilter IS_FILTER =
+ new InclusiveStopFilter(STOP_ROW_KEY.getBytes());
+ private static String STR_IS_FILTER =
+ IS_FILTER.getClass().getSimpleName() + " " + STOP_ROW_KEY;
+
+ private static String PREFIX = "prefix";
+ private static PrefixFilter PREFIX_FILTER =
+ new PrefixFilter(PREFIX.getBytes());
+ private static String STR_PREFIX_FILTER = "PrefixFilter " + PREFIX;
+
+ private static byte[][] PREFIXES = {
+ "0".getBytes(), "1".getBytes(), "2".getBytes()};
+ private static MultipleColumnPrefixFilter MCP_FILTER =
+ new MultipleColumnPrefixFilter(PREFIXES);
+ private static String STR_MCP_FILTER =
+ MCP_FILTER.getClass().getSimpleName() + " (3/3): [0, 1, 2]";
+
+ private static byte[][] L_PREFIXES = {
+ "0".getBytes(), "1".getBytes(), "2".getBytes(), "3".getBytes(),
+ "4".getBytes(), "5".getBytes(), "6".getBytes(), "7".getBytes()};
+ private static MultipleColumnPrefixFilter L_MCP_FILTER =
+ new MultipleColumnPrefixFilter(L_PREFIXES);
+ private static String STR_L_MCP_FILTER =
+ L_MCP_FILTER.getClass().getSimpleName() + " (5/8): [0, 1, 2, 3, 4]";
+
+ private static int PAGE_SIZE = 9;
+ private static PageFilter PAGE_FILTER = new PageFilter(PAGE_SIZE);
+ private static String STR_PAGE_FILTER =
+ PAGE_FILTER.getClass().getSimpleName() + " " + PAGE_SIZE;
+
+ private static SkipFilter SKIP_FILTER = new SkipFilter(L_TS_FILTER);
+ private static String STR_SKIP_FILTER =
+ SKIP_FILTER.getClass().getSimpleName() + " " + STR_L_TS_FILTER;
+
+ private static WhileMatchFilter WHILE_FILTER =
+ new WhileMatchFilter(L_TS_FILTER);
+ private static String STR_WHILE_FILTER =
+ WHILE_FILTER.getClass().getSimpleName() + " " + STR_L_TS_FILTER;
+
+ private static KeyOnlyFilter KEY_ONLY_FILTER = new KeyOnlyFilter();
+ private static String STR_KEY_ONLY_FILTER =
+ KEY_ONLY_FILTER.getClass().getSimpleName();
+
+ private static FirstKeyOnlyFilter FIRST_KEY_ONLY_FILTER =
+ new FirstKeyOnlyFilter();
+ private static String STR_FIRST_KEY_ONLY_FILTER =
+ FIRST_KEY_ONLY_FILTER.getClass().getSimpleName();
+
+ private static CompareOp CMP_OP = CompareOp.EQUAL;
+ private static byte[] CMP_VALUE = "value".getBytes();
+ private static BinaryComparator BC = new BinaryComparator(CMP_VALUE);
+ private static DependentColumnFilter DC_FILTER =
+ new DependentColumnFilter(FAMILY, QUALIFIER, true, CMP_OP, BC);
+ private static String STR_DC_FILTER = String.format(
+ "%s (%s, %s, %s, %s, %s)", DC_FILTER.getClass().getSimpleName(),
+ Bytes.toStringBinary(FAMILY), Bytes.toStringBinary(QUALIFIER), true,
+ CMP_OP.name(), Bytes.toStringBinary(BC.getValue()));
+
+ private static FamilyFilter FAMILY_FILTER = new FamilyFilter(CMP_OP, BC);
+ private static String STR_FAMILY_FILTER =
+ FAMILY_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
+
+ private static QualifierFilter QUALIFIER_FILTER =
+ new QualifierFilter(CMP_OP, BC);
+ private static String STR_QUALIFIER_FILTER =
+ QUALIFIER_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
+
+ private static RowFilter ROW_FILTER = new RowFilter(CMP_OP, BC);
+ private static String STR_ROW_FILTER =
+ ROW_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
+
+ private static ValueFilter VALUE_FILTER = new ValueFilter(CMP_OP, BC);
+ private static String STR_VALUE_FILTER =
+ VALUE_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
+
+ private static SingleColumnValueFilter SCV_FILTER =
+ new SingleColumnValueFilter(FAMILY, QUALIFIER, CMP_OP, CMP_VALUE);
+ private static String STR_SCV_FILTER = String.format("%s (%s, %s, %s, %s)",
+ SCV_FILTER.getClass().getSimpleName(), Bytes.toStringBinary(FAMILY),
+ Bytes.toStringBinary(QUALIFIER), CMP_OP.name(),
+ Bytes.toStringBinary(CMP_VALUE));
+
+ private static SingleColumnValueExcludeFilter SCVE_FILTER =
+ new SingleColumnValueExcludeFilter(FAMILY, QUALIFIER, CMP_OP, CMP_VALUE);
+ private static String STR_SCVE_FILTER = String.format("%s (%s, %s, %s, %s)",
+ SCVE_FILTER.getClass().getSimpleName(), Bytes.toStringBinary(FAMILY),
+ Bytes.toStringBinary(QUALIFIER), CMP_OP.name(),
+ Bytes.toStringBinary(CMP_VALUE));
+
+ private static FilterList AND_FILTER_LIST = new FilterList(
+ Operator.MUST_PASS_ALL, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER,
+ CR_FILTER));
+ private static String STR_AND_FILTER_LIST = String.format(
+ "%s AND (3/3): [%s, %s, %s]", AND_FILTER_LIST.getClass().getSimpleName(),
+ STR_TS_FILTER, STR_L_TS_FILTER, STR_CR_FILTER);
+
+ private static FilterList OR_FILTER_LIST = new FilterList(
+ Operator.MUST_PASS_ONE, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER,
+ CR_FILTER));
+ private static String STR_OR_FILTER_LIST = String.format(
+ "%s OR (3/3): [%s, %s, %s]", AND_FILTER_LIST.getClass().getSimpleName(),
+ STR_TS_FILTER, STR_L_TS_FILTER, STR_CR_FILTER);
+
+ private static FilterList L_FILTER_LIST = new FilterList(
+ Arrays.asList((Filter) TS_FILTER, L_TS_FILTER, CR_FILTER, COL_PRE_FILTER,
+ CCG_FILTER, CP_FILTER, PREFIX_FILTER, PAGE_FILTER));
+ private static String STR_L_FILTER_LIST = String.format(
+ "%s AND (5/8): [%s, %s, %s, %s, %s]",
+ L_FILTER_LIST.getClass().getSimpleName(), STR_TS_FILTER, STR_L_TS_FILTER,
+ STR_CR_FILTER, STR_COL_PRE_FILTER, STR_CCG_FILTER, STR_CP_FILTER);
+
+ private static Filter[] FILTERS = {
+ TS_FILTER, // TimestampsFilter
+ L_TS_FILTER, // TimestampsFilter
+ COL_PRE_FILTER, // ColumnPrefixFilter
+ CP_FILTER, // ColumnPaginationFilter
+ CR_FILTER, // ColumnRangeFilter
+ CCG_FILTER, // ColumnCountGetFilter
+ IS_FILTER, // InclusiveStopFilter
+ PREFIX_FILTER, // PrefixFilter
+ PAGE_FILTER, // PageFilter
+ SKIP_FILTER, // SkipFilter
+ WHILE_FILTER, // WhileMatchFilter
+ KEY_ONLY_FILTER, // KeyOnlyFilter
+ FIRST_KEY_ONLY_FILTER, // FirstKeyOnlyFilter
+ MCP_FILTER, // MultipleColumnPrefixFilter
+ L_MCP_FILTER, // MultipleColumnPrefixFilter
+ DC_FILTER, // DependentColumnFilter
+ FAMILY_FILTER, // FamilyFilter
+ QUALIFIER_FILTER, // QualifierFilter
+ ROW_FILTER, // RowFilter
+ VALUE_FILTER, // ValueFilter
+ SCV_FILTER, // SingleColumnValueFilter
+ SCVE_FILTER, // SingleColumnValueExcludeFilter
+ AND_FILTER_LIST, // FilterList
+ OR_FILTER_LIST, // FilterList
+ L_FILTER_LIST, // FilterList
+ };
+
+ private static String[] FILTERS_INFO = {
+ STR_TS_FILTER, // TimestampsFilter
+ STR_L_TS_FILTER, // TimestampsFilter
+ STR_COL_PRE_FILTER, // ColumnPrefixFilter
+ STR_CP_FILTER, // ColumnPaginationFilter
+ STR_CR_FILTER, // ColumnRangeFilter
+ STR_CCG_FILTER, // ColumnCountGetFilter
+ STR_IS_FILTER, // InclusiveStopFilter
+ STR_PREFIX_FILTER, // PrefixFilter
+ STR_PAGE_FILTER, // PageFilter
+ STR_SKIP_FILTER, // SkipFilter
+ STR_WHILE_FILTER, // WhileMatchFilter
+ STR_KEY_ONLY_FILTER, // KeyOnlyFilter
+ STR_FIRST_KEY_ONLY_FILTER, // FirstKeyOnlyFilter
+ STR_MCP_FILTER, // MultipleColumnPrefixFilter
+ STR_L_MCP_FILTER, // MultipleColumnPrefixFilter
+ STR_DC_FILTER, // DependentColumnFilter
+ STR_FAMILY_FILTER, // FamilyFilter
+ STR_QUALIFIER_FILTER, // QualifierFilter
+ STR_ROW_FILTER, // RowFilter
+ STR_VALUE_FILTER, // ValueFilter
+ STR_SCV_FILTER, // SingleColumnValueFilter
+ STR_SCVE_FILTER, // SingleColumnValueExcludeFilter
+ STR_AND_FILTER_LIST, // FilterList
+ STR_OR_FILTER_LIST, // FilterList
+ STR_L_FILTER_LIST, // FilterList
+ };
+
+ static {
+ assertEquals("The sizes of static arrays do not match: "
+ + "[FILTERS: %d <=> FILTERS_INFO: %d]",
+ FILTERS.length, FILTERS_INFO.length);
+ }
+
/**
* Test the client Operations' JSON encoding to ensure that produced JSON is
* parseable and that the details are present and not corrupted.
@@ -127,4 +364,29 @@ public class TestOperation {
assertEquals("Qualifier incorrect in Delete.toJSON()",
Bytes.toStringBinary(QUALIFIER), kvMap.get("qualifier"));
}
+
+ /**
+ * Verify the filters' info are included in the (Get/Scan) operation.
+ *
+ * TODO: This relies on Operation.toJSON(), which means this test's failure
+ * can be due to corrupted JSON data. We may want to isolate this test by
+ * removing this dependency.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testOperationWithFilters()
+ throws IOException {
+ Scan scan = new Scan();
+ Get get = new Get();
+ for (int i = 0; i < FILTERS.length; ++i) {
+ scan.setFilter(FILTERS[i]);
+ assertEquals(FILTERS_INFO[i],
+ mapper.readValue(scan.toJSON(), HashMap.class).get("filter"));
+
+ get.setFilter(FILTERS[i]);
+ assertEquals(FILTERS_INFO[i],
+ mapper.readValue(get.toJSON(), HashMap.class).get("filter"));
+ }
+ }
}