Author: apurtell
Date: Tue Nov 2 00:00:32 2010
New Revision: 1029899
URL: http://svn.apache.org/viewvc?rev=1029899&view=rev
Log:
HBASE-3120 [rest] Content transcoding
Added:
hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/
hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/Base64.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/NullTransform.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/Transform.java
hbase/trunk/src/test/java/org/apache/hadoop/hbase/rest/TestTransform.java
Modified:
hbase/trunk/CHANGES.txt
hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
Modified: hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hbase/trunk/CHANGES.txt?rev=1029899&r1=1029898&r2=1029899&view=diff
==============================================================================
--- hbase/trunk/CHANGES.txt (original)
+++ hbase/trunk/CHANGES.txt Tue Nov 2 00:00:32 2010
@@ -1082,7 +1082,7 @@ Release 0.21.0 - Unreleased
HBASE-3154 HBase RPC should support timeout (Hairong via jgray)
HBASE-3184 Xmx setting in pom to use for tests/surefire does not appear
to work
-
+ HBASE-3120 [rest] Content transcoding
NEW FEATURES
HBASE-1961 HBase EC2 scripts
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java?rev=1029899&r1=1029898&r2=1029899&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java Tue Nov 2 00:00:32 2010
@@ -31,8 +31,6 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-
public class ExistsResource extends ResourceBase {
static CacheControl cacheControl;
@@ -42,16 +40,16 @@ public class ExistsResource extends Reso
cacheControl.setNoTransform(false);
}
- String tableName;
+ TableResource tableResource;
/**
* Constructor
- * @param table
+ * @param tableResource
* @throws IOException
*/
- public ExistsResource(String table) throws IOException {
+ public ExistsResource(TableResource tableResource) throws IOException {
super();
- this.tableName = table;
+ this.tableResource = tableResource;
}
@GET
@@ -59,8 +57,7 @@ public class ExistsResource extends Reso
MIMETYPE_BINARY})
public Response get(final @Context UriInfo uriInfo) {
try {
- HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
- if (!admin.tableExists(tableName)) {
+ if (!tableResource.exists()) {
throw new WebApplicationException(Response.Status.NOT_FOUND);
}
} catch (IOException e) {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java?rev=1029899&r1=1029898&r2=1029899&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java Tue Nov 2 00:00:32 2010
@@ -55,17 +55,22 @@ public class RegionsResource extends Res
cacheControl.setNoTransform(false);
}
- String tableName;
+ TableResource tableResource;
- public RegionsResource(String table) throws IOException {
+ /**
+ * Constructor
+ * @param tableResource
+ * @throws IOException
+ */
+ public RegionsResource(TableResource tableResource) throws IOException {
super();
- this.tableName = table;
+ this.tableResource = tableResource;
}
private Map<HRegionInfo,HServerAddress> getTableRegions()
throws IOException {
HTablePool pool = servlet.getTablePool();
- HTableInterface table = pool.getTable(tableName);
+ HTableInterface table = pool.getTable(tableResource.getName());
try {
return ((HTable)table).getRegionsInfo();
} finally {
@@ -81,6 +86,7 @@ public class RegionsResource extends Res
}
servlet.getMetrics().incrementRequests(1);
try {
+ String tableName = tableResource.getName();
TableInfoModel model = new TableInfoModel(tableName);
Map<HRegionInfo,HServerAddress> regions = getTableRegions();
for (Map.Entry<HRegionInfo,HServerAddress> e: regions.entrySet()) {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java?rev=1029899&r1=1029898&r2=1029899&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java Tue Nov 2 00:00:32 2010
@@ -48,30 +48,27 @@ import org.apache.hadoop.hbase.client.Pu
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.hadoop.hbase.rest.transform.Transform;
import org.apache.hadoop.hbase.util.Bytes;
public class RowResource extends ResourceBase {
private static final Log LOG = LogFactory.getLog(RowResource.class);
- String tableName;
+ TableResource tableResource;
RowSpec rowspec;
/**
* Constructor
- * @param table
+ * @param tableResource
* @param rowspec
* @param versions
* @throws IOException
*/
- public RowResource(String table, String rowspec, String versions)
- throws IOException {
+ public RowResource(TableResource tableResource, String rowspec,
+ String versions) throws IOException {
super();
- this.tableName = table;
+ this.tableResource = tableResource;
this.rowspec = new RowSpec(rowspec);
- if (LOG.isDebugEnabled()) {
- LOG.debug("new RowResource: table=" + this.tableName + "rowspec=" +
- this.rowspec);
- }
if (versions != null) {
this.rowspec.setMaxVersions(Integer.valueOf(versions));
}
@@ -86,7 +83,7 @@ public class RowResource extends Resourc
servlet.getMetrics().incrementRequests(1);
try {
ResultGenerator generator =
- ResultGenerator.fromRowSpec(tableName, rowspec, null);
+ ResultGenerator.fromRowSpec(tableResource.getName(), rowspec, null);
if (!generator.hasNext()) {
throw new WebApplicationException(Response.Status.NOT_FOUND);
}
@@ -101,9 +98,12 @@ public class RowResource extends Resourc
rowKey = value.getRow();
rowModel = new RowModel(rowKey);
}
- rowModel.addCell(
- new CellModel(value.getFamily(), value.getQualifier(),
- value.getTimestamp(), value.getValue()));
+ byte[] family = value.getFamily();
+ byte[] qualifier = value.getQualifier();
+ byte[] data = tableResource.transform(family, qualifier,
+ value.getValue(), Transform.Direction.OUT);
+ rowModel.addCell(new CellModel(family, qualifier,
+ value.getTimestamp(), data));
if (++count > rowspec.getMaxValues()) {
break;
}
@@ -131,12 +131,16 @@ public class RowResource extends Resourc
}
try {
ResultGenerator generator =
- ResultGenerator.fromRowSpec(tableName, rowspec, null);
+ ResultGenerator.fromRowSpec(tableResource.getName(), rowspec, null);
if (!generator.hasNext()) {
throw new WebApplicationException(Response.Status.NOT_FOUND);
}
KeyValue value = generator.next();
- ResponseBuilder response = Response.ok(value.getValue());
+ byte[] family = value.getFamily();
+ byte[] qualifier = value.getQualifier();
+ byte[] data = tableResource.transform(family, qualifier,
+ value.getValue(), Transform.Direction.OUT);
+ ResponseBuilder response = Response.ok(data);
response.header("X-Timestamp", value.getTimestamp());
return response.build();
} catch (IOException e) {
@@ -151,7 +155,7 @@ public class RowResource extends Resourc
HTableInterface table = null;
try {
List<RowModel> rows = model.getRows();
- table = pool.getTable(tableName);
+ table = pool.getTable(tableResource.getName());
((HTable)table).setAutoFlush(false);
for (RowModel row: rows) {
byte[] key = row.getKey();
@@ -159,9 +163,13 @@ public class RowResource extends Resourc
for (CellModel cell: row.getCells()) {
byte [][] parts = KeyValue.parseColumn(cell.getColumn());
if (parts.length == 2 && parts[1].length > 0) {
- put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
+ put.add(parts[0], parts[1], cell.getTimestamp(),
+ tableResource.transform(parts[0], parts[1], cell.getValue(),
+ Transform.Direction.IN));
} else {
- put.add(parts[0], null, cell.getTimestamp(), cell.getValue());
+ put.add(parts[0], null, cell.getTimestamp(),
+ tableResource.transform(parts[0], null, cell.getValue(),
+ Transform.Direction.IN));
}
}
table.put(put);
@@ -215,11 +223,15 @@ public class RowResource extends Resourc
Put put = new Put(row);
byte parts[][] = KeyValue.parseColumn(column);
if (parts.length == 2 && parts[1].length > 0) {
- put.add(parts[0], parts[1], timestamp, message);
+ put.add(parts[0], parts[1], timestamp,
+ tableResource.transform(parts[0], parts[1], message,
+ Transform.Direction.IN));
} else {
- put.add(parts[0], null, timestamp, message);
+ put.add(parts[0], null, timestamp,
+ tableResource.transform(parts[0], null, message,
+ Transform.Direction.IN));
}
- table = pool.getTable(tableName);
+ table = pool.getTable(tableResource.getName());
table.put(put);
if (LOG.isDebugEnabled()) {
LOG.debug("PUT " + put.toString());
@@ -306,7 +318,7 @@ public class RowResource extends Resourc
HTablePool pool = servlet.getTablePool();
HTableInterface table = null;
try {
- table = pool.getTable(tableName);
+ table = pool.getTable(tableResource.getName());
table.delete(delete);
if (LOG.isDebugEnabled()) {
LOG.debug("DELETE " + delete.toString());
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java?rev=1029899&r1=1029898&r2=1029899&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java Tue Nov 2 00:00:32 2010
@@ -51,16 +51,16 @@ public class ScannerResource extends Res
static final Map<String,ScannerInstanceResource> scanners =
Collections.synchronizedMap(new HashMap<String,ScannerInstanceResource>());
- String tableName;
+ TableResource tableResource;
/**
* Constructor
- * @param table
+ * @param tableResource
* @throws IOException
*/
- public ScannerResource(String table) throws IOException {
+ public ScannerResource(TableResource tableResource) throws IOException {
super();
- this.tableName = table;
+ this.tableResource = tableResource;
}
static void delete(final String id) {
@@ -78,10 +78,11 @@ public class ScannerResource extends Res
model.getColumns(), model.getStartTime(), model.getEndTime(), 1);
try {
Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
- ScannerResultGenerator gen =
+ String tableName = tableResource.getName();
+ ScannerResultGenerator gen =
new ScannerResultGenerator(tableName, spec, filter);
String id = gen.getID();
- ScannerInstanceResource instance =
+ ScannerInstanceResource instance =
new ScannerInstanceResource(tableName, id, gen, model.getBatch());
scanners.put(id, instance);
if (LOG.isDebugEnabled()) {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java?rev=1029899&r1=1029898&r2=1029899&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java Tue Nov 2 00:00:32 2010
@@ -62,22 +62,22 @@ public class SchemaResource extends Reso
cacheControl.setNoTransform(false);
}
- String tableName;
+ TableResource tableResource;
/**
* Constructor
- * @param table
+ * @param tableResource
* @throws IOException
*/
- public SchemaResource(String table) throws IOException {
+ public SchemaResource(TableResource tableResource) throws IOException {
super();
- this.tableName = table;
+ this.tableResource = tableResource;
}
private HTableDescriptor getTableSchema() throws IOException,
TableNotFoundException {
HTablePool pool = servlet.getTablePool();
- HTableInterface table = pool.getTable(tableName);
+ HTableInterface table = pool.getTable(tableResource.getName());
try {
return table.getTableDescriptor();
} finally {
@@ -131,7 +131,7 @@ public class SchemaResource extends Reso
}
return Response.created(uriInfo.getAbsolutePath()).build();
} catch (IOException e) {
- throw new WebApplicationException(e,
+ throw new WebApplicationException(e,
Response.Status.SERVICE_UNAVAILABLE);
}
}
@@ -150,14 +150,14 @@ public class SchemaResource extends Reso
if (htd.hasFamily(hcd.getName())) {
admin.modifyColumn(name, hcd);
} else {
- admin.addColumn(name, hcd);
+ admin.addColumn(name, hcd);
}
}
} catch (IOException e) {
- throw new WebApplicationException(e,
+ throw new WebApplicationException(e,
Response.Status.INTERNAL_SERVER_ERROR);
} finally {
- admin.enableTable(tableName);
+ admin.enableTable(tableResource.getName());
}
return Response.ok().build();
} catch (IOException e) {
@@ -169,7 +169,7 @@ public class SchemaResource extends Reso
private Response update(final TableSchemaModel model, final boolean replace,
final UriInfo uriInfo) {
try {
- byte[] name = Bytes.toBytes(tableName);
+ byte[] name = Bytes.toBytes(tableResource.getName());
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
if (replace || !admin.tableExists(name)) {
return replace(name, model, uriInfo, admin);
@@ -214,7 +214,7 @@ public class SchemaResource extends Reso
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
boolean success = false;
for (int i = 0; i < 10; i++) try {
- admin.disableTable(tableName);
+ admin.disableTable(tableResource.getName());
success = true;
break;
} catch (IOException e) {
@@ -222,7 +222,7 @@ public class SchemaResource extends Reso
if (!success) {
throw new IOException("could not disable table");
}
- admin.deleteTable(tableName);
+ admin.deleteTable(tableResource.getName());
return Response.ok().build();
} catch (TableNotFoundException e) {
throw new WebApplicationException(Response.Status.NOT_FOUND);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java?rev=1029899&r1=1029898&r2=1029899&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java Tue Nov 2 00:00:32 2010
@@ -21,17 +21,172 @@
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
import javax.ws.rs.Encoded;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.rest.transform.NullTransform;
+import org.apache.hadoop.hbase.rest.transform.Transform;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.util.StringUtils;
+
public class TableResource extends ResourceBase {
+ private static final Log LOG = LogFactory.getLog(TableResource.class);
+
+ /**
+ * HCD attributes starting with this string are considered transform
+ * directives
+ */
+ private static final String DIRECTIVE_KEY = "Transform$";
+
+ /**
+ * Transform directives are of the form <tt><qualifier>:<class></tt>,
+ * where <tt>qualifier</tt> is a string for exact matching or '*' as a
+ * wildcard that will match anything; and <tt>class</tt> is either the
+ * fully qualified class name of a transform implementation or the short
+ * name of a transform in the <tt>org.apache.hadoop.hbase.rest.transform</tt>
+ * package.
+ */
+ private static final Pattern DIRECTIVE_PATTERN =
+ Pattern.compile("([^\\:]+)\\:([^\\,]+)\\,?");
+ private static final Transform defaultTransform = new NullTransform();
+ private static final
+ Map<String,Map<byte[],Map<byte[],Transform>>> transformMap =
+ new ConcurrentHashMap<String,Map<byte[],Map<byte[],Transform>>>();
+ private static final Map<String,Long> lastCheckedMap =
+ new ConcurrentHashMap<String,Long>();
+
+ /**
+ * @param table the table
+ * @param family the column family
+ * @param qualifier the column qualifier, or null
+ * @return the transformation specified for the given family or qualifier, if
+ * any, otherwise the default
+ */
+ static Transform getTransform(String table, byte[] family, byte[] qualifier) {
+ if (qualifier == null) {
+ qualifier = HConstants.EMPTY_BYTE_ARRAY;
+ }
+ Map<byte[],Map<byte[],Transform>> familyMap = transformMap.get(table);
+ if (familyMap != null) {
+ Map<byte[],Transform> columnMap = familyMap.get(family);
+ if (columnMap != null) {
+ Transform t = columnMap.get(qualifier);
+ // check as necessary if there is a wildcard entry
+ if (t == null) {
+ t = columnMap.get(HConstants.EMPTY_BYTE_ARRAY);
+ }
+ // if we found something, return it, otherwise we will return the
+ // default by falling through
+ if (t != null) {
+ return t;
+ }
+ }
+ }
+ return defaultTransform;
+ }
+
+ synchronized static void setTransform(String table, byte[] family,
+ byte[] qualifier, Transform transform) {
+ Map<byte[],Map<byte[],Transform>> familyMap = transformMap.get(table);
+ if (familyMap == null) {
+ familyMap = new ConcurrentSkipListMap<byte[],Map<byte[],Transform>>(
+ Bytes.BYTES_COMPARATOR);
+ transformMap.put(table, familyMap);
+ }
+ Map<byte[],Transform> columnMap = familyMap.get(family);
+ if (columnMap == null) {
+ columnMap = new ConcurrentSkipListMap<byte[],Transform>(
+ Bytes.BYTES_COMPARATOR);
+ familyMap.put(family, columnMap);
+ }
+ // if transform is null, remove any existing entry
+ if (transform != null) {
+ columnMap.put(qualifier, transform);
+ } else {
+ columnMap.remove(qualifier);
+ }
+ }
String table;
/**
+ * Scan the table schema for transform directives. These are column family
+ * attributes containing a comma-separated list of elements of the form
+ * <tt><qualifier>:<transform-class></tt>, where qualifier
+ * can be a string for exact matching or '*' as a wildcard to match anything.
+ * The attribute key must begin with the string "Transform$".
+ */
+ void scanTransformAttrs() throws IOException {
+ HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
+ HTableDescriptor htd = admin.getTableDescriptor(Bytes.toBytes(table));
+ for (HColumnDescriptor hcd: htd.getFamilies()) {
+ for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
+ hcd.getValues().entrySet()) {
+ // does the key start with the transform directive tag?
+ String key = Bytes.toString(e.getKey().get());
+ if (!key.startsWith(DIRECTIVE_KEY)) {
+ // no, skip
+ continue;
+ }
+ // match a comma separated list of one or more directives
+ byte[] value = e.getValue().get();
+ Matcher m = DIRECTIVE_PATTERN.matcher(Bytes.toString(value));
+ while (m.find()) {
+ byte[] qualifier = HConstants.EMPTY_BYTE_ARRAY;
+ String s = m.group(1);
+ if (s.length() > 0 && !s.equals("*")) {
+ qualifier = Bytes.toBytes(s);
+ }
+ boolean retry = false;
+ String className = m.group(2);
+ while (true) {
+ try {
+ // if a transform was previously configured for the qualifier,
+ // this will simply replace it
+ setTransform(table, hcd.getName(), qualifier,
+ (Transform)Class.forName(className).newInstance());
+ break;
+ } catch (InstantiationException ex) {
+ LOG.error(StringUtils.stringifyException(ex));
+ if (retry) {
+ break;
+ }
+ retry = true;
+ } catch (IllegalAccessException ex) {
+ LOG.error(StringUtils.stringifyException(ex));
+ if (retry) {
+ break;
+ }
+ retry = true;
+ } catch (ClassNotFoundException ex) {
+ if (retry) {
+ LOG.error(StringUtils.stringifyException(ex));
+ break;
+ }
+ className = "org.apache.hadoop.hbase.rest.transform." + className;
+ retry = true;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ /**
* Constructor
* @param table
* @throws IOException
@@ -39,26 +194,74 @@ public class TableResource extends Resou
public TableResource(String table) throws IOException {
super();
this.table = table;
+ // Scanning the table schema is too expensive to do for every operation.
+ // Do it once per minute by default.
+ // Setting hbase.rest.transform.check.interval to <= 0 disables rescanning.
+ long now = System.currentTimeMillis();
+ Long lastChecked = lastCheckedMap.get(table);
+ if (lastChecked != null) {
+ long interval = servlet.getConfiguration()
+ .getLong("hbase.rest.transform.check.interval", 60000);
+ if (interval > 0 && (now - lastChecked.longValue()) > interval) {
+ scanTransformAttrs();
+ lastCheckedMap.put(table, now);
+ }
+ } else {
+ scanTransformAttrs();
+ lastCheckedMap.put(table, now);
+ }
+ }
+
+ /** @return the table name */
+ String getName() {
+ return table;
+ }
+
+ /**
+ * @return true if the table exists
+ * @throws IOException
+ */
+ boolean exists() throws IOException {
+ HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
+ return admin.tableExists(table);
+ }
+
+ /**
+ * Apply any configured transformations to the value
+ * @param family
+ * @param qualifier
+ * @param value
+ * @param direction
+ * @return the transformed value, or <tt>value</tt> unchanged if no
+ * transform is configured
+ * @throws IOException
+ */
+ byte[] transform(byte[] family, byte[] qualifier, byte[] value,
+ Transform.Direction direction) throws IOException {
+ Transform t = getTransform(table, family, qualifier);
+ if (t != null) {
+ return t.transform(value, direction);
+ }
+ return value;
}
@Path("exists")
public ExistsResource getExistsResource() throws IOException {
- return new ExistsResource(table);
+ return new ExistsResource(this);
}
@Path("regions")
public RegionsResource getRegionsResource() throws IOException {
- return new RegionsResource(table);
+ return new RegionsResource(this);
}
@Path("scanner")
public ScannerResource getScannerResource() throws IOException {
- return new ScannerResource(table);
+ return new ScannerResource(this);
}
@Path("schema")
public SchemaResource getSchemaResource() throws IOException {
- return new SchemaResource(table);
+ return new SchemaResource(this);
}
@Path("{rowspec: .+}")
@@ -67,6 +270,6 @@ public class TableResource extends Resou
// the RowSpec constructor has a chance to parse
final @PathParam("rowspec") @Encoded String rowspec,
final @QueryParam("v") String versions) throws IOException {
- return new RowResource(table, rowspec, versions);
+ return new RowResource(this, rowspec, versions);
}
}
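To illustrate the directive format scanned above: a transform is attached by
setting a column family attribute whose key begins with "Transform$". A
minimal sketch, assuming a running cluster and a client Configuration named
"conf" (the table and family names here are illustrative, not from this
commit):

    // Transcode every qualifier ('*') in family "b" with the built-in
    // Base64 transform; the short name is resolved against the
    // org.apache.hadoop.hbase.rest.transform package.
    HTableDescriptor htd = new HTableDescriptor("t");
    HColumnDescriptor hcd = new HColumnDescriptor("b");
    hcd.setValue("Transform$1", "*:Base64");
    htd.addFamily(hcd);
    new HBaseAdmin(conf).createTable(htd);

    // The table schema is rescanned for directives at most once per
    // interval (default 60000 ms); an interval <= 0 disables rescanning.
    conf.setLong("hbase.rest.transform.check.interval", 60000);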
Added: hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/Base64.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/Base64.java?rev=1029899&view=auto
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/Base64.java (added)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/Base64.java Tue Nov 2 00:00:32 2010
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest.transform;
+
+public class Base64 implements Transform {
+ @Override
+ public byte[] transform(byte[] data, Direction direction) {
+ switch (direction) {
+ case IN:
+ return com.sun.jersey.core.util.Base64.encode(data);
+ case OUT:
+ return com.sun.jersey.core.util.Base64.decode(data);
+ default:
+ throw new RuntimeException("illegal direction");
+ }
+ }
+}
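With this transform, Direction.IN (client to server) base64-encodes a cell
value before it is stored, and Direction.OUT decodes it on the way back out,
so REST clients exchange raw bytes while the table holds the encoded form.
A quick round-trip sketch (illustrative only):

    Transform t = new Base64();
    byte[] raw = Bytes.toBytes("testvalue2");
    byte[] stored = t.transform(raw, Transform.Direction.IN);  // base64 form
    byte[] back = t.transform(stored, Transform.Direction.OUT);
    assert Bytes.equals(raw, back);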
Added: hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/NullTransform.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/NullTransform.java?rev=1029899&view=auto
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/NullTransform.java (added)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/NullTransform.java Tue Nov 2 00:00:32 2010
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest.transform;
+
+public class NullTransform implements Transform {
+ @Override
+ public byte[] transform(byte[] data, Direction direction) {
+ return data;
+ }
+}
Added: hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/Transform.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/Transform.java?rev=1029899&view=auto
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/Transform.java (added)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/rest/transform/Transform.java Tue Nov 2 00:00:32 2010
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest.transform;
+
+/**
+ * Data transformation module
+ */
+public interface Transform {
+
+ /** Transfer direction */
+ static enum Direction {
+ /** From client to server */
+ IN,
+ /** From server to client */
+ OUT
+ };
+
+ /**
+ * Transform data from one representation to another according to
+ * transfer direction.
+ * @param data input data
+ * @param direction IN or OUT
+ * @return the transformed data
+ */
+ byte[] transform(byte[] data, Direction direction);
+}
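Because TableResource instantiates transforms via Class.newInstance(), an
implementation needs a public no-argument constructor. A hypothetical
example, not part of this commit, that masks values by XOR with a fixed
byte (XOR is its own inverse, so both directions are handled identically):

    package org.apache.hadoop.hbase.rest.transform;

    public class XorMask implements Transform {
      private static final byte MASK = (byte) 0x5f;

      @Override
      public byte[] transform(byte[] data, Direction direction) {
        // symmetric: applying the mask twice restores the original bytes
        byte[] out = new byte[data.length];
        for (int i = 0; i < data.length; i++) {
          out[i] = (byte) (data[i] ^ MASK);
        }
        return out;
      }
    }

It would be selected with a directive value such as "*:XorMask" (short name)
or a fully qualified class name.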
Added: hbase/trunk/src/test/java/org/apache/hadoop/hbase/rest/TestTransform.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/rest/TestTransform.java?rev=1029899&view=auto
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/rest/TestTransform.java (added)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/rest/TestTransform.java Tue Nov 2 00:00:32 2010
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.rest.client.Client;
+import org.apache.hadoop.hbase.rest.client.Cluster;
+import org.apache.hadoop.hbase.rest.client.Response;
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class TestTransform extends HBaseRESTClusterTestBase {
+ static final String TABLE = "TestTransform";
+ static final String CFA = "a";
+ static final String CFB = "b";
+ static final String COLUMN_1 = CFA + ":1";
+ static final String COLUMN_2 = CFB + ":2";
+ static final String ROW_1 = "testrow1";
+ static final byte[] VALUE_1 = Bytes.toBytes("testvalue1");
+ static final byte[] VALUE_2 = Bytes.toBytes("testvalue2");
+ static final byte[] VALUE_2_BASE64 = Bytes.toBytes("dGVzdHZhbHVlMg==");
+
+ Client client;
+ HBaseAdmin admin;
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ client = new Client(new Cluster().add("localhost", testServletPort));
+ admin = new HBaseAdmin(conf);
+ if (admin.tableExists(TABLE)) {
+ return;
+ }
+ HTableDescriptor htd = new HTableDescriptor(TABLE);
+ htd.addFamily(new HColumnDescriptor(CFA));
+ HColumnDescriptor cfB = new HColumnDescriptor(CFB);
+ cfB.setValue("Transform$1", "*:Base64");
+ htd.addFamily(cfB);
+ admin.createTable(htd);
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ client.shutdown();
+ super.tearDown();
+ }
+
+ public void testTransform() throws Exception {
+ String path1 = "/" + TABLE + "/" + ROW_1 + "/" + COLUMN_1;
+ String path2 = "/" + TABLE + "/" + ROW_1 + "/" + COLUMN_2;
+
+ // store value 1
+ Response response = client.put(path1, MIMETYPE_BINARY, VALUE_1);
+ assertEquals(response.getCode(), 200);
+
+ // store value 2 (stargate should transform into base64)
+ response = client.put(path2, MIMETYPE_BINARY, VALUE_2);
+ assertEquals(response.getCode(), 200);
+
+ // get the table contents directly
+ HTable table = new HTable(TABLE);
+ Get get = new Get(Bytes.toBytes(ROW_1));
+ get.addFamily(Bytes.toBytes(CFA));
+ get.addFamily(Bytes.toBytes(CFB));
+ Result result = table.get(get);
+ // value 1 should not be transformed
+ byte[] value = result.getValue(Bytes.toBytes(CFA), Bytes.toBytes("1"));
+ assertNotNull(value);
+ assertTrue(Bytes.equals(value, VALUE_1));
+ // value 2 should have been base64 encoded
+ value = result.getValue(Bytes.toBytes(CFB), Bytes.toBytes("2"));
+ assertNotNull(value);
+ assertTrue(Bytes.equals(value, VALUE_2_BASE64));
+ table.close();
+
+ // stargate should decode the transformed value back to original bytes
+ response = client.get(path2, MIMETYPE_BINARY);
+ assertEquals(response.getCode(), 200);
+ value = response.getBody();
+ assertTrue(Bytes.equals(value, VALUE_2));
+ }
+}