http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
deleted file mode 100644
index 65bf509..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
+++ /dev/null
@@ -1,858 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.client;
-
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Increment;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Row;
-import org.apache.hadoop.hbase.client.RowMutations;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.coprocessor.Batch;
-import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.io.TimeRange;
-import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.hbase.rest.Constants;
-import org.apache.hadoop.hbase.rest.model.CellModel;
-import org.apache.hadoop.hbase.rest.model.CellSetModel;
-import org.apache.hadoop.hbase.rest.model.RowModel;
-import org.apache.hadoop.hbase.rest.model.ScannerModel;
-import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.util.StringUtils;
-
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-
-/**
- * HTable interface to remote tables accessed via REST gateway
- */
-@InterfaceAudience.Public
-@InterfaceStability.Stable
-public class RemoteHTable implements HTableInterface {
-
-  private static final Log LOG = LogFactory.getLog(RemoteHTable.class);
-
-  final Client client;
-  final Configuration conf;
-  final byte[] name;
-  final int maxRetries;
-  final long sleepTime;
-
-  @SuppressWarnings("rawtypes")
-  protected String buildRowSpec(final byte[] row, final Map familyMap,
-      final long startTime, final long endTime, final int maxVersions) {
-    StringBuffer sb = new StringBuffer();
-    sb.append('/');
-    sb.append(Bytes.toStringBinary(name));
-    sb.append('/');
-    sb.append(Bytes.toStringBinary(row));
-    Set families = familyMap.entrySet();
-    if (families != null) {
-      Iterator i = familyMap.entrySet().iterator();
-      sb.append('/');
-      while (i.hasNext()) {
-        Map.Entry e = (Map.Entry)i.next();
-        Collection quals = (Collection)e.getValue();
-        if (quals == null || quals.isEmpty()) {
-          // this is an unqualified family. append the family name and NO ':'
-          sb.append(Bytes.toStringBinary((byte[])e.getKey()));
-        } else {
-          Iterator ii = quals.iterator();
-          while (ii.hasNext()) {
-            sb.append(Bytes.toStringBinary((byte[])e.getKey()));
-            sb.append(':');
-            Object o = ii.next();
-            // Puts use byte[] but Deletes use KeyValue
-            if (o instanceof byte[]) {
-              sb.append(Bytes.toStringBinary((byte[])o));
-            } else if (o instanceof KeyValue) {
-              sb.append(Bytes.toStringBinary(((KeyValue)o).getQualifier()));
-            } else {
-              throw new RuntimeException("object type not handled");
-            }
-            if (ii.hasNext()) {
-              sb.append(',');
-            }
-          }
-        }
-        if (i.hasNext()) {
-          sb.append(',');
-        }
-      }
-    }
-    if (startTime >= 0 && endTime != Long.MAX_VALUE) {
-      sb.append('/');
-      sb.append(startTime);
-      if (startTime != endTime) {
-        sb.append(',');
-        sb.append(endTime);
-      }
-    } else if (endTime != Long.MAX_VALUE) {
-      sb.append('/');
-      sb.append(endTime);
-    }
-    if (maxVersions > 1) {
-      sb.append("?v=");
-      sb.append(maxVersions);
-    }
-    return sb.toString();
-  }
-
-  protected String buildMultiRowSpec(final byte[][] rows, int maxVersions) {
-    StringBuilder sb = new StringBuilder();
-    sb.append('/');
-    sb.append(Bytes.toStringBinary(name));
-    sb.append("/multiget/");
-    if (rows == null || rows.length == 0) {
-      return sb.toString();
-    }
-    sb.append("?");
-    for(int i=0; i<rows.length; i++) {
-      byte[] rk = rows[i];
-      if (i != 0) {
-        sb.append('&');
-      }
-      sb.append("row=");
-      sb.append(Bytes.toStringBinary(rk));
-    }
-    sb.append("&v=");
-    sb.append(maxVersions);
-
-    return sb.toString();
-  }
-
-  protected Result[] buildResultFromModel(final CellSetModel model) {
-    List<Result> results = new ArrayList<Result>();
-    for (RowModel row: model.getRows()) {
-      List<Cell> kvs = new ArrayList<Cell>();
-      for (CellModel cell: row.getCells()) {
-        byte[][] split = KeyValue.parseColumn(cell.getColumn());
-        byte[] column = split[0];
-        byte[] qualifier = null;
-        if (split.length == 1) {
-          qualifier = HConstants.EMPTY_BYTE_ARRAY;
-        } else if (split.length == 2) {
-          qualifier = split[1];
-        } else {
-          throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
-        }
-        kvs.add(new KeyValue(row.getKey(), column, qualifier,
-            cell.getTimestamp(), cell.getValue()));
-      }
-      results.add(Result.create(kvs));
-    }
-    return results.toArray(new Result[results.size()]);
-  }
-
-  protected CellSetModel buildModelFromPut(Put put) {
-    RowModel row = new RowModel(put.getRow());
-    long ts = put.getTimeStamp();
-    for (List<Cell> cells: put.getFamilyCellMap().values()) {
-      for (Cell cell: cells) {
-        row.addCell(new CellModel(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell),
-            ts != HConstants.LATEST_TIMESTAMP ? ts : cell.getTimestamp(),
-            CellUtil.cloneValue(cell)));
-      }
-    }
-    CellSetModel model = new CellSetModel();
-    model.addRow(row);
-    return model;
-  }
-
-  /**
-   * Constructor
-   * @param client
-   * @param name
-   */
-  public RemoteHTable(Client client, String name) {
-    this(client, HBaseConfiguration.create(), Bytes.toBytes(name));
-  }
-
-  /**
-   * Constructor
-   * @param client
-   * @param conf
-   * @param name
-   */
-  public RemoteHTable(Client client, Configuration conf, String name) {
-    this(client, conf, Bytes.toBytes(name));
-  }
-
-  /**
-   * Constructor
-   * @param client
-   * @param conf
-   * @param name
-   */
-  public RemoteHTable(Client client, Configuration conf, byte[] name) {
-    this.client = client;
-    this.conf = conf;
-    this.name = name;
-    this.maxRetries = conf.getInt("hbase.rest.client.max.retries", 10);
-    this.sleepTime = conf.getLong("hbase.rest.client.sleep", 1000);
-  }
-
-  public byte[] getTableName() {
-    return name.clone();
-  }
-
-  @Override
-  public TableName getName() {
-    return TableName.valueOf(name);
-  }
-
-  public Configuration getConfiguration() {
-    return conf;
-  }
-
-  public HTableDescriptor getTableDescriptor() throws IOException {
-    StringBuilder sb = new StringBuilder();
-    sb.append('/');
-    sb.append(Bytes.toStringBinary(name));
-    sb.append('/');
-    sb.append("schema");
-    for (int i = 0; i < maxRetries; i++) {
-      Response response = client.get(sb.toString(), Constants.MIMETYPE_PROTOBUF);
-      int code = response.getCode();
-      switch (code) {
-      case 200:
-        TableSchemaModel schema = new TableSchemaModel();
-        schema.getObjectFromMessage(response.getBody());
-        return schema.getTableDescriptor();
-      case 509:
-        try {
-          Thread.sleep(sleepTime);
-        } catch (InterruptedException e) {
-          throw (InterruptedIOException)new InterruptedIOException().initCause(e);
-        }
-        break;
-      default:
-        throw new IOException("schema request returned " + code);
-      }
-    }
-    throw new IOException("schema request timed out");
-  }
-
-  public void close() throws IOException {
-    client.shutdown();
-  }
-
-  public Result get(Get get) throws IOException {
-    TimeRange range = get.getTimeRange();
-    String spec = buildRowSpec(get.getRow(), get.getFamilyMap(),
-        range.getMin(), range.getMax(), get.getMaxVersions());
-    if (get.getFilter() != null) {
-      LOG.warn("filters not supported on gets");
-    }
-    Result[] results = getResults(spec);
-    if (results.length > 0) {
-      if (results.length > 1) {
-        LOG.warn("too many results for get (" + results.length + ")");
-      }
-      return results[0];
-    } else {
-      return new Result();
-    }
-  }
-
-  public Result[] get(List<Get> gets) throws IOException {
-    byte[][] rows = new byte[gets.size()][];
-    int maxVersions = 1;
-    int count = 0;
-
-    for(Get g:gets) {
-
-      if ( count == 0 ) {
-        maxVersions = g.getMaxVersions();
-      } else if (g.getMaxVersions() != maxVersions) {
-        LOG.warn("MaxVersions on Gets do not match, using the first in the list ("+maxVersions+")");
-      }
-
-      if (g.getFilter() != null) {
-        LOG.warn("filters not supported on gets");
-      }
-
-      rows[count] = g.getRow();
-      count ++;
-    }
-
-    String spec = buildMultiRowSpec(rows, maxVersions);
-
-    return getResults(spec);
-  }
-
-  private Result[] getResults(String spec) throws IOException {
-    for (int i = 0; i < maxRetries; i++) {
-      Response response = client.get(spec, Constants.MIMETYPE_PROTOBUF);
-      int code = response.getCode();
-      switch (code) {
-      case 200:
-        CellSetModel model = new CellSetModel();
-        model.getObjectFromMessage(response.getBody());
-        Result[] results = buildResultFromModel(model);
-        if ( results.length > 0) {
-          return results;
-        }
-        // fall through
-      case 404:
-        return new Result[0];
-
-      case 509:
-        try {
-          Thread.sleep(sleepTime);
-        } catch (InterruptedException e) {
-          throw (InterruptedIOException)new InterruptedIOException().initCause(e);
-        }
-        break;
-      default:
-        throw new IOException("get request returned " + code);
-      }
-    }
-    throw new IOException("get request timed out");
-  }
-
-  public boolean exists(Get get) throws IOException {
-    LOG.warn("exists() is really get(), just use get()");
-    Result result = get(get);
-    return (result != null && !(result.isEmpty()));
-  }
-
-  /**
-   * exists(List) is really a list of get() calls. Just use get().
-   * @param gets list of Get to test for the existence
-   */
-  public boolean[] existsAll(List<Get> gets) throws IOException {
-    LOG.warn("exists(List<Get>) is really list of get() calls, just use get()");
-    boolean[] results = new boolean[gets.size()];
-    for (int i = 0; i < results.length; i++) {
-      results[i] = exists(gets.get(i));
-    }
-    return results;
-  }
-
-  @Deprecated
-  public Boolean[] exists(List<Get> gets) throws IOException {
-    boolean[] results = existsAll(gets);
-    Boolean[] objectResults = new Boolean[results.length];
-    for (int i = 0; i < results.length; ++i) {
-      objectResults[i] = results[i];
-    }
-    return objectResults;
-  }
-
-  public void put(Put put) throws IOException {
-    CellSetModel model = buildModelFromPut(put);
-    StringBuilder sb = new StringBuilder();
-    sb.append('/');
-    sb.append(Bytes.toStringBinary(name));
-    sb.append('/');
-    sb.append(Bytes.toStringBinary(put.getRow()));
-    for (int i = 0; i < maxRetries; i++) {
-      Response response = client.put(sb.toString(), Constants.MIMETYPE_PROTOBUF,
-          model.createProtobufOutput());
-      int code = response.getCode();
-      switch (code) {
-      case 200:
-        return;
-      case 509:
-        try {
-          Thread.sleep(sleepTime);
-        } catch (InterruptedException e) {
-          throw (InterruptedIOException)new InterruptedIOException().initCause(e);
-        }
-        break;
-      default:
-        throw new IOException("put request failed with " + code);
-      }
-    }
-    throw new IOException("put request timed out");
-  }
-
-  public void put(List<Put> puts) throws IOException {
-    // this is a trick: The gateway accepts multiple rows in a cell set and
-    // ignores the row specification in the URI
-
-    // separate puts by row
-    TreeMap<byte[],List<Cell>> map =
-      new TreeMap<byte[],List<Cell>>(Bytes.BYTES_COMPARATOR);
-    for (Put put: puts) {
-      byte[] row = put.getRow();
-      List<Cell> cells = map.get(row);
-      if (cells == null) {
-        cells = new ArrayList<Cell>();
-        map.put(row, cells);
-      }
-      for (List<Cell> l: put.getFamilyCellMap().values()) {
-        cells.addAll(l);
-      }
-    }
-
-    // build the cell set
-    CellSetModel model = new CellSetModel();
-    for (Map.Entry<byte[], List<Cell>> e: map.entrySet()) {
-      RowModel row = new RowModel(e.getKey());
-      for (Cell cell: e.getValue()) {
-        row.addCell(new CellModel(cell));
-      }
-      model.addRow(row);
-    }
-
-    // build path for multiput
-    StringBuilder sb = new StringBuilder();
-    sb.append('/');
-    sb.append(Bytes.toStringBinary(name));
-    sb.append("/$multiput"); // can be any nonexistent row
-    for (int i = 0; i < maxRetries; i++) {
-      Response response = client.put(sb.toString(), Constants.MIMETYPE_PROTOBUF,
-          model.createProtobufOutput());
-      int code = response.getCode();
-      switch (code) {
-      case 200:
-        return;
-      case 509:
-        try {
-          Thread.sleep(sleepTime);
-        } catch (InterruptedException e) {
-          throw (InterruptedIOException)new InterruptedIOException().initCause(e);
-        }
-        break;
-      default:
-        throw new IOException("multiput request failed with " + code);
-      }
-    }
-    throw new IOException("multiput request timed out");
-  }
-
-  public void delete(Delete delete) throws IOException {
-    String spec = buildRowSpec(delete.getRow(), delete.getFamilyCellMap(),
-        delete.getTimeStamp(), delete.getTimeStamp(), 1);
-    for (int i = 0; i < maxRetries; i++) {
-      Response response = client.delete(spec);
-      int code = response.getCode();
-      switch (code) {
-      case 200:
-        return;
-      case 509:
-        try {
-          Thread.sleep(sleepTime);
-        } catch (InterruptedException e) {
-          throw (InterruptedIOException)new InterruptedIOException().initCause(e);
-        }
-        break;
-      default:
-        throw new IOException("delete request failed with " + code);
-      }
-    }
-    throw new IOException("delete request timed out");
-  }
-
-  public void delete(List<Delete> deletes) throws IOException {
-    for (Delete delete: deletes) {
-      delete(delete);
-    }
-  }
-
-  public void flushCommits() throws IOException {
-    // no-op
-  }
-
-  class Scanner implements ResultScanner {
-
-    String uri;
-
-    public Scanner(Scan scan) throws IOException {
-      ScannerModel model;
-      try {
-        model = ScannerModel.fromScan(scan);
-      } catch (Exception e) {
-        throw new IOException(e);
-      }
-      StringBuffer sb = new StringBuffer();
-      sb.append('/');
-      sb.append(Bytes.toStringBinary(name));
-      sb.append('/');
-      sb.append("scanner");
-      for (int i = 0; i < maxRetries; i++) {
-        Response response = client.post(sb.toString(),
-            Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput());
-        int code = response.getCode();
-        switch (code) {
-        case 201:
-          uri = response.getLocation();
-          return;
-        case 509:
-          try {
-            Thread.sleep(sleepTime);
-          } catch (InterruptedException e) {
-            throw (InterruptedIOException)new InterruptedIOException().initCause(e);
-          }
-          break;
-        default:
-          throw new IOException("scan request failed with " + code);
-        }
-      }
-      throw new IOException("scan request timed out");
-    }
-
-    @Override
-    public Result[] next(int nbRows) throws IOException {
-      StringBuilder sb = new StringBuilder(uri);
-      sb.append("?n=");
-      sb.append(nbRows);
-      for (int i = 0; i < maxRetries; i++) {
-        Response response = client.get(sb.toString(),
-            Constants.MIMETYPE_PROTOBUF);
-        int code = response.getCode();
-        switch (code) {
-        case 200:
-          CellSetModel model = new CellSetModel();
-          model.getObjectFromMessage(response.getBody());
-          return buildResultFromModel(model);
-        case 204:
-        case 206:
-          return null;
-        case 509:
-          try {
-            Thread.sleep(sleepTime);
-          } catch (InterruptedException e) {
-            throw (InterruptedIOException)new InterruptedIOException().initCause(e);
-          }
-          break;
-        default:
-          throw new IOException("scanner.next request failed with " + code);
-        }
-      }
-      throw new IOException("scanner.next request timed out");
-    }
-
-    @Override
-    public Result next() throws IOException {
-      Result[] results = next(1);
-      if (results == null || results.length < 1) {
-        return null;
-      }
-      return results[0];
-    }
-
-    class Iter implements Iterator<Result> {
-
-      Result cache;
-
-      public Iter() {
-        try {
-          cache = Scanner.this.next();
-        } catch (IOException e) {
-          LOG.warn(StringUtils.stringifyException(e));
-        }
-      }
-
-      @Override
-      public boolean hasNext() {
-        return cache != null;
-      }
-
-      @Override
-      public Result next() {
-        Result result = cache;
-        try {
-          cache = Scanner.this.next();
-        } catch (IOException e) {
-          LOG.warn(StringUtils.stringifyException(e));
-          cache = null;
-        }
-        return result;
-      }
-
-      @Override
-      public void remove() {
-        throw new RuntimeException("remove() not supported");
-      }
-
-    }
-
-    @Override
-    public Iterator<Result> iterator() {
-      return new Iter();
-    }
-
-    @Override
-    public void close() {
-      try {
-        client.delete(uri);
-      } catch (IOException e) {
-        LOG.warn(StringUtils.stringifyException(e));
-      }
-    }
-
-  }
-
-  public ResultScanner getScanner(Scan scan) throws IOException {
-    return new Scanner(scan);
-  }
-
-  public ResultScanner getScanner(byte[] family) throws IOException {
-    Scan scan = new Scan();
-    scan.addFamily(family);
-    return new Scanner(scan);
-  }
-
-  public ResultScanner getScanner(byte[] family, byte[] qualifier)
-      throws IOException {
-    Scan scan = new Scan();
-    scan.addColumn(family, qualifier);
-    return new Scanner(scan);
-  }
-
-  public boolean isAutoFlush() {
-    return true;
-  }
-
-  public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
-    throw new IOException("getRowOrBefore not supported");
-  }
-
-  public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
-      byte[] value, Put put) throws IOException {
-    // column to check-the-value
-    put.add(new KeyValue(row, family, qualifier, value));
-
-    CellSetModel model = buildModelFromPut(put);
-    StringBuilder sb = new StringBuilder();
-    sb.append('/');
-    sb.append(Bytes.toStringBinary(name));
-    sb.append('/');
-    sb.append(Bytes.toStringBinary(put.getRow()));
-    sb.append("?check=put");
-
-    for (int i = 0; i < maxRetries; i++) {
-      Response response = client.put(sb.toString(),
-          Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput());
-      int code = response.getCode();
-      switch (code) {
-      case 200:
-        return true;
-      case 304: // NOT-MODIFIED
-        return false;
-      case 509:
-        try {
-          Thread.sleep(sleepTime);
-        } catch (final InterruptedException e) {
-          throw (InterruptedIOException)new InterruptedIOException().initCause(e);
-        }
-        break;
-      default:
-        throw new IOException("checkAndPut request failed with " + code);
-      }
-    }
-    throw new IOException("checkAndPut request timed out");
-  }
-
-  public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
-      CompareOp compareOp, byte[] value, Put put) throws IOException {
-    throw new IOException("checkAndPut for non-equal comparison not implemented");
-  }
-
-  public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
-      byte[] value, Delete delete) throws IOException {
-    Put put = new Put(row);
-    // column to check-the-value
-    put.add(new KeyValue(row, family, qualifier, value));
-    CellSetModel model = buildModelFromPut(put);
-    StringBuilder sb = new StringBuilder();
-    sb.append('/');
-    sb.append(Bytes.toStringBinary(name));
-    sb.append('/');
-    sb.append(Bytes.toStringBinary(row));
-    sb.append("?check=delete");
-
-    for (int i = 0; i < maxRetries; i++) {
-      Response response = client.put(sb.toString(),
-          Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput());
-      int code = response.getCode();
-      switch (code) {
-      case 200:
-        return true;
-      case 304: // NOT-MODIFIED
-        return false;
-      case 509:
-        try {
-          Thread.sleep(sleepTime);
-        } catch (final InterruptedException e) {
-          throw (InterruptedIOException)new InterruptedIOException().initCause(e);
-        }
-        break;
-      default:
-        throw new IOException("checkAndDelete request failed with " + code);
-      }
-    }
-    throw new IOException("checkAndDelete request timed out");
-  }
-
-  public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
-      CompareOp compareOp, byte[] value, Delete delete) throws IOException {
-    throw new IOException("checkAndDelete for non-equal comparison not implemented");
-  }
-
-  public Result increment(Increment increment) throws IOException {
-    throw new IOException("Increment not supported");
-  }
-
-  public Result append(Append append) throws IOException {
-    throw new IOException("Append not supported");
-  }
-
-  public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier,
-      long amount) throws IOException {
-    throw new IOException("incrementColumnValue not supported");
-  }
-
-  public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier,
-      long amount, Durability durability) throws IOException {
-    throw new IOException("incrementColumnValue not supported");
-  }
-
-  @Override
-  public void batch(List<? extends Row> actions, Object[] results) throws IOException {
-    throw new IOException("batch not supported");
-  }
-
-  @Override
-  public Object[] batch(List<? extends Row> actions) throws IOException {
-    throw new IOException("batch not supported");
-  }
-
-  @Override
-  public <R> void batchCallback(List<? extends Row> actions, Object[] results,
-      Batch.Callback<R> callback) throws IOException, InterruptedException {
-    throw new IOException("batchCallback not supported");
-  }
-
-  @Override
-  public <R> Object[] batchCallback(List<? extends Row> actions, Batch.Callback<R> callback)
-      throws IOException, InterruptedException {
-    throw new IOException("batchCallback not supported");
-  }
-
-  @Override
-  public CoprocessorRpcChannel coprocessorService(byte[] row) {
-    throw new UnsupportedOperationException("coprocessorService not implemented");
-  }
-
-  @Override
-  public <T extends Service, R> Map<byte[], R> coprocessorService(Class<T> service,
-      byte[] startKey, byte[] endKey, Batch.Call<T, R> callable)
-      throws ServiceException, Throwable {
-    throw new UnsupportedOperationException("coprocessorService not implemented");
-  }
-
-  @Override
-  public <T extends Service, R> void coprocessorService(Class<T> service,
-      byte[] startKey, byte[] endKey, Batch.Call<T, R> callable, Batch.Callback<R> callback)
-      throws ServiceException, Throwable {
-    throw new UnsupportedOperationException("coprocessorService not implemented");
-  }
-
-  @Override
-  public void mutateRow(RowMutations rm) throws IOException {
-    throw new IOException("atomicMutation not supported");
-  }
-
-  @Override
-  public void setAutoFlush(boolean autoFlush) {
-    throw new UnsupportedOperationException("setAutoFlush not implemented");
-  }
-
-  @Override
-  public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
-    throw new UnsupportedOperationException("setAutoFlush not implemented");
-  }
-
-  @Override
-  public void setAutoFlushTo(boolean autoFlush) {
-    throw new UnsupportedOperationException("setAutoFlushTo not implemented");
-  }
-
-  @Override
-  public long getWriteBufferSize() {
-    throw new UnsupportedOperationException("getWriteBufferSize not implemented");
-  }
-
-  @Override
-  public void setWriteBufferSize(long writeBufferSize) throws IOException {
-    throw new IOException("setWriteBufferSize not supported");
-  }
-
-  @Override
-  public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier,
-      long amount, boolean writeToWAL) throws IOException {
-    throw new IOException("incrementColumnValue not supported");
-  }
-
-  @Override
-  public <R extends Message> Map<byte[], R> batchCoprocessorService(
-      Descriptors.MethodDescriptor method, Message request,
-      byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable {
-    throw new UnsupportedOperationException("batchCoprocessorService not implemented");
-  }
-
-  @Override
-  public <R extends Message> void batchCoprocessorService(
-      Descriptors.MethodDescriptor method, Message request,
-      byte[] startKey, byte[] endKey, R responsePrototype, Callback<R> callback)
-      throws ServiceException, Throwable {
-    throw new UnsupportedOperationException("batchCoprocessorService not implemented");
-  }
-
-  @Override public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
-      CompareOp compareOp, byte[] value, RowMutations rm) throws IOException {
-    throw new UnsupportedOperationException("checkAndMutate not implemented");
-  }
-}
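
For context, the class removed above was the client-side entry point to the REST gateway. A minimal usage sketch, not part of this commit (the gateway address and the table name "testtable" are illustrative), using only constructors and methods visible in the source:

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;

public class RemoteHTableExample {
  public static void main(String[] args) throws Exception {
    Cluster cluster = new Cluster();
    cluster.add("localhost", 8080);           // address of a running REST gateway (assumed)
    Client client = new Client(cluster);
    RemoteHTable table = new RemoteHTable(client, "testtable");
    try {
      Put put = new Put(Bytes.toBytes("row1"));
      put.add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
      table.put(put);                         // issues PUT /testtable/row1 with a protobuf CellSet
      Result result = table.get(new Get(Bytes.toBytes("row1")));
      System.out.println(Bytes.toString(
          result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("q"))));
    } finally {
      table.close();                          // shuts down the wrapped Client
    }
  }
}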
http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
deleted file mode 100644
index 871b646..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.client;
-
-import java.io.InputStream;
-
-import org.apache.commons.httpclient.Header;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-
-/**
- * The HTTP result code, response headers, and body of a HTTP response.
- */
-@InterfaceAudience.Public
-@InterfaceStability.Stable
-public class Response {
-  private int code;
-  private Header[] headers;
-  private byte[] body;
-  private InputStream stream;
-
-  /**
-   * Constructor
-   * @param code the HTTP response code
-   */
-  public Response(int code) {
-    this(code, null, null);
-  }
-
-  /**
-   * Constructor
-   * @param code the HTTP response code
-   * @param headers the HTTP response headers
-   */
-  public Response(int code, Header[] headers) {
-    this(code, headers, null);
-  }
-
-  /**
-   * Constructor
-   * @param code the HTTP response code
-   * @param headers the HTTP response headers
-   * @param body the response body, can be null
-   */
-  public Response(int code, Header[] headers, byte[] body) {
-    this.code = code;
-    this.headers = headers;
-    this.body = body;
-  }
-
-  /**
-   * Constructor
-   * @param code the HTTP response code
-   * @param headers headers the HTTP response headers
-   * @param body the response body, can be null
-   * @param in Inputstream if the response had one.
-   */
-  public Response(int code, Header[] headers, byte[] body, InputStream in) {
-    this.code = code;
-    this.headers = headers;
-    this.body = body;
-    this.stream = in;
-  }
-
-  /**
-   * @return the HTTP response code
-   */
-  public int getCode() {
-    return code;
-  }
-
-  /**
-   * Gets the input stream instance.
-   *
-   * @return an instance of InputStream class.
-   */
-  public InputStream getStream(){
-    return this.stream;
-  }
-
-  /**
-   * @return the HTTP response headers
-   */
-  public Header[] getHeaders() {
-    return headers;
-  }
-
-  public String getHeader(String key) {
-    for (Header header: headers) {
-      if (header.getName().equalsIgnoreCase(key)) {
-        return header.getValue();
-      }
-    }
-    return null;
-  }
-
-  /**
-   * @return the value of the Location header
-   */
-  public String getLocation() {
-    return getHeader("Location");
-  }
-
-  /**
-   * @return true if a response body was sent
-   */
-  public boolean hasBody() {
-    return body != null;
-  }
-
-  /**
-   * @return the HTTP response body
-   */
-  public byte[] getBody() {
-    return body;
-  }
-
-  /**
-   * @param code the HTTP response code
-   */
-  public void setCode(int code) {
-    this.code = code;
-  }
-
-  /**
-   * @param headers the HTTP response headers
-   */
-  public void setHeaders(Header[] headers) {
-    this.headers = headers;
-  }
-
-  /**
-   * @param body the response body
-   */
-  public void setBody(byte[] body) {
-    this.body = body;
-  }
-}
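
Callers consume Response exactly as RemoteHTable does above: issue a request through Client, then branch on the code. A hedged fragment (the "/version" path and Constants.MIMETYPE_TEXT from org.apache.hadoop.hbase.rest.Constants are assumed; error handling omitted):

Response response = client.get("/version", Constants.MIMETYPE_TEXT);
if (response.getCode() == 200 && response.hasBody()) {
  System.out.println(Bytes.toString(response.getBody()));
  // header lookup is case-insensitive, per getHeader() above
  System.out.println("content type: " + response.getHeader("content-type"));
}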
http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
deleted file mode 100644
index 6d68cdd..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.rest.filter;
-
-import static org.apache.hadoop.hbase.rest.Constants.REST_AUTHENTICATION_PRINCIPAL;
-import static org.apache.hadoop.hbase.rest.Constants.REST_DNS_INTERFACE;
-import static org.apache.hadoop.hbase.rest.Constants.REST_DNS_NAMESERVER;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Properties;
-
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.util.Strings;
-import org.apache.hadoop.net.DNS;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-
-public class AuthFilter extends AuthenticationFilter {
-  private static final Log LOG = LogFactory.getLog(AuthFilter.class);
-  private static final String REST_PREFIX = "hbase.rest.authentication.";
-  private static final int REST_PREFIX_LEN = REST_PREFIX.length();
-
-  /**
-   * Returns the configuration to be used by the authentication filter
-   * to initialize the authentication handler.
-   *
-   * This filter retrieves all HBase configurations and passes those started
-   * with REST_PREFIX to the authentication handler. It is useful to support
-   * plugging different authentication handlers.
-   */
-  @Override
-  protected Properties getConfiguration(
-      String configPrefix, FilterConfig filterConfig) throws ServletException {
-    Properties props = super.getConfiguration(configPrefix, filterConfig);
-    //setting the cookie path to root '/' so it is used for all resources.
-    props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
-
-    Configuration conf = HBaseConfiguration.create();
-    for (Map.Entry<String, String> entry : conf) {
-      String name = entry.getKey();
-      if (name.startsWith(REST_PREFIX)) {
-        String value = entry.getValue();
-        if(name.equals(REST_AUTHENTICATION_PRINCIPAL)) {
-          try {
-            String machineName = Strings.domainNamePointerToHostName(
-              DNS.getDefaultHost(conf.get(REST_DNS_INTERFACE, "default"),
-                conf.get(REST_DNS_NAMESERVER, "default")));
-            value = SecurityUtil.getServerPrincipal(value, machineName);
-          } catch (IOException ie) {
-            throw new ServletException("Failed to retrieve server principal", ie);
-          }
-        }
-        LOG.debug("Setting property " + name + "=" + value);
-        name = name.substring(REST_PREFIX_LEN);
-        props.setProperty(name, value);
-      }
-    }
-    return props;
-  }
-}
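
The prefix stripping in getConfiguration() means operators drive the wrapped Hadoop AuthenticationFilter entirely from HBase configuration. An illustrative sketch, not from this commit (the principal value is a placeholder; the key names are assumed to match the REST constants used above, e.g. REST_AUTHENTICATION_PRINCIPAL is "hbase.rest.authentication.kerberos.principal"):

Configuration conf = HBaseConfiguration.create();
conf.set("hbase.rest.authentication.type", "kerberos");
conf.set("hbase.rest.authentication.kerberos.principal", "HTTP/[email protected]");
// After the "hbase.rest.authentication." prefix is stripped, the wrapped
// AuthenticationFilter sees:
//   type = kerberos
//   kerberos.principal = HTTP/<canonical hostname>@EXAMPLE.COM
// (_HOST is substituted via SecurityUtil.getServerPrincipal, as in the code above)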
http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestStream.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestStream.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestStream.java
deleted file mode 100644
index 02957e9..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestStream.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.filter;
-
-import java.io.IOException;
-import java.util.zip.GZIPInputStream;
-
-import javax.servlet.ServletInputStream;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-
-@InterfaceAudience.Private
-public class GZIPRequestStream extends ServletInputStream
-{
-  private GZIPInputStream in;
-
-  public GZIPRequestStream(HttpServletRequest request) throws IOException {
-    this.in = new GZIPInputStream(request.getInputStream());
-  }
-
-  @Override
-  public int read() throws IOException {
-    return in.read();
-  }
-
-  @Override
-  public int read(byte[] b) throws IOException {
-    return in.read(b);
-  }
-
-  @Override
-  public int read(byte[] b, int off, int len) throws IOException {
-    return in.read(b, off, len);
-  }
-
-  @Override
-  public void close() throws IOException {
-    in.close();
-  }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestWrapper.java
deleted file mode 100644
index 361e442..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestWrapper.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.filter;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-
-import javax.servlet.ServletInputStream;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-
-@InterfaceAudience.Private
-public class GZIPRequestWrapper extends HttpServletRequestWrapper {
-  private ServletInputStream is;
-  private BufferedReader reader;
-
-  public GZIPRequestWrapper(HttpServletRequest request) throws IOException {
-    super(request);
-    this.is = new GZIPRequestStream(request);
-    this.reader = new BufferedReader(new InputStreamReader(this.is));
-  }
-
-  @Override
-  public ServletInputStream getInputStream() throws IOException {
-    return is;
-  }
-
-  @Override
-  public BufferedReader getReader() throws IOException {
-    return reader;
-  }
-}
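
Together, GZIPRequestStream and GZIPRequestWrapper let a client ship a compressed body while servlet code reads plain bytes. A JDK-only sketch of the client side of that contract (the payload is illustrative):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;

public class GzipBody {
  // Compress any request payload; the gateway's GzipFilter wraps the request
  // so downstream servlet code sees the decompressed stream.
  static byte[] gzipBody(byte[] payloadBytes) throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    GZIPOutputStream gz = new GZIPOutputStream(buf);
    gz.write(payloadBytes);        // e.g. a CellSetModel protobuf
    gz.finish();
    return buf.toByteArray();      // send with header "Content-Encoding: gzip"
  }
}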
http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.java
deleted file mode 100644
index cc74f9c..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.filter;
-
-import java.io.IOException;
-import java.util.zip.GZIPOutputStream;
-
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-
-@InterfaceAudience.Private
-public class GZIPResponseStream extends ServletOutputStream
-{
-  private HttpServletResponse response;
-  private GZIPOutputStream out;
-
-  public GZIPResponseStream(HttpServletResponse response) throws IOException {
-    this.response = response;
-    this.out = new GZIPOutputStream(response.getOutputStream());
-    response.addHeader("Content-Encoding", "gzip");
-  }
-
-  public void resetBuffer() {
-    if (out != null && !response.isCommitted()) {
-      response.setHeader("Content-Encoding", null);
-    }
-    out = null;
-  }
-
-  @Override
-  public void write(int b) throws IOException {
-    out.write(b);
-  }
-
-  @Override
-  public void write(byte[] b) throws IOException {
-    out.write(b);
-  }
-
-  @Override
-  public void write(byte[] b, int off, int len) throws IOException {
-    out.write(b, off, len);
-  }
-
-  @Override
-  public void close() throws IOException {
-    finish();
-    out.close();
-  }
-
-  @Override
-  public void flush() throws IOException {
-    out.flush();
-  }
-
-  public void finish() throws IOException {
-    out.finish();
-  }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseWrapper.java
deleted file mode 100644
index 2cfea1b..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseWrapper.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.filter;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpServletResponseWrapper;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-
-@InterfaceAudience.Private
-public class GZIPResponseWrapper extends HttpServletResponseWrapper {
-  private HttpServletResponse response;
-  private ServletOutputStream os;
-  private PrintWriter writer;
-  private boolean compress = true;
-
-  public GZIPResponseWrapper(HttpServletResponse response) {
-    super(response);
-    this.response = response;
-  }
-
-  @Override
-  public void setStatus(int status) {
-    super.setStatus(status);
-    if (status < 200 || status >= 300) {
-      compress = false;
-    }
-  }
-
-  @Override
-  public void addHeader(String name, String value) {
-    if (!"content-length".equalsIgnoreCase(name)) {
-      super.addHeader(name, value);
-    }
-  }
-
-  @Override
-  public void setContentLength(int length) {
-    // do nothing
-  }
-
-  @Override
-  public void setIntHeader(String name, int value) {
-    if (!"content-length".equalsIgnoreCase(name)) {
-      super.setIntHeader(name, value);
-    }
-  }
-
-  @Override
-  public void setHeader(String name, String value) {
-    if (!"content-length".equalsIgnoreCase(name)) {
-      super.setHeader(name, value);
-    }
-  }
-
-  @Override
-  public void flushBuffer() throws IOException {
-    if (writer != null) {
-      writer.flush();
-    }
-    if (os != null && (os instanceof GZIPResponseStream)) {
-      ((GZIPResponseStream)os).finish();
-    } else {
-      getResponse().flushBuffer();
-    }
-  }
-
-  @Override
-  public void reset() {
-    super.reset();
-    if (os != null && (os instanceof GZIPResponseStream)) {
-      ((GZIPResponseStream)os).resetBuffer();
-    }
-    writer = null;
-    os = null;
-    compress = true;
-  }
-
-  @Override
-  public void resetBuffer() {
-    super.resetBuffer();
-    if (os != null && (os instanceof GZIPResponseStream)) {
-      ((GZIPResponseStream)os).resetBuffer();
-    }
-    writer = null;
-    os = null;
-  }
-
-  @Override
-  public void sendError(int status, String msg) throws IOException {
-    resetBuffer();
-    super.sendError(status, msg);
-  }
-
-  @Override
-  public void sendError(int status) throws IOException {
-    resetBuffer();
-    super.sendError(status);
-  }
-
-  @Override
-  public void sendRedirect(String location) throws IOException {
-    resetBuffer();
-    super.sendRedirect(location);
-  }
-
-  @Override
-  public ServletOutputStream getOutputStream() throws IOException {
-    if (os == null) {
-      if (!response.isCommitted() && compress) {
-        os = (ServletOutputStream)new GZIPResponseStream(response);
-      } else {
-        os = response.getOutputStream();
-      }
-    }
-    return os;
-  }
-
-  @Override
-  public PrintWriter getWriter() throws IOException {
-    if (writer == null) {
-      writer = new PrintWriter(getOutputStream());
-    }
-    return writer;
-  }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java
deleted file mode 100644
index 4995b86..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.filter;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.StringTokenizer;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-public class GzipFilter implements Filter {
-  private Set<String> mimeTypes = new HashSet<String>();
-
-  @Override
-  public void init(FilterConfig filterConfig) throws ServletException {
-    String s = filterConfig.getInitParameter("mimeTypes");
-    if (s != null) {
-      StringTokenizer tok = new StringTokenizer(s, ",", false);
-      while (tok.hasMoreTokens()) {
-        mimeTypes.add(tok.nextToken());
-      }
-    }
-  }
-
-  @Override
-  public void destroy() {
-  }
-
-  @Override
-  public void doFilter(ServletRequest req, ServletResponse rsp,
-      FilterChain chain) throws IOException, ServletException {
-    HttpServletRequest request = (HttpServletRequest)req;
-    HttpServletResponse response = (HttpServletResponse)rsp;
-    String contentEncoding = request.getHeader("content-encoding");
-    String acceptEncoding = request.getHeader("accept-encoding");
-    String contentType = request.getHeader("content-type");
-    if ((contentEncoding != null) &&
-        (contentEncoding.toLowerCase().indexOf("gzip") > -1)) {
-      request = new GZIPRequestWrapper(request);
-    }
-    if (((acceptEncoding != null) &&
-        (acceptEncoding.toLowerCase().indexOf("gzip") > -1)) ||
-        ((contentType != null) && mimeTypes.contains(contentType))) {
-      response = new GZIPResponseWrapper(response);
-    }
-    chain.doFilter(request, response);
-    if (response instanceof GZIPResponseWrapper) {
-      OutputStream os = response.getOutputStream();
-      if (os instanceof GZIPResponseStream) {
-        ((GZIPResponseStream)os).finish();
-      }
-    }
-  }
-
-}
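
On the response side, GzipFilter compresses only when the client advertises gzip. A JDK-only sketch of that negotiation against a gateway, not part of this commit (the URL is illustrative):

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.zip.GZIPInputStream;

public class GzipProbe {
  public static void main(String[] args) throws Exception {
    HttpURLConnection conn = (HttpURLConnection)
        new URL("http://localhost:8080/version").openConnection();
    conn.setRequestProperty("Accept", "text/plain");
    conn.setRequestProperty("Accept-Encoding", "gzip");  // opt in to compression
    InputStream in = conn.getInputStream();
    if ("gzip".equalsIgnoreCase(conn.getContentEncoding())) {
      in = new GZIPInputStream(in);  // GzipFilter compressed the body
    }
    BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
    System.out.println(reader.readLine());
  }
}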
http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
deleted file mode 100644
index 7c0c11f..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.model;
-
-import java.io.IOException;
-import java.io.Serializable;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlRootElement;
-import javax.xml.bind.annotation.XmlValue;
-
-import org.apache.hadoop.hbase.util.ByteStringer;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
-import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
-import org.codehaus.jackson.annotate.JsonProperty;
-
-/**
- * Representation of a cell. A cell is a single value associated a column and
- * optional qualifier, and either the timestamp when it was stored or the user-
- * provided timestamp if one was explicitly supplied.
- *
- * <pre>
- * <complexType name="Cell">
- *   <sequence>
- *     <element name="value" maxOccurs="1" minOccurs="1">
- *       <simpleType>
- *         <restriction base="base64Binary"/>
- *       </simpleType>
- *     </element>
- *   </sequence>
- *   <attribute name="column" type="base64Binary" />
- *   <attribute name="timestamp" type="int" />
- * </complexType>
- * </pre>
- */
-@XmlRootElement(name="Cell")
-@XmlAccessorType(XmlAccessType.FIELD)
-@InterfaceAudience.Private
-public class CellModel implements ProtobufMessageHandler, Serializable {
-  private static final long serialVersionUID = 1L;
-
-  @JsonProperty("column")
-  @XmlAttribute
-  private byte[] column;
-
-  @JsonProperty("timestamp")
-  @XmlAttribute
-  private long timestamp = HConstants.LATEST_TIMESTAMP;
-
-  @JsonProperty("$")
-  @XmlValue
-  private byte[] value;
-
-  /**
-   * Default constructor
-   */
-  public CellModel() {}
-
-  /**
-   * Constructor
-   * @param column
-   * @param value
-   */
-  public CellModel(byte[] column, byte[] value) {
-    this(column, HConstants.LATEST_TIMESTAMP, value);
-  }
-
-  /**
-   * Constructor
-   * @param column
-   * @param qualifier
-   * @param value
-   */
-  public CellModel(byte[] column, byte[] qualifier, byte[] value) {
-    this(column, qualifier, HConstants.LATEST_TIMESTAMP, value);
-  }
-
-  /**
-   * Constructor from KeyValue
-   * @param cell
-   */
-  public CellModel(org.apache.hadoop.hbase.Cell cell) {
-    this(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell), cell.getTimestamp(), CellUtil
-        .cloneValue(cell));
-  }
-
-  /**
-   * Constructor
-   * @param column
-   * @param timestamp
-   * @param value
-   */
-  public CellModel(byte[] column, long timestamp, byte[] value) {
-    this.column = column;
-    this.timestamp = timestamp;
-    this.value = value;
-  }
-
-  /**
-   * Constructor
-   * @param column
-   * @param qualifier
-   * @param timestamp
-   * @param value
-   */
-  public CellModel(byte[] column, byte[] qualifier, long timestamp,
-      byte[] value) {
-    this.column = KeyValue.makeColumn(column, qualifier);
-    this.timestamp = timestamp;
-    this.value = value;
-  }
-
-  /**
-   * @return the column
-   */
-  public byte[] getColumn() {
-    return column;
-  }
-
-  /**
-   * @param column the column to set
-   */
-  public void setColumn(byte[] column) {
-    this.column = column;
-  }
-
-  /**
-   * @return true if the timestamp property has been specified by the
-   * user
-   */
-  public boolean hasUserTimestamp() {
-    return timestamp != HConstants.LATEST_TIMESTAMP;
-  }
-
-  /**
-   * @return the timestamp
-   */
-  public long getTimestamp() {
-    return timestamp;
-  }
-
-  /**
-   * @param timestamp the timestamp to set
-   */
-  public void setTimestamp(long timestamp) {
-    this.timestamp = timestamp;
-  }
-
-  /**
-   * @return the value
-   */
-  public byte[] getValue() {
-    return value;
-  }
-
-  /**
-   * @param value the value to set
-   */
-  public void setValue(byte[] value) {
-    this.value = value;
-  }
-
-  @Override
-  public byte[] createProtobufOutput() {
-    Cell.Builder builder = Cell.newBuilder();
-    builder.setColumn(ByteStringer.wrap(getColumn()));
-    builder.setData(ByteStringer.wrap(getValue()));
-    if (hasUserTimestamp()) {
-      builder.setTimestamp(getTimestamp());
-    }
-    return builder.build().toByteArray();
-  }
-
-  @Override
-  public ProtobufMessageHandler getObjectFromMessage(byte[] message)
-      throws IOException {
-    Cell.Builder builder = Cell.newBuilder();
-    builder.mergeFrom(message);
-    setColumn(builder.getColumn().toByteArray());
-    setValue(builder.getData().toByteArray());
-    if (builder.hasTimestamp()) {
-      setTimestamp(builder.getTimestamp());
-    }
-    return this;
-  }
-}
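
The two methods above define CellModel's protobuf wire form. A round-trip sketch using only the constructors shown in the source (the family, qualifier, and value are illustrative):

CellModel cell = new CellModel(Bytes.toBytes("cf"), Bytes.toBytes("q"),
    1234567890L, Bytes.toBytes("value"));
byte[] wire = cell.createProtobufOutput();
CellModel copy = (CellModel) new CellModel().getObjectFromMessage(wire);
// copy.getColumn() now carries "cf:q" (KeyValue.makeColumn joined family and
// qualifier), and hasUserTimestamp() is true because an explicit timestamp was set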
- */ - -package org.apache.hadoop.hbase.rest.model; - -import java.io.IOException; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.List; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.hadoop.hbase.util.ByteStringer; -import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; -import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell; -import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet; - -/** - * Representation of a grouping of cells. May contain cells from more than - * one row. Encapsulates RowModel and CellModel models. - * - * <pre> - * <complexType name="CellSet"> - * <sequence> - * <element name="row" type="tns:Row" maxOccurs="unbounded" - * minOccurs="1"></element> - * </sequence> - * </complexType> - * - * <complexType name="Row"> - * <sequence> - * <element name="key" type="base64Binary"></element> - * <element name="cell" type="tns:Cell" - * maxOccurs="unbounded" minOccurs="1"></element> - * </sequence> - * </complexType> - * - * <complexType name="Cell"> - * <sequence> - * <element name="value" maxOccurs="1" minOccurs="1"> - * <simpleType> - * <restriction base="base64Binary"/> - * </simpleType> - * </element> - * </sequence> - * <attribute name="column" type="base64Binary" /> - * <attribute name="timestamp" type="int" /> - * </complexType> - * </pre> - */ -@XmlRootElement(name="CellSet") -@XmlAccessorType(XmlAccessType.FIELD) [email protected] -public class CellSetModel implements Serializable, ProtobufMessageHandler { - - private static final long serialVersionUID = 1L; - - @XmlElement(name="Row") - private List<RowModel> rows; - - /** - * Constructor - */ - public CellSetModel() { - this.rows = new ArrayList<RowModel>(); - } - - /** - * @param rows the rows - */ - public CellSetModel(List<RowModel> rows) { - super(); - this.rows = rows; - } - - /** - * Add a row to this cell set - * @param row the row - */ - public void addRow(RowModel row) { - rows.add(row); - } - - /** - * @return the rows - */ - public List<RowModel> getRows() { - return rows; - } - - @Override - public byte[] createProtobufOutput() { - CellSet.Builder builder = CellSet.newBuilder(); - for (RowModel row: getRows()) { - CellSet.Row.Builder rowBuilder = CellSet.Row.newBuilder(); - rowBuilder.setKey(ByteStringer.wrap(row.getKey())); - for (CellModel cell: row.getCells()) { - Cell.Builder cellBuilder = Cell.newBuilder(); - cellBuilder.setColumn(ByteStringer.wrap(cell.getColumn())); - cellBuilder.setData(ByteStringer.wrap(cell.getValue())); - if (cell.hasUserTimestamp()) { - cellBuilder.setTimestamp(cell.getTimestamp()); - } - rowBuilder.addValues(cellBuilder); - } - builder.addRows(rowBuilder); - } - return builder.build().toByteArray(); - } - - @Override - public ProtobufMessageHandler getObjectFromMessage(byte[] message) - throws IOException { - CellSet.Builder builder = CellSet.newBuilder(); - builder.mergeFrom(message); - for (CellSet.Row row: builder.getRowsList()) { - RowModel rowModel = new RowModel(row.getKey().toByteArray()); - for (Cell cell: row.getValuesList()) { - long timestamp = HConstants.LATEST_TIMESTAMP; - if (cell.hasTimestamp()) { - timestamp = cell.getTimestamp(); - } - rowModel.addCell( - new CellModel(cell.getColumn().toByteArray(), 
http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
deleted file mode 100644
index ba0eed8..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.model;
-
-import java.io.Serializable;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import javax.xml.bind.annotation.XmlAnyAttribute;
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlRootElement;
-import javax.xml.namespace.QName;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.codehaus.jackson.annotate.JsonAnyGetter;
-import org.codehaus.jackson.annotate.JsonAnySetter;
-
-/**
- * Representation of a column family schema.
- *
- * <pre>
- * <complexType name="ColumnSchema">
- *   <attribute name="name" type="string"></attribute>
- *   <anyAttribute></anyAttribute>
- * </complexType>
- * </pre>
- */
-@XmlRootElement(name="ColumnSchema")
-@InterfaceAudience.Private
-public class ColumnSchemaModel implements Serializable {
-  private static final long serialVersionUID = 1L;
-  private static QName BLOCKCACHE = new QName(HColumnDescriptor.BLOCKCACHE);
-  private static QName BLOCKSIZE = new QName(HColumnDescriptor.BLOCKSIZE);
-  private static QName BLOOMFILTER = new QName(HColumnDescriptor.BLOOMFILTER);
-  private static QName COMPRESSION = new QName(HColumnDescriptor.COMPRESSION);
-  private static QName IN_MEMORY = new QName(HConstants.IN_MEMORY);
-  private static QName TTL = new QName(HColumnDescriptor.TTL);
-  private static QName VERSIONS = new QName(HConstants.VERSIONS);
-
-  private String name;
-  private Map<QName,Object> attrs = new LinkedHashMap<QName,Object>();
-
-  /**
-   * Default constructor
-   */
-  public ColumnSchemaModel() {}
-
-  /**
-   * Add an attribute to the column family schema
-   * @param name the attribute name
-   * @param value the attribute value
-   */
-  @JsonAnySetter
-  public void addAttribute(String name, Object value) {
-    attrs.put(new QName(name), value);
-  }
-
-  /**
-   * @param name the attribute name
-   * @return the attribute value
-   */
-  public String getAttribute(String name) {
-    Object o = attrs.get(new QName(name));
-    return o != null ? o.toString(): null;
-  }
-
-  /**
-   * @return the column name
-   */
-  @XmlAttribute
-  public String getName() {
-    return name;
-  }
-
-  /**
-   * @return the map for holding unspecified (user) attributes
-   */
-  @XmlAnyAttribute
-  @JsonAnyGetter
-  public Map<QName,Object> getAny() {
-    return attrs;
-  }
-
-  /**
-   * @param name the table name
-   */
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  /* (non-Javadoc)
-   * @see java.lang.Object#toString()
-   */
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder();
-    sb.append("{ NAME => '");
-    sb.append(name);
-    sb.append('\'');
-    for (Map.Entry<QName,Object> e: attrs.entrySet()) {
-      sb.append(", ");
-      sb.append(e.getKey().getLocalPart());
-      sb.append(" => '");
-      sb.append(e.getValue().toString());
-      sb.append('\'');
-    }
-    sb.append(" }");
-    return sb.toString();
-  }
-
-  // getters and setters for common schema attributes
-
-  // cannot be standard bean type getters and setters, otherwise this would
-  // confuse JAXB
-
-  /**
-   * @return true if the BLOCKCACHE attribute is present and true
-   */
-  public boolean __getBlockcache() {
-    Object o = attrs.get(BLOCKCACHE);
-    return o != null ?
-      Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKCACHE;
-  }
-
-  /**
-   * @return the value of the BLOCKSIZE attribute or its default if it is unset
-   */
-  public int __getBlocksize() {
-    Object o = attrs.get(BLOCKSIZE);
-    return o != null ?
-      Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKSIZE;
-  }
-
-  /**
-   * @return the value of the BLOOMFILTER attribute or its default if unset
-   */
-  public String __getBloomfilter() {
-    Object o = attrs.get(BLOOMFILTER);
-    return o != null ? o.toString() : HColumnDescriptor.DEFAULT_BLOOMFILTER;
-  }
-
-  /**
-   * @return the value of the COMPRESSION attribute or its default if unset
-   */
-  public String __getCompression() {
-    Object o = attrs.get(COMPRESSION);
-    return o != null ?
-      o.toString() : HColumnDescriptor.DEFAULT_COMPRESSION;
-  }
-
-  /**
-   * @return true if the IN_MEMORY attribute is present and true
-   */
-  public boolean __getInMemory() {
-    Object o = attrs.get(IN_MEMORY);
-    return o != null ?
-      Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_IN_MEMORY;
-  }
-
-  /**
-   * @return the value of the TTL attribute or its default if it is unset
-   */
-  public int __getTTL() {
-    Object o = attrs.get(TTL);
-    return o != null ?
-      Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_TTL;
-  }
-
-  /**
-   * @return the value of the VERSIONS attribute or its default if it is unset
-   */
-  public int __getVersions() {
-    Object o = attrs.get(VERSIONS);
-    return o != null ?
-      Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_VERSIONS;
-  }
-
-  /**
-   * @param value the desired value of the BLOCKSIZE attribute
-   */
-  public void __setBlocksize(int value) {
-    attrs.put(BLOCKSIZE, Integer.toString(value));
-  }
-
-  /**
-   * @param value the desired value of the BLOCKCACHE attribute
-   */
-  public void __setBlockcache(boolean value) {
-    attrs.put(BLOCKCACHE, Boolean.toString(value));
-  }
-
-  public void __setBloomfilter(String value) {
-    attrs.put(BLOOMFILTER, value);
-  }
-
-  /**
-   * @param value the desired value of the COMPRESSION attribute
-   */
-  public void __setCompression(String value) {
-    attrs.put(COMPRESSION, value);
-  }
-
-  /**
-   * @param value the desired value of the IN_MEMORY attribute
-   */
-  public void __setInMemory(boolean value) {
-    attrs.put(IN_MEMORY, Boolean.toString(value));
-  }
-
-  /**
-   * @param value the desired value of the TTL attribute
-   */
-  public void __setTTL(int value) {
-    attrs.put(TTL, Integer.toString(value));
-  }
-
-  /**
-   * @param value the desired value of the VERSIONS attribute
-   */
-  public void __setVersions(int value) {
-    attrs.put(VERSIONS, Integer.toString(value));
-  }
-}
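The __-prefixed helpers above wrap the QName-keyed attribute map with typed defaults; a minimal usage sketch (not part of this diff):

    import org.apache.hadoop.hbase.rest.model.ColumnSchemaModel;

    public class ColumnSchemaExample {
      public static void main(String[] args) {
        ColumnSchemaModel family = new ColumnSchemaModel();
        family.setName("cf");
        family.__setVersions(3);
        family.__setTTL(86400);
        family.addAttribute("MIN_VERSIONS", "0");  // free-form attribute

        System.out.println(family.__getVersions());      // 3
        System.out.println(family.getAttribute("TTL"));  // "86400"
        System.out.println(family);  // { NAME => 'cf', VERSIONS => '3', ... }
      }
    }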
http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java
deleted file mode 100644
index 596c754..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.model;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
-import org.codehaus.jackson.annotate.JsonProperty;
-
-/**
- * Representation of a row. A row is a related set of cells, grouped by common
- * row key. RowModels do not appear in results by themselves. They are always
- * encapsulated within CellSetModels.
- *
- * <pre>
- * <complexType name="Row">
- *   <sequence>
- *     <element name="key" type="base64Binary"></element>
- *     <element name="cell" type="tns:Cell"
- *       maxOccurs="unbounded" minOccurs="1"></element>
- *   </sequence>
- * </complexType>
- * </pre>
- */
-@XmlRootElement(name="Row")
-@XmlAccessorType(XmlAccessType.FIELD)
-@InterfaceAudience.Private
-public class RowModel implements ProtobufMessageHandler, Serializable {
-  private static final long serialVersionUID = 1L;
-
-  @JsonProperty("key")
-  @XmlAttribute
-  private byte[] key;
-
-  @JsonProperty("Cell")
-  @XmlElement(name="Cell")
-  private List<CellModel> cells = new ArrayList<CellModel>();
-
-
-  /**
-   * Default constructor
-   */
-  public RowModel() { }
-
-  /**
-   * Constructor
-   * @param key the row key
-   */
-  public RowModel(final String key) {
-    this(key.getBytes());
-  }
-
-  /**
-   * Constructor
-   * @param key the row key
-   */
-  public RowModel(final byte[] key) {
-    this.key = key;
-    cells = new ArrayList<CellModel>();
-  }
-
-  /**
-   * Constructor
-   * @param key the row key
-   * @param cells the cells
-   */
-  public RowModel(final String key, final List<CellModel> cells) {
-    this(key.getBytes(), cells);
-  }
-
-  /**
-   * Constructor
-   * @param key the row key
-   * @param cells the cells
-   */
-  public RowModel(final byte[] key, final List<CellModel> cells) {
-    this.key = key;
-    this.cells = cells;
-  }
-
-  /**
-   * Adds a cell to the list of cells for this row
-   * @param cell the cell
-   */
-  public void addCell(CellModel cell) {
-    cells.add(cell);
-  }
-
-  /**
-   * @return the row key
-   */
-  public byte[] getKey() {
-    return key;
-  }
-
-  /**
-   * @param key the row key
-   */
-  public void setKey(byte[] key) {
-    this.key = key;
-  }
-
-  /**
-   * @return the cells
-   */
-  public List<CellModel> getCells() {
-    return cells;
-  }
-
-  @Override
-  public byte[] createProtobufOutput() {
-    // there is no standalone row protobuf message
-    throw new UnsupportedOperationException(
-        "no protobuf equivalent to RowModel");
-  }
-
-  @Override
-  public ProtobufMessageHandler getObjectFromMessage(byte[] message)
-      throws IOException {
-    // there is no standalone row protobuf message
-    throw new UnsupportedOperationException(
-        "no protobuf equivalent to RowModel");
-  }
-}
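As the two stubs above enforce, a RowModel has no standalone protobuf form and must travel inside a CellSetModel; a minimal sketch (not part of this diff):

    import org.apache.hadoop.hbase.rest.model.CellSetModel;
    import org.apache.hadoop.hbase.rest.model.RowModel;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RowModelExample {
      public static void main(String[] args) {
        RowModel row = new RowModel(Bytes.toBytes("row1"));
        try {
          row.createProtobufOutput();  // always throws
        } catch (UnsupportedOperationException e) {
          System.out.println(e.getMessage());  // no protobuf equivalent to RowModel
        }
        CellSetModel cells = new CellSetModel();
        cells.addRow(row);  // the supported path: serialize the enclosing cell set
        byte[] wire = cells.createProtobufOutput();
        System.out.println(wire.length);  // a few bytes: just the row key
      }
    }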
