http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.java
new file mode 100644
index 0000000..cc74f9c
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.java
@@ -0,0 +1,78 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest.filter;
+
+import java.io.IOException;
+import java.util.zip.GZIPOutputStream;
+
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class GZIPResponseStream extends ServletOutputStream
+{
+  private HttpServletResponse response;
+  private GZIPOutputStream out;
+
+  public GZIPResponseStream(HttpServletResponse response) throws IOException {
+    this.response = response;
+    this.out = new GZIPOutputStream(response.getOutputStream());
+    response.addHeader("Content-Encoding", "gzip");
+  }
+
+  public void resetBuffer() {
+    if (out != null && !response.isCommitted()) {
+      response.setHeader("Content-Encoding", null);
+    }
+    out = null;
+  }
+
+  @Override
+  public void write(int b) throws IOException {
+    out.write(b);
+  }
+
+  @Override
+  public void write(byte[] b) throws IOException {
+    out.write(b);
+  }
+
+  @Override
+  public void write(byte[] b, int off, int len) throws IOException {
+    out.write(b, off, len);
+  }
+
+  @Override
+  public void close() throws IOException {
+    finish();
+    out.close();
+  }
+
+  @Override
+  public void flush() throws IOException {
+    out.flush();
+  }
+
+  public void finish() throws IOException {
+    out.finish();
+  }
+}
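For orientation, here is a minimal sketch of how GZIPResponseStream is meant to be driven. The real wiring lives in GZIPResponseWrapper and GzipFilter below; the class and method names in this sketch are invented for illustration.

import java.io.IOException;

import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.hbase.rest.filter.GZIPResponseStream;

public class GzipStreamSketch {
  // Write an already-serialized body through the compressing stream.
  static void writeCompressed(HttpServletResponse response, byte[] body)
      throws IOException {
    // The constructor wraps response.getOutputStream() in a GZIPOutputStream
    // and adds the "Content-Encoding: gzip" response header.
    GZIPResponseStream out = new GZIPResponseStream(response);
    out.write(body);
    // finish() emits the gzip trailer but leaves the container's underlying
    // stream open; close() would finish() and then close it as well.
    out.finish();
  }
}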
http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseWrapper.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseWrapper.java
new file mode 100644
index 0000000..2cfea1b
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseWrapper.java
@@ -0,0 +1,147 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest.filter;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpServletResponseWrapper;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class GZIPResponseWrapper extends HttpServletResponseWrapper {
+  private HttpServletResponse response;
+  private ServletOutputStream os;
+  private PrintWriter writer;
+  private boolean compress = true;
+
+  public GZIPResponseWrapper(HttpServletResponse response) {
+    super(response);
+    this.response = response;
+  }
+
+  @Override
+  public void setStatus(int status) {
+    super.setStatus(status);
+    if (status < 200 || status >= 300) {
+      compress = false;
+    }
+  }
+
+  @Override
+  public void addHeader(String name, String value) {
+    if (!"content-length".equalsIgnoreCase(name)) {
+      super.addHeader(name, value);
+    }
+  }
+
+  @Override
+  public void setContentLength(int length) {
+    // do nothing
+  }
+
+  @Override
+  public void setIntHeader(String name, int value) {
+    if (!"content-length".equalsIgnoreCase(name)) {
+      super.setIntHeader(name, value);
+    }
+  }
+
+  @Override
+  public void setHeader(String name, String value) {
+    if (!"content-length".equalsIgnoreCase(name)) {
+      super.setHeader(name, value);
+    }
+  }
+
+  @Override
+  public void flushBuffer() throws IOException {
+    if (writer != null) {
+      writer.flush();
+    }
+    if (os != null && (os instanceof GZIPResponseStream)) {
+      ((GZIPResponseStream)os).finish();
+    } else {
+      getResponse().flushBuffer();
+    }
+  }
+
+  @Override
+  public void reset() {
+    super.reset();
+    if (os != null && (os instanceof GZIPResponseStream)) {
+      ((GZIPResponseStream)os).resetBuffer();
+    }
+    writer = null;
+    os = null;
+    compress = true;
+  }
+
+  @Override
+  public void resetBuffer() {
+    super.resetBuffer();
+    if (os != null && (os instanceof GZIPResponseStream)) {
+      ((GZIPResponseStream)os).resetBuffer();
+    }
+    writer = null;
+    os = null;
+  }
+
+  @Override
+  public void sendError(int status, String msg) throws IOException {
+    resetBuffer();
+    super.sendError(status, msg);
+  }
+
+  @Override
+  public void sendError(int status) throws IOException {
+    resetBuffer();
+    super.sendError(status);
+  }
+
+  @Override
+  public void sendRedirect(String location) throws IOException {
+    resetBuffer();
+    super.sendRedirect(location);
+  }
+
+  @Override
+  public ServletOutputStream getOutputStream() throws IOException {
+    if (os == null) {
+      if (!response.isCommitted() && compress) {
+        os = (ServletOutputStream)new GZIPResponseStream(response);
+      } else {
+        os = response.getOutputStream();
+      }
+    }
+    return os;
+  }
+
+  @Override
+  public PrintWriter getWriter() throws IOException {
+    if (writer == null) {
+      writer = new PrintWriter(getOutputStream());
+    }
+    return writer;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java
new file mode 100644
index 0000000..4995b86
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java
@@ -0,0 +1,85 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest.filter;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.StringTokenizer;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
+public class GzipFilter implements Filter {
+  private Set<String> mimeTypes = new HashSet<String>();
+
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+    String s = filterConfig.getInitParameter("mimeTypes");
+    if (s != null) {
+      StringTokenizer tok = new StringTokenizer(s, ",", false);
+      while (tok.hasMoreTokens()) {
+        mimeTypes.add(tok.nextToken());
+      }
+    }
+  }
+
+  @Override
+  public void destroy() {
+  }
+
+  @Override
+  public void doFilter(ServletRequest req, ServletResponse rsp,
+      FilterChain chain) throws IOException, ServletException {
+    HttpServletRequest request = (HttpServletRequest)req;
+    HttpServletResponse response = (HttpServletResponse)rsp;
+    String contentEncoding = request.getHeader("content-encoding");
+    String acceptEncoding = request.getHeader("accept-encoding");
+    String contentType = request.getHeader("content-type");
+    if ((contentEncoding != null) &&
+        (contentEncoding.toLowerCase().indexOf("gzip") > -1)) {
+      request = new GZIPRequestWrapper(request);
+    }
+    if (((acceptEncoding != null) &&
+        (acceptEncoding.toLowerCase().indexOf("gzip") > -1)) ||
+        ((contentType != null) && mimeTypes.contains(contentType))) {
+      response = new GZIPResponseWrapper(response);
+    }
+    chain.doFilter(request, response);
+    if (response instanceof GZIPResponseWrapper) {
+      OutputStream os = response.getOutputStream();
+      if (os instanceof GZIPResponseStream) {
+        ((GZIPResponseStream)os).finish();
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
new file mode 100644
index 0000000..7c0c11f
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
@@ -0,0 +1,209 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.hadoop.hbase.rest.model; + +import java.io.IOException; +import java.io.Serializable; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlAttribute; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlValue; + +import org.apache.hadoop.hbase.util.ByteStringer; +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; +import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell; +import org.codehaus.jackson.annotate.JsonProperty; + +/** + * Representation of a cell. A cell is a single value associated a column and + * optional qualifier, and either the timestamp when it was stored or the user- + * provided timestamp if one was explicitly supplied. + * + * <pre> + * <complexType name="Cell"> + * <sequence> + * <element name="value" maxOccurs="1" minOccurs="1"> + * <simpleType> + * <restriction base="base64Binary"/> + * </simpleType> + * </element> + * </sequence> + * <attribute name="column" type="base64Binary" /> + * <attribute name="timestamp" type="int" /> + * </complexType> + * </pre> + */ +@XmlRootElement(name="Cell") +@XmlAccessorType(XmlAccessType.FIELD) [email protected] +public class CellModel implements ProtobufMessageHandler, Serializable { + private static final long serialVersionUID = 1L; + + @JsonProperty("column") + @XmlAttribute + private byte[] column; + + @JsonProperty("timestamp") + @XmlAttribute + private long timestamp = HConstants.LATEST_TIMESTAMP; + + @JsonProperty("$") + @XmlValue + private byte[] value; + + /** + * Default constructor + */ + public CellModel() {} + + /** + * Constructor + * @param column + * @param value + */ + public CellModel(byte[] column, byte[] value) { + this(column, HConstants.LATEST_TIMESTAMP, value); + } + + /** + * Constructor + * @param column + * @param qualifier + * @param value + */ + public CellModel(byte[] column, byte[] qualifier, byte[] value) { + this(column, qualifier, HConstants.LATEST_TIMESTAMP, value); + } + + /** + * Constructor from KeyValue + * @param cell + */ + public CellModel(org.apache.hadoop.hbase.Cell cell) { + this(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell), cell.getTimestamp(), CellUtil + .cloneValue(cell)); + } + + /** + * Constructor + * @param column + * @param timestamp + * @param value + */ + public CellModel(byte[] column, long timestamp, byte[] value) { + this.column = column; + this.timestamp = timestamp; + this.value = value; + } + + /** + * Constructor + * @param column + * @param qualifier + * @param timestamp + * @param value + */ + public CellModel(byte[] column, byte[] qualifier, long timestamp, + byte[] value) { + this.column = KeyValue.makeColumn(column, qualifier); + this.timestamp = timestamp; + this.value = value; + } + + /** + * @return the column + */ + public byte[] getColumn() { + return column; + } + + /** + * @param column the column to set + */ + public void setColumn(byte[] column) { + this.column = column; + } + + /** + * @return true if the timestamp property has been specified by the + * user + */ + public boolean hasUserTimestamp() { + return timestamp != HConstants.LATEST_TIMESTAMP; + } + + /** + * @return the timestamp + */ + public long getTimestamp() { + return timestamp; + } + + /** + * @param timestamp the 
timestamp to set + */ + public void setTimestamp(long timestamp) { + this.timestamp = timestamp; + } + + /** + * @return the value + */ + public byte[] getValue() { + return value; + } + + /** + * @param value the value to set + */ + public void setValue(byte[] value) { + this.value = value; + } + + @Override + public byte[] createProtobufOutput() { + Cell.Builder builder = Cell.newBuilder(); + builder.setColumn(ByteStringer.wrap(getColumn())); + builder.setData(ByteStringer.wrap(getValue())); + if (hasUserTimestamp()) { + builder.setTimestamp(getTimestamp()); + } + return builder.build().toByteArray(); + } + + @Override + public ProtobufMessageHandler getObjectFromMessage(byte[] message) + throws IOException { + Cell.Builder builder = Cell.newBuilder(); + builder.mergeFrom(message); + setColumn(builder.getColumn().toByteArray()); + setValue(builder.getData().toByteArray()); + if (builder.hasTimestamp()) { + setTimestamp(builder.getTimestamp()); + } + return this; + } +} http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java ---------------------------------------------------------------------- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java new file mode 100644 index 0000000..094da36 --- /dev/null +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java @@ -0,0 +1,152 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.rest.model; + +import java.io.IOException; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.hadoop.hbase.util.ByteStringer; +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; +import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell; +import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet; + +/** + * Representation of a grouping of cells. May contain cells from more than + * one row. Encapsulates RowModel and CellModel models. 
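+ *
+ * For example, a CellSet with one row and one cell might render in XML as
+ * follows (an illustrative document; keys, columns and values are base64
+ * encoded, and the concrete values here are invented):
+ *
+ * <pre>
+ * <CellSet>
+ *   <Row key="cm93MQ==">
+ *     <Cell column="Y2Y6cQ==" timestamp="1">dmFsdWUx</Cell>
+ *   </Row>
+ * </CellSet>
+ * </pre>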
+ * + * <pre> + * <complexType name="CellSet"> + * <sequence> + * <element name="row" type="tns:Row" maxOccurs="unbounded" + * minOccurs="1"></element> + * </sequence> + * </complexType> + * + * <complexType name="Row"> + * <sequence> + * <element name="key" type="base64Binary"></element> + * <element name="cell" type="tns:Cell" + * maxOccurs="unbounded" minOccurs="1"></element> + * </sequence> + * </complexType> + * + * <complexType name="Cell"> + * <sequence> + * <element name="value" maxOccurs="1" minOccurs="1"> + * <simpleType> + * <restriction base="base64Binary"/> + * </simpleType> + * </element> + * </sequence> + * <attribute name="column" type="base64Binary" /> + * <attribute name="timestamp" type="int" /> + * </complexType> + * </pre> + */ +@XmlRootElement(name="CellSet") +@XmlAccessorType(XmlAccessType.FIELD) [email protected] +public class CellSetModel implements Serializable, ProtobufMessageHandler { + + private static final long serialVersionUID = 1L; + + @XmlElement(name="Row") + private List<RowModel> rows; + + /** + * Constructor + */ + public CellSetModel() { + this.rows = new ArrayList<RowModel>(); + } + + /** + * @param rows the rows + */ + public CellSetModel(List<RowModel> rows) { + super(); + this.rows = rows; + } + + /** + * Add a row to this cell set + * @param row the row + */ + public void addRow(RowModel row) { + rows.add(row); + } + + /** + * @return the rows + */ + public List<RowModel> getRows() { + return rows; + } + + @Override + public byte[] createProtobufOutput() { + CellSet.Builder builder = CellSet.newBuilder(); + for (RowModel row: getRows()) { + CellSet.Row.Builder rowBuilder = CellSet.Row.newBuilder(); + rowBuilder.setKey(ByteStringer.wrap(row.getKey())); + for (CellModel cell: row.getCells()) { + Cell.Builder cellBuilder = Cell.newBuilder(); + cellBuilder.setColumn(ByteStringer.wrap(cell.getColumn())); + cellBuilder.setData(ByteStringer.wrap(cell.getValue())); + if (cell.hasUserTimestamp()) { + cellBuilder.setTimestamp(cell.getTimestamp()); + } + rowBuilder.addValues(cellBuilder); + } + builder.addRows(rowBuilder); + } + return builder.build().toByteArray(); + } + + @Override + public ProtobufMessageHandler getObjectFromMessage(byte[] message) + throws IOException { + CellSet.Builder builder = CellSet.newBuilder(); + builder.mergeFrom(message); + for (CellSet.Row row: builder.getRowsList()) { + RowModel rowModel = new RowModel(row.getKey().toByteArray()); + for (Cell cell: row.getValuesList()) { + long timestamp = HConstants.LATEST_TIMESTAMP; + if (cell.hasTimestamp()) { + timestamp = cell.getTimestamp(); + } + rowModel.addCell( + new CellModel(cell.getColumn().toByteArray(), timestamp, + cell.getData().toByteArray())); + } + addRow(rowModel); + } + return this; + } +} http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java ---------------------------------------------------------------------- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java new file mode 100644 index 0000000..ba0eed8 --- /dev/null +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java @@ -0,0 +1,241 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.rest.model; + +import java.io.Serializable; +import java.util.LinkedHashMap; +import java.util.Map; + +import javax.xml.bind.annotation.XmlAnyAttribute; +import javax.xml.bind.annotation.XmlAttribute; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.namespace.QName; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.codehaus.jackson.annotate.JsonAnyGetter; +import org.codehaus.jackson.annotate.JsonAnySetter; + +/** + * Representation of a column family schema. + * + * <pre> + * <complexType name="ColumnSchema"> + * <attribute name="name" type="string"></attribute> + * <anyAttribute></anyAttribute> + * </complexType> + * </pre> + */ +@XmlRootElement(name="ColumnSchema") [email protected] +public class ColumnSchemaModel implements Serializable { + private static final long serialVersionUID = 1L; + private static QName BLOCKCACHE = new QName(HColumnDescriptor.BLOCKCACHE); + private static QName BLOCKSIZE = new QName(HColumnDescriptor.BLOCKSIZE); + private static QName BLOOMFILTER = new QName(HColumnDescriptor.BLOOMFILTER); + private static QName COMPRESSION = new QName(HColumnDescriptor.COMPRESSION); + private static QName IN_MEMORY = new QName(HConstants.IN_MEMORY); + private static QName TTL = new QName(HColumnDescriptor.TTL); + private static QName VERSIONS = new QName(HConstants.VERSIONS); + + private String name; + private Map<QName,Object> attrs = new LinkedHashMap<QName,Object>(); + + /** + * Default constructor + */ + public ColumnSchemaModel() {} + + /** + * Add an attribute to the column family schema + * @param name the attribute name + * @param value the attribute value + */ + @JsonAnySetter + public void addAttribute(String name, Object value) { + attrs.put(new QName(name), value); + } + + /** + * @param name the attribute name + * @return the attribute value + */ + public String getAttribute(String name) { + Object o = attrs.get(new QName(name)); + return o != null ? 
o.toString(): null; + } + + /** + * @return the column name + */ + @XmlAttribute + public String getName() { + return name; + } + + /** + * @return the map for holding unspecified (user) attributes + */ + @XmlAnyAttribute + @JsonAnyGetter + public Map<QName,Object> getAny() { + return attrs; + } + + /** + * @param name the table name + */ + public void setName(String name) { + this.name = name; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("{ NAME => '"); + sb.append(name); + sb.append('\''); + for (Map.Entry<QName,Object> e: attrs.entrySet()) { + sb.append(", "); + sb.append(e.getKey().getLocalPart()); + sb.append(" => '"); + sb.append(e.getValue().toString()); + sb.append('\''); + } + sb.append(" }"); + return sb.toString(); + } + + // getters and setters for common schema attributes + + // cannot be standard bean type getters and setters, otherwise this would + // confuse JAXB + + /** + * @return true if the BLOCKCACHE attribute is present and true + */ + public boolean __getBlockcache() { + Object o = attrs.get(BLOCKCACHE); + return o != null ? + Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKCACHE; + } + + /** + * @return the value of the BLOCKSIZE attribute or its default if it is unset + */ + public int __getBlocksize() { + Object o = attrs.get(BLOCKSIZE); + return o != null ? + Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKSIZE; + } + + /** + * @return the value of the BLOOMFILTER attribute or its default if unset + */ + public String __getBloomfilter() { + Object o = attrs.get(BLOOMFILTER); + return o != null ? o.toString() : HColumnDescriptor.DEFAULT_BLOOMFILTER; + } + + /** + * @return the value of the COMPRESSION attribute or its default if unset + */ + public String __getCompression() { + Object o = attrs.get(COMPRESSION); + return o != null ? o.toString() : HColumnDescriptor.DEFAULT_COMPRESSION; + } + + /** + * @return true if the IN_MEMORY attribute is present and true + */ + public boolean __getInMemory() { + Object o = attrs.get(IN_MEMORY); + return o != null ? + Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_IN_MEMORY; + } + + /** + * @return the value of the TTL attribute or its default if it is unset + */ + public int __getTTL() { + Object o = attrs.get(TTL); + return o != null ? + Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_TTL; + } + + /** + * @return the value of the VERSIONS attribute or its default if it is unset + */ + public int __getVersions() { + Object o = attrs.get(VERSIONS); + return o != null ? 
+ Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_VERSIONS; + } + + /** + * @param value the desired value of the BLOCKSIZE attribute + */ + public void __setBlocksize(int value) { + attrs.put(BLOCKSIZE, Integer.toString(value)); + } + + /** + * @param value the desired value of the BLOCKCACHE attribute + */ + public void __setBlockcache(boolean value) { + attrs.put(BLOCKCACHE, Boolean.toString(value)); + } + + public void __setBloomfilter(String value) { + attrs.put(BLOOMFILTER, value); + } + + /** + * @param value the desired value of the COMPRESSION attribute + */ + public void __setCompression(String value) { + attrs.put(COMPRESSION, value); + } + + /** + * @param value the desired value of the IN_MEMORY attribute + */ + public void __setInMemory(boolean value) { + attrs.put(IN_MEMORY, Boolean.toString(value)); + } + + /** + * @param value the desired value of the TTL attribute + */ + public void __setTTL(int value) { + attrs.put(TTL, Integer.toString(value)); + } + + /** + * @param value the desired value of the VERSIONS attribute + */ + public void __setVersions(int value) { + attrs.put(VERSIONS, Integer.toString(value)); + } +} http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java ---------------------------------------------------------------------- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java new file mode 100644 index 0000000..596c754 --- /dev/null +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java @@ -0,0 +1,151 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.rest.model; + +import java.io.IOException; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlAttribute; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; +import org.codehaus.jackson.annotate.JsonProperty; + +/** + * Representation of a row. A row is a related set of cells, grouped by common + * row key. RowModels do not appear in results by themselves. They are always + * encapsulated within CellSetModels. 
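+ *
+ * In the JSON rendering, a row serializes along these lines (illustrative,
+ * base64-encoded values; note the "$" property carrying the cell value):
+ *
+ * <pre>
+ * {"key":"cm93MQ==","Cell":[{"column":"Y2Y6cQ==","timestamp":1,"$":"dmFsdWUx"}]}
+ * </pre>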
+ * + * <pre> + * <complexType name="Row"> + * <sequence> + * <element name="key" type="base64Binary"></element> + * <element name="cell" type="tns:Cell" + * maxOccurs="unbounded" minOccurs="1"></element> + * </sequence> + * </complexType> + * </pre> + */ +@XmlRootElement(name="Row") +@XmlAccessorType(XmlAccessType.FIELD) [email protected] +public class RowModel implements ProtobufMessageHandler, Serializable { + private static final long serialVersionUID = 1L; + + @JsonProperty("key") + @XmlAttribute + private byte[] key; + + @JsonProperty("Cell") + @XmlElement(name="Cell") + private List<CellModel> cells = new ArrayList<CellModel>(); + + + /** + * Default constructor + */ + public RowModel() { } + + /** + * Constructor + * @param key the row key + */ + public RowModel(final String key) { + this(key.getBytes()); + } + + /** + * Constructor + * @param key the row key + */ + public RowModel(final byte[] key) { + this.key = key; + cells = new ArrayList<CellModel>(); + } + + /** + * Constructor + * @param key the row key + * @param cells the cells + */ + public RowModel(final String key, final List<CellModel> cells) { + this(key.getBytes(), cells); + } + + /** + * Constructor + * @param key the row key + * @param cells the cells + */ + public RowModel(final byte[] key, final List<CellModel> cells) { + this.key = key; + this.cells = cells; + } + + /** + * Adds a cell to the list of cells for this row + * @param cell the cell + */ + public void addCell(CellModel cell) { + cells.add(cell); + } + + /** + * @return the row key + */ + public byte[] getKey() { + return key; + } + + /** + * @param key the row key + */ + public void setKey(byte[] key) { + this.key = key; + } + + /** + * @return the cells + */ + public List<CellModel> getCells() { + return cells; + } + + @Override + public byte[] createProtobufOutput() { + // there is no standalone row protobuf message + throw new UnsupportedOperationException( + "no protobuf equivalent to RowModel"); + } + + @Override + public ProtobufMessageHandler getObjectFromMessage(byte[] message) + throws IOException { + // there is no standalone row protobuf message + throw new UnsupportedOperationException( + "no protobuf equivalent to RowModel"); + } +} http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java ---------------------------------------------------------------------- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java new file mode 100644 index 0000000..2ffdd4f --- /dev/null +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java @@ -0,0 +1,852 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.rest.model; + +import java.io.IOException; +import java.io.Serializable; +import java.io.StringReader; +import java.io.StringWriter; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.NavigableSet; + +import javax.xml.bind.annotation.XmlAttribute; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.filter.BinaryComparator; +import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; +import org.apache.hadoop.hbase.filter.BitComparator; +import org.apache.hadoop.hbase.filter.ByteArrayComparable; +import org.apache.hadoop.hbase.filter.ColumnCountGetFilter; +import org.apache.hadoop.hbase.filter.ColumnPaginationFilter; +import org.apache.hadoop.hbase.filter.ColumnPrefixFilter; +import org.apache.hadoop.hbase.filter.ColumnRangeFilter; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; +import org.apache.hadoop.hbase.filter.DependentColumnFilter; +import org.apache.hadoop.hbase.filter.FamilyFilter; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; +import org.apache.hadoop.hbase.filter.InclusiveStopFilter; +import org.apache.hadoop.hbase.filter.KeyOnlyFilter; +import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter; +import org.apache.hadoop.hbase.filter.NullComparator; +import org.apache.hadoop.hbase.filter.PageFilter; +import org.apache.hadoop.hbase.filter.PrefixFilter; +import org.apache.hadoop.hbase.filter.QualifierFilter; +import org.apache.hadoop.hbase.filter.RandomRowFilter; +import org.apache.hadoop.hbase.filter.RegexStringComparator; +import org.apache.hadoop.hbase.filter.RowFilter; +import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter; +import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; +import org.apache.hadoop.hbase.filter.SkipFilter; +import org.apache.hadoop.hbase.filter.SubstringComparator; +import org.apache.hadoop.hbase.filter.TimestampsFilter; +import org.apache.hadoop.hbase.filter.ValueFilter; +import org.apache.hadoop.hbase.filter.WhileMatchFilter; +import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; +import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner; +import org.apache.hadoop.hbase.security.visibility.Authorizations; +import org.apache.hadoop.hbase.util.Base64; +import org.apache.hadoop.hbase.util.ByteStringer; +import org.apache.hadoop.hbase.util.Bytes; + +import com.google.protobuf.ByteString; +import com.sun.jersey.api.json.JSONConfiguration; +import com.sun.jersey.api.json.JSONJAXBContext; +import com.sun.jersey.api.json.JSONMarshaller; +import com.sun.jersey.api.json.JSONUnmarshaller; + +/** + * A representation of Scanner parameters. 
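+ *
+ * For example, a scanner over the half-open row range ["row1", "row2")
+ * fetching one column might be specified as follows (illustrative,
+ * base64-encoded row keys and column):
+ *
+ * <pre>
+ * <Scanner batch="100" startRow="cm93MQ==" endRow="cm93Mg==">
+ *   <column>Y2Y6cQ==</column>
+ * </Scanner>
+ * </pre>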
+ * + * <pre> + * <complexType name="Scanner"> + * <sequence> + * <element name="column" type="base64Binary" minOccurs="0" maxOccurs="unbounded"/> + * <element name="filter" type="string" minOccurs="0" maxOccurs="1"></element> + * </sequence> + * <attribute name="startRow" type="base64Binary"></attribute> + * <attribute name="endRow" type="base64Binary"></attribute> + * <attribute name="batch" type="int"></attribute> + * <attribute name="caching" type="int"></attribute> + * <attribute name="startTime" type="int"></attribute> + * <attribute name="endTime" type="int"></attribute> + * <attribute name="maxVersions" type="int"></attribute> + * </complexType> + * </pre> + */ +@XmlRootElement(name="Scanner") [email protected] +public class ScannerModel implements ProtobufMessageHandler, Serializable { + + private static final long serialVersionUID = 1L; + + private byte[] startRow = HConstants.EMPTY_START_ROW; + private byte[] endRow = HConstants.EMPTY_END_ROW;; + private List<byte[]> columns = new ArrayList<byte[]>(); + private int batch = Integer.MAX_VALUE; + private long startTime = 0; + private long endTime = Long.MAX_VALUE; + private String filter = null; + private int maxVersions = Integer.MAX_VALUE; + private int caching = -1; + private List<String> labels = new ArrayList<String>(); + private boolean cacheBlocks = true; + + @XmlRootElement + static class FilterModel { + + @XmlRootElement + static class ByteArrayComparableModel { + @XmlAttribute public String type; + @XmlAttribute public String value; + @XmlAttribute public String op; + + static enum ComparatorType { + BinaryComparator, + BinaryPrefixComparator, + BitComparator, + NullComparator, + RegexStringComparator, + SubstringComparator + } + + public ByteArrayComparableModel() { } + + public ByteArrayComparableModel( + ByteArrayComparable comparator) { + String typeName = comparator.getClass().getSimpleName(); + ComparatorType type = ComparatorType.valueOf(typeName); + this.type = typeName; + switch (type) { + case BinaryComparator: + case BinaryPrefixComparator: + this.value = Base64.encodeBytes(comparator.getValue()); + break; + case BitComparator: + this.value = Base64.encodeBytes(comparator.getValue()); + this.op = ((BitComparator)comparator).getOperator().toString(); + break; + case NullComparator: + break; + case RegexStringComparator: + case SubstringComparator: + this.value = Bytes.toString(comparator.getValue()); + break; + default: + throw new RuntimeException("unhandled filter type: " + type); + } + } + + public ByteArrayComparable build() { + ByteArrayComparable comparator; + switch (ComparatorType.valueOf(type)) { + case BinaryComparator: + comparator = new BinaryComparator(Base64.decode(value)); + break; + case BinaryPrefixComparator: + comparator = new BinaryPrefixComparator(Base64.decode(value)); + break; + case BitComparator: + comparator = new BitComparator(Base64.decode(value), + BitComparator.BitwiseOp.valueOf(op)); + break; + case NullComparator: + comparator = new NullComparator(); + break; + case RegexStringComparator: + comparator = new RegexStringComparator(value); + break; + case SubstringComparator: + comparator = new SubstringComparator(value); + break; + default: + throw new RuntimeException("unhandled comparator type: " + type); + } + return comparator; + } + + } + + // A grab bag of fields, would have been a union if this were C. + // These are null by default and will only be serialized if set (non null). 
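+    // For example, new FilterModel(new PrefixFilter(Bytes.toBytes("row1")))
+    // would serialize via stringifyFilter() below to the JSON document
+    // {"type":"PrefixFilter","value":"cm93MQ=="} (the prefix is illustrative).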
+ @XmlAttribute public String type; + @XmlAttribute public String op; + @XmlElement ByteArrayComparableModel comparator; + @XmlAttribute public String value; + @XmlElement public List<FilterModel> filters; + @XmlAttribute public Integer limit; + @XmlAttribute public Integer offset; + @XmlAttribute public String family; + @XmlAttribute public String qualifier; + @XmlAttribute public Boolean ifMissing; + @XmlAttribute public Boolean latestVersion; + @XmlAttribute public String minColumn; + @XmlAttribute public Boolean minColumnInclusive; + @XmlAttribute public String maxColumn; + @XmlAttribute public Boolean maxColumnInclusive; + @XmlAttribute public Boolean dropDependentColumn; + @XmlAttribute public Float chance; + @XmlElement public List<String> prefixes; + @XmlElement public List<Long> timestamps; + + static enum FilterType { + ColumnCountGetFilter, + ColumnPaginationFilter, + ColumnPrefixFilter, + ColumnRangeFilter, + DependentColumnFilter, + FamilyFilter, + FilterList, + FirstKeyOnlyFilter, + InclusiveStopFilter, + KeyOnlyFilter, + MultipleColumnPrefixFilter, + PageFilter, + PrefixFilter, + QualifierFilter, + RandomRowFilter, + RowFilter, + SingleColumnValueExcludeFilter, + SingleColumnValueFilter, + SkipFilter, + TimestampsFilter, + ValueFilter, + WhileMatchFilter + } + + public FilterModel() { } + + public FilterModel(Filter filter) { + String typeName = filter.getClass().getSimpleName(); + FilterType type = FilterType.valueOf(typeName); + this.type = typeName; + switch (type) { + case ColumnCountGetFilter: + this.limit = ((ColumnCountGetFilter)filter).getLimit(); + break; + case ColumnPaginationFilter: + this.limit = ((ColumnPaginationFilter)filter).getLimit(); + this.offset = ((ColumnPaginationFilter)filter).getOffset(); + break; + case ColumnPrefixFilter: + this.value = Base64.encodeBytes(((ColumnPrefixFilter)filter).getPrefix()); + break; + case ColumnRangeFilter: + this.minColumn = Base64.encodeBytes(((ColumnRangeFilter)filter).getMinColumn()); + this.minColumnInclusive = ((ColumnRangeFilter)filter).getMinColumnInclusive(); + this.maxColumn = Base64.encodeBytes(((ColumnRangeFilter)filter).getMaxColumn()); + this.maxColumnInclusive = ((ColumnRangeFilter)filter).getMaxColumnInclusive(); + break; + case DependentColumnFilter: { + DependentColumnFilter dcf = (DependentColumnFilter)filter; + this.family = Base64.encodeBytes(dcf.getFamily()); + byte[] qualifier = dcf.getQualifier(); + if (qualifier != null) { + this.qualifier = Base64.encodeBytes(qualifier); + } + this.op = dcf.getOperator().toString(); + this.comparator = new ByteArrayComparableModel(dcf.getComparator()); + this.dropDependentColumn = dcf.dropDependentColumn(); + } break; + case FilterList: + this.op = ((FilterList)filter).getOperator().toString(); + this.filters = new ArrayList<FilterModel>(); + for (Filter child: ((FilterList)filter).getFilters()) { + this.filters.add(new FilterModel(child)); + } + break; + case FirstKeyOnlyFilter: + case KeyOnlyFilter: + break; + case InclusiveStopFilter: + this.value = + Base64.encodeBytes(((InclusiveStopFilter)filter).getStopRowKey()); + break; + case MultipleColumnPrefixFilter: + this.prefixes = new ArrayList<String>(); + for (byte[] prefix: ((MultipleColumnPrefixFilter)filter).getPrefix()) { + this.prefixes.add(Base64.encodeBytes(prefix)); + } + break; + case PageFilter: + this.value = Long.toString(((PageFilter)filter).getPageSize()); + break; + case PrefixFilter: + this.value = Base64.encodeBytes(((PrefixFilter)filter).getPrefix()); + break; + case FamilyFilter: + case 
QualifierFilter: + case RowFilter: + case ValueFilter: + this.op = ((CompareFilter)filter).getOperator().toString(); + this.comparator = + new ByteArrayComparableModel( + ((CompareFilter)filter).getComparator()); + break; + case RandomRowFilter: + this.chance = ((RandomRowFilter)filter).getChance(); + break; + case SingleColumnValueExcludeFilter: + case SingleColumnValueFilter: { + SingleColumnValueFilter scvf = (SingleColumnValueFilter) filter; + this.family = Base64.encodeBytes(scvf.getFamily()); + byte[] qualifier = scvf.getQualifier(); + if (qualifier != null) { + this.qualifier = Base64.encodeBytes(qualifier); + } + this.op = scvf.getOperator().toString(); + this.comparator = + new ByteArrayComparableModel(scvf.getComparator()); + if (scvf.getFilterIfMissing()) { + this.ifMissing = true; + } + if (scvf.getLatestVersionOnly()) { + this.latestVersion = true; + } + } break; + case SkipFilter: + this.filters = new ArrayList<FilterModel>(); + this.filters.add(new FilterModel(((SkipFilter)filter).getFilter())); + break; + case TimestampsFilter: + this.timestamps = ((TimestampsFilter)filter).getTimestamps(); + break; + case WhileMatchFilter: + this.filters = new ArrayList<FilterModel>(); + this.filters.add( + new FilterModel(((WhileMatchFilter)filter).getFilter())); + break; + default: + throw new RuntimeException("unhandled filter type " + type); + } + } + + public Filter build() { + Filter filter; + switch (FilterType.valueOf(type)) { + case ColumnCountGetFilter: + filter = new ColumnCountGetFilter(limit); + break; + case ColumnPaginationFilter: + filter = new ColumnPaginationFilter(limit, offset); + break; + case ColumnPrefixFilter: + filter = new ColumnPrefixFilter(Base64.decode(value)); + break; + case ColumnRangeFilter: + filter = new ColumnRangeFilter(Base64.decode(minColumn), + minColumnInclusive, Base64.decode(maxColumn), + maxColumnInclusive); + break; + case DependentColumnFilter: + filter = new DependentColumnFilter(Base64.decode(family), + qualifier != null ? Base64.decode(qualifier) : null, + dropDependentColumn, CompareOp.valueOf(op), comparator.build()); + break; + case FamilyFilter: + filter = new FamilyFilter(CompareOp.valueOf(op), comparator.build()); + break; + case FilterList: { + List<Filter> list = new ArrayList<Filter>(); + for (FilterModel model: filters) { + list.add(model.build()); + } + filter = new FilterList(FilterList.Operator.valueOf(op), list); + } break; + case FirstKeyOnlyFilter: + filter = new FirstKeyOnlyFilter(); + break; + case InclusiveStopFilter: + filter = new InclusiveStopFilter(Base64.decode(value)); + break; + case KeyOnlyFilter: + filter = new KeyOnlyFilter(); + break; + case MultipleColumnPrefixFilter: { + byte[][] values = new byte[prefixes.size()][]; + for (int i = 0; i < prefixes.size(); i++) { + values[i] = Base64.decode(prefixes.get(i)); + } + filter = new MultipleColumnPrefixFilter(values); + } break; + case PageFilter: + filter = new PageFilter(Long.valueOf(value)); + break; + case PrefixFilter: + filter = new PrefixFilter(Base64.decode(value)); + break; + case QualifierFilter: + filter = new QualifierFilter(CompareOp.valueOf(op), comparator.build()); + break; + case RandomRowFilter: + filter = new RandomRowFilter(chance); + break; + case RowFilter: + filter = new RowFilter(CompareOp.valueOf(op), comparator.build()); + break; + case SingleColumnValueFilter: + filter = new SingleColumnValueFilter(Base64.decode(family), + qualifier != null ? 
Base64.decode(qualifier) : null, + CompareOp.valueOf(op), comparator.build()); + if (ifMissing != null) { + ((SingleColumnValueFilter)filter).setFilterIfMissing(ifMissing); + } + if (latestVersion != null) { + ((SingleColumnValueFilter)filter).setLatestVersionOnly(latestVersion); + } + break; + case SingleColumnValueExcludeFilter: + filter = new SingleColumnValueExcludeFilter(Base64.decode(family), + qualifier != null ? Base64.decode(qualifier) : null, + CompareOp.valueOf(op), comparator.build()); + if (ifMissing != null) { + ((SingleColumnValueExcludeFilter)filter).setFilterIfMissing(ifMissing); + } + if (latestVersion != null) { + ((SingleColumnValueExcludeFilter)filter).setLatestVersionOnly(latestVersion); + } + break; + case SkipFilter: + filter = new SkipFilter(filters.get(0).build()); + break; + case TimestampsFilter: + filter = new TimestampsFilter(timestamps); + break; + case ValueFilter: + filter = new ValueFilter(CompareOp.valueOf(op), comparator.build()); + break; + case WhileMatchFilter: + filter = new WhileMatchFilter(filters.get(0).build()); + break; + default: + throw new RuntimeException("unhandled filter type: " + type); + } + return filter; + } + + } + + /** + * @param s the JSON representation of the filter + * @return the filter + * @throws Exception + */ + public static Filter buildFilter(String s) throws Exception { + JSONJAXBContext context = + new JSONJAXBContext(JSONConfiguration.natural().build(), + FilterModel.class); + JSONUnmarshaller unmarshaller = context.createJSONUnmarshaller(); + FilterModel model = unmarshaller.unmarshalFromJSON(new StringReader(s), + FilterModel.class); + return model.build(); + } + + /** + * @param filter the filter + * @return the JSON representation of the filter + * @throws Exception + */ + public static String stringifyFilter(final Filter filter) throws Exception { + JSONJAXBContext context = + new JSONJAXBContext(JSONConfiguration.natural().build(), + FilterModel.class); + JSONMarshaller marshaller = context.createJSONMarshaller(); + StringWriter writer = new StringWriter(); + marshaller.marshallToJSON(new FilterModel(filter), writer); + return writer.toString(); + } + + private static final byte[] COLUMN_DIVIDER = Bytes.toBytes(":"); + + /** + * @param scan the scan specification + * @throws Exception + */ + public static ScannerModel fromScan(Scan scan) throws Exception { + ScannerModel model = new ScannerModel(); + model.setStartRow(scan.getStartRow()); + model.setEndRow(scan.getStopRow()); + Map<byte [], NavigableSet<byte []>> families = scan.getFamilyMap(); + if (families != null) { + for (Map.Entry<byte [], NavigableSet<byte []>> entry : families.entrySet()) { + if (entry.getValue() != null) { + for (byte[] qualifier: entry.getValue()) { + model.addColumn(Bytes.add(entry.getKey(), COLUMN_DIVIDER, qualifier)); + } + } else { + model.addColumn(entry.getKey()); + } + } + } + model.setStartTime(scan.getTimeRange().getMin()); + model.setEndTime(scan.getTimeRange().getMax()); + int caching = scan.getCaching(); + if (caching > 0) { + model.setCaching(caching); + } + int batch = scan.getBatch(); + if (batch > 0) { + model.setBatch(batch); + } + int maxVersions = scan.getMaxVersions(); + if (maxVersions > 0) { + model.setMaxVersions(maxVersions); + } + Filter filter = scan.getFilter(); + if (filter != null) { + model.setFilter(stringifyFilter(filter)); + } + // Add the visbility labels if found in the attributes + Authorizations authorizations = scan.getAuthorizations(); + if (authorizations != null) { + List<String> labels = 
authorizations.getLabels(); + for (String label : labels) { + model.addLabel(label); + } + } + return model; + } + + /** + * Default constructor + */ + public ScannerModel() {} + + /** + * Constructor + * @param startRow the start key of the row-range + * @param endRow the end key of the row-range + * @param columns the columns to scan + * @param batch the number of values to return in batch + * @param caching the number of rows that the scanner will fetch at once + * @param endTime the upper bound on timestamps of values of interest + * @param maxVersions the maximum number of versions to return + * @param filter a filter specification + * (values with timestamps later than this are excluded) + */ + public ScannerModel(byte[] startRow, byte[] endRow, List<byte[]> columns, + int batch, int caching, long endTime, int maxVersions, String filter) { + super(); + this.startRow = startRow; + this.endRow = endRow; + this.columns = columns; + this.batch = batch; + this.caching = caching; + this.endTime = endTime; + this.maxVersions = maxVersions; + this.filter = filter; + } + + /** + * Constructor + * @param startRow the start key of the row-range + * @param endRow the end key of the row-range + * @param columns the columns to scan + * @param batch the number of values to return in batch + * @param caching the number of rows that the scanner will fetch at once + * @param startTime the lower bound on timestamps of values of interest + * (values with timestamps earlier than this are excluded) + * @param endTime the upper bound on timestamps of values of interest + * (values with timestamps later than this are excluded) + * @param filter a filter specification + */ + public ScannerModel(byte[] startRow, byte[] endRow, List<byte[]> columns, + int batch, int caching, long startTime, long endTime, String filter) { + super(); + this.startRow = startRow; + this.endRow = endRow; + this.columns = columns; + this.batch = batch; + this.caching = caching; + this.startTime = startTime; + this.endTime = endTime; + this.filter = filter; + } + + /** + * Add a column to the column set + * @param column the column name, as <column>(:<qualifier>)? 
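+ * (for example "info" for a whole column family, or "info:name" for a
+ * single column; the names are illustrative)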
+ */ + public void addColumn(byte[] column) { + columns.add(column); + } + + /** + * Add a visibility label to the scan + */ + public void addLabel(String label) { + labels.add(label); + } + /** + * @return true if a start row was specified + */ + public boolean hasStartRow() { + return !Bytes.equals(startRow, HConstants.EMPTY_START_ROW); + } + + /** + * @return start row + */ + @XmlAttribute + public byte[] getStartRow() { + return startRow; + } + + /** + * @return true if an end row was specified + */ + public boolean hasEndRow() { + return !Bytes.equals(endRow, HConstants.EMPTY_END_ROW); + } + + /** + * @return end row + */ + @XmlAttribute + public byte[] getEndRow() { + return endRow; + } + + /** + * @return list of columns of interest in column:qualifier format, or empty for all + */ + @XmlElement(name="column") + public List<byte[]> getColumns() { + return columns; + } + + @XmlElement(name="labels") + public List<String> getLabels() { + return labels; + } + + /** + * @return the number of cells to return in batch + */ + @XmlAttribute + public int getBatch() { + return batch; + } + + /** + * @return the number of rows that the scanner to fetch at once + */ + @XmlAttribute + public int getCaching() { + return caching; + } + + /** + * @return true if HFile blocks should be cached on the servers for this scan, false otherwise + */ + @XmlAttribute + public boolean getCacheBlocks() { + return cacheBlocks; + } + + /** + * @return the lower bound on timestamps of items of interest + */ + @XmlAttribute + public long getStartTime() { + return startTime; + } + + /** + * @return the upper bound on timestamps of items of interest + */ + @XmlAttribute + public long getEndTime() { + return endTime; + } + + /** + * @return maximum number of versions to return + */ + @XmlAttribute + public int getMaxVersions() { + return maxVersions; + } + + /** + * @return the filter specification + */ + @XmlElement + public String getFilter() { + return filter; + } + + /** + * @param startRow start row + */ + public void setStartRow(byte[] startRow) { + this.startRow = startRow; + } + + /** + * @param endRow end row + */ + public void setEndRow(byte[] endRow) { + this.endRow = endRow; + } + + /** + * @param columns list of columns of interest in column:qualifier format, or empty for all + */ + public void setColumns(List<byte[]> columns) { + this.columns = columns; + } + + /** + * @param batch the number of cells to return in batch + */ + public void setBatch(int batch) { + this.batch = batch; + } + + /** + * @param caching the number of rows to fetch at once + */ + public void setCaching(int caching) { + this.caching = caching; + } + + /** + * @param value true if HFile blocks should be cached on the servers for this scan, false otherwise + */ + public void setCacheBlocks(boolean value) { + this.cacheBlocks = value; + } + + /** + * @param maxVersions maximum number of versions to return + */ + public void setMaxVersions(int maxVersions) { + this.maxVersions = maxVersions; + } + + /** + * @param startTime the lower bound on timestamps of values of interest + */ + public void setStartTime(long startTime) { + this.startTime = startTime; + } + + /** + * @param endTime the upper bound on timestamps of values of interest + */ + public void setEndTime(long endTime) { + this.endTime = endTime; + } + + /** + * @param filter the filter specification + */ + public void setFilter(String filter) { + this.filter = filter; + } + + @Override + public byte[] createProtobufOutput() { + Scanner.Builder builder = 
Scanner.newBuilder(); + if (!Bytes.equals(startRow, HConstants.EMPTY_START_ROW)) { + builder.setStartRow(ByteStringer.wrap(startRow)); + } + if (!Bytes.equals(endRow, HConstants.EMPTY_START_ROW)) { + builder.setEndRow(ByteStringer.wrap(endRow)); + } + for (byte[] column: columns) { + builder.addColumns(ByteStringer.wrap(column)); + } + if (startTime != 0) { + builder.setStartTime(startTime); + } + if (endTime != 0) { + builder.setEndTime(endTime); + } + builder.setBatch(getBatch()); + if (caching > 0) { + builder.setCaching(caching); + } + builder.setMaxVersions(maxVersions); + if (filter != null) { + builder.setFilter(filter); + } + if (labels != null && labels.size() > 0) { + for (String label : labels) + builder.addLabels(label); + } + builder.setCacheBlocks(cacheBlocks); + return builder.build().toByteArray(); + } + + @Override + public ProtobufMessageHandler getObjectFromMessage(byte[] message) + throws IOException { + Scanner.Builder builder = Scanner.newBuilder(); + builder.mergeFrom(message); + if (builder.hasStartRow()) { + startRow = builder.getStartRow().toByteArray(); + } + if (builder.hasEndRow()) { + endRow = builder.getEndRow().toByteArray(); + } + for (ByteString column: builder.getColumnsList()) { + addColumn(column.toByteArray()); + } + if (builder.hasBatch()) { + batch = builder.getBatch(); + } + if (builder.hasCaching()) { + caching = builder.getCaching(); + } + if (builder.hasStartTime()) { + startTime = builder.getStartTime(); + } + if (builder.hasEndTime()) { + endTime = builder.getEndTime(); + } + if (builder.hasMaxVersions()) { + maxVersions = builder.getMaxVersions(); + } + if (builder.hasFilter()) { + filter = builder.getFilter(); + } + if (builder.getLabelsList() != null) { + List<String> labels = builder.getLabelsList(); + for(String label : labels) { + addLabel(label); + } + } + if (builder.hasCacheBlocks()) { + this.cacheBlocks = builder.getCacheBlocks(); + } + return this; + } + +}
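A short sketch of the filter JSON round trip provided by the static helpers above (illustrative; the class name FilterRoundTripSketch is invented, while buildFilter and stringifyFilter are the methods defined in this diff):

import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.rest.model.ScannerModel;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterRoundTripSketch {
  public static void main(String[] args) throws Exception {
    // Serialize a filter to the JSON form carried in the Scanner "filter"
    // element, e.g. {"type":"PrefixFilter","value":"cm93MQ=="}.
    Filter prefix = new PrefixFilter(Bytes.toBytes("row1"));
    String json = ScannerModel.stringifyFilter(prefix);
    // Rebuild an equivalent Filter instance from that JSON.
    Filter rebuilt = ScannerModel.buildFilter(json);
    System.out.println(json + " -> " + rebuilt.getClass().getSimpleName());
  }
}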
