http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java deleted file mode 100644 index 93b4b36..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java +++ /dev/null @@ -1,160 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
package org.apache.hadoop.yarn.server.timelineservice.storage.entity;

import java.io.IOException;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;

/**
 * Identifies fully qualified columns for the {@link EntityTable}.
 * Each constant pairs a column family with a fixed column qualifier and a
 * {@link ValueConverter} used to encode/decode the cell value.
 */
public enum EntityColumn implements Column<EntityTable> {

  /**
   * Identifier for the entity.
   */
  ID(EntityColumnFamily.INFO, "id"),

  /**
   * The type of entity.
   */
  TYPE(EntityColumnFamily.INFO, "type"),

  /**
   * When the entity was created. Stored as a long via {@link LongConverter}.
   */
  CREATED_TIME(EntityColumnFamily.INFO, "created_time", new LongConverter()),

  /**
   * The version of the flow that this entity belongs to.
   */
  FLOW_VERSION(EntityColumnFamily.INFO, "flow_version");

  // Helper that performs the actual HBase store/read using the converter.
  private final ColumnHelper<EntityTable> column;
  // Family this column lives in (all current constants use INFO).
  private final ColumnFamily<EntityTable> columnFamily;
  // Plain-text qualifier name, as passed to the constructor.
  private final String columnQualifier;
  // Encoded byte form of the qualifier, computed once at enum creation.
  private final byte[] columnQualifierBytes;

  /**
   * Constructor for columns whose values need no special conversion; uses the
   * generic (pass-through) converter.
   */
  EntityColumn(ColumnFamily<EntityTable> columnFamily,
      String columnQualifier) {
    this(columnFamily, columnQualifier, GenericConverter.getInstance());
  }

  /**
   * @param columnFamily family the column is stored in.
   * @param columnQualifier plain-text column name.
   * @param converter encodes/decodes the cell value for this column.
   */
  EntityColumn(ColumnFamily<EntityTable> columnFamily,
      String columnQualifier, ValueConverter converter) {
    this.columnFamily = columnFamily;
    this.columnQualifier = columnQualifier;
    // Future-proof by ensuring the right column prefix hygiene: encode any
    // space characters before converting the qualifier to bytes.
    this.columnQualifierBytes =
        Bytes.toBytes(Separator.SPACE.encode(columnQualifier));
    this.column = new ColumnHelper<EntityTable>(columnFamily, converter);
  }

  /**
   * @return the column name value
   */
  private String getColumnQualifier() {
    return columnQualifier;
  }

  /**
   * Writes {@code inputValue} for this column into the given row.
   *
   * @param rowKey row to write to.
   * @param tableMutator buffered mutator for the entity table.
   * @param timestamp cell timestamp; may be null to let HBase assign one.
   * @param inputValue value to store, encoded by this column's converter.
   * @param attributes optional aggregation attributes forwarded to the helper.
   * @throws IOException if the underlying store fails.
   */
  public void store(byte[] rowKey,
      TypedBufferedMutator<EntityTable> tableMutator, Long timestamp,
      Object inputValue, Attribute... attributes) throws IOException {
    column.store(rowKey, tableMutator, columnQualifierBytes, timestamp,
        inputValue, attributes);
  }

  /**
   * Reads and decodes this column's latest value from a query result.
   *
   * @param result HBase result to read from.
   * @return the decoded value, or null if the column is absent.
   * @throws IOException if decoding fails.
   */
  public Object readResult(Result result) throws IOException {
    return column.readResult(result, columnQualifierBytes);
  }

  /**
   * Retrieve an {@link EntityColumn} given a name, or null if there is no
   * match. The following holds true: {@code columnFor(x) == columnFor(y)} if
   * and only if {@code x.equals(y)} or {@code (x == y == null)}
   *
   * @param columnQualifier Name of the column to retrieve
   * @return the corresponding {@link EntityColumn} or null
   */
  public static final EntityColumn columnFor(String columnQualifier) {

    // Match column based on value, assume column family matches.
    for (EntityColumn ec : EntityColumn.values()) {
      // Find a match based only on name.
      if (ec.getColumnQualifier().equals(columnQualifier)) {
        return ec;
      }
    }

    // Default to null
    return null;
  }

  @Override
  public byte[] getColumnQualifierBytes() {
    // Defensive copy so callers cannot mutate the cached qualifier bytes.
    return columnQualifierBytes.clone();
  }

  @Override
  public byte[] getColumnFamilyBytes() {
    return columnFamily.getBytes();
  }

  @Override
  public ValueConverter getValueConverter() {
    return column.getValueConverter();
  }

  /**
   * Retrieve an {@link EntityColumn} given a name, or null if there is no
   * match. The following holds true: {@code columnFor(a,x) == columnFor(b,y)}
   * if and only if {@code a.equals(b) & x.equals(y)} or
   * {@code (x == y == null)}
   *
   * @param columnFamily The columnFamily for which to retrieve the column.
   * @param name Name of the column to retrieve
   * @return the corresponding {@link EntityColumn} or null if both arguments
   *         don't match.
   */
  public static final EntityColumn columnFor(EntityColumnFamily columnFamily,
      String name) {

    for (EntityColumn ec : EntityColumn.values()) {
      // Find a match based column family and on name.
      if (ec.columnFamily.equals(columnFamily)
          && ec.getColumnQualifier().equals(name)) {
        return ec;
      }
    }

    // Default to null
    return null;
  }

}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnFamily.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnFamily.java deleted file mode 100644 index 7c63727..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnFamily.java +++ /dev/null @@ -1,65 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.server.timelineservice.storage.entity; - -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; - -/** - * Represents the entity table column families. - */ -public enum EntityColumnFamily implements ColumnFamily<EntityTable> { - - /** - * Info column family houses known columns, specifically ones included in - * columnfamily filters. - */ - INFO("i"), - - /** - * Configurations are in a separate column family for two reasons: a) the size - * of the config values can be very large and b) we expect that config values - * are often separately accessed from other metrics and info columns. - */ - CONFIGS("c"), - - /** - * Metrics have a separate column family, because they have a separate TTL. - */ - METRICS("m"); - - /** - * Byte representation of this column family. - */ - private final byte[] bytes; - - /** - * @param value create a column family with this name. Must be lower case and - * without spaces. - */ - EntityColumnFamily(String value) { - // column families should be lower case and not contain any spaces. 
- this.bytes = Bytes.toBytes(Separator.SPACE.encode(value)); - } - - public byte[] getBytes() { - return Bytes.copy(bytes); - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java deleted file mode 100644 index e410549..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java +++ /dev/null @@ -1,300 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
package org.apache.hadoop.yarn.server.timelineservice.storage.entity;

import java.io.IOException;
import java.util.Map;
import java.util.NavigableMap;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.GenericConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;

/**
 * Identifies partially qualified columns for the entity table. A constant
 * either carries a fixed prefix that is prepended to caller-supplied
 * qualifiers ({@link #IS_RELATED_TO}, {@link #RELATES_TO}, {@link #INFO},
 * {@link #EVENT}), or a null prefix meaning the caller's qualifier is the
 * entire column name ({@link #CONFIG}, {@link #METRIC}).
 */
public enum EntityColumnPrefix implements ColumnPrefix<EntityTable> {

  /**
   * To store TimelineEntity getIsRelatedToEntities values.
   */
  IS_RELATED_TO(EntityColumnFamily.INFO, "s"),

  /**
   * To store TimelineEntity getRelatesToEntities values.
   */
  RELATES_TO(EntityColumnFamily.INFO, "r"),

  /**
   * To store TimelineEntity info values.
   */
  INFO(EntityColumnFamily.INFO, "i"),

  /**
   * Lifecycle events for an entity.
   */
  EVENT(EntityColumnFamily.INFO, "e", true),

  /**
   * Config column stores configuration with config key as the column name.
   */
  CONFIG(EntityColumnFamily.CONFIGS, null),

  /**
   * Metrics are stored with the metric name as the column name.
   */
  METRIC(EntityColumnFamily.METRICS, null, new LongConverter());

  // Helper that performs the actual HBase store/read using the converter.
  private final ColumnHelper<EntityTable> column;
  private final ColumnFamily<EntityTable> columnFamily;

  /**
   * Can be null for those cases where the provided column qualifier is the
   * entire column name.
   */
  private final String columnPrefix;
  private final byte[] columnPrefixBytes;

  /**
   * Private constructor, meant to be used by the enum definition.
   *
   * @param columnFamily that this column is stored in.
   * @param columnPrefix for this column.
   */
  EntityColumnPrefix(ColumnFamily<EntityTable> columnFamily,
      String columnPrefix) {
    this(columnFamily, columnPrefix, false, GenericConverter.getInstance());
  }

  EntityColumnPrefix(ColumnFamily<EntityTable> columnFamily,
      String columnPrefix, boolean compoundColQual) {
    this(columnFamily, columnPrefix, compoundColQual,
        GenericConverter.getInstance());
  }

  EntityColumnPrefix(ColumnFamily<EntityTable> columnFamily,
      String columnPrefix, ValueConverter converter) {
    this(columnFamily, columnPrefix, false, converter);
  }

  /**
   * Private constructor, meant to be used by the enum definition.
   *
   * @param columnFamily that this column is stored in.
   * @param columnPrefix for this column; may be null, in which case the
   *          caller-supplied qualifier is used as the whole column name.
   * @param compoundColQual NOTE(review): currently unused — presumably
   *          reserved to mark compound column qualifiers (only EVENT passes
   *          true); confirm intent before removing.
   * @param converter used to encode/decode values to be stored in HBase for
   *          this column prefix.
   */
  EntityColumnPrefix(ColumnFamily<EntityTable> columnFamily,
      String columnPrefix, boolean compoundColQual, ValueConverter converter) {
    column = new ColumnHelper<EntityTable>(columnFamily, converter);
    this.columnFamily = columnFamily;
    this.columnPrefix = columnPrefix;
    if (columnPrefix == null) {
      this.columnPrefixBytes = null;
    } else {
      // Future-proof by ensuring the right column prefix hygiene.
      this.columnPrefixBytes =
          Bytes.toBytes(Separator.SPACE.encode(columnPrefix));
    }
  }

  /**
   * @return the column name value; null for CONFIG and METRIC.
   */
  public String getColumnPrefix() {
    return columnPrefix;
  }

  @Override
  public byte[] getColumnPrefixBytes(byte[] qualifierPrefix) {
    return ColumnHelper.getColumnQualifier(
        this.columnPrefixBytes, qualifierPrefix);
  }

  @Override
  public byte[] getColumnPrefixBytes(String qualifierPrefix) {
    return ColumnHelper.getColumnQualifier(
        this.columnPrefixBytes, qualifierPrefix);
  }

  @Override
  public byte[] getColumnFamilyBytes() {
    return columnFamily.getBytes();
  }

  @Override
  public ValueConverter getValueConverter() {
    return column.getValueConverter();
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix
   * #store(byte[],
   * org.apache.hadoop.yarn.server.timelineservice.storage.common.
   * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object,
   * org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute[])
   */
  public void store(byte[] rowKey,
      TypedBufferedMutator<EntityTable> tableMutator, String qualifier,
      Long timestamp, Object inputValue, Attribute... attributes)
      throws IOException {

    // Null check
    if (qualifier == null) {
      throw new IOException("Cannot store column with null qualifier in "
          + tableMutator.getName().getNameAsString());
    }

    byte[] columnQualifier = getColumnPrefixBytes(qualifier);

    column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue,
        attributes);
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix
   * #store(byte[],
   * org.apache.hadoop.yarn.server.timelineservice.storage.common.
   * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object)
   */
  public void store(byte[] rowKey,
      TypedBufferedMutator<EntityTable> tableMutator, byte[] qualifier,
      Long timestamp, Object inputValue, Attribute... attributes)
      throws IOException {

    // Null check
    if (qualifier == null) {
      throw new IOException("Cannot store column with null qualifier in "
          + tableMutator.getName().getNameAsString());
    }

    byte[] columnQualifier = getColumnPrefixBytes(qualifier);

    column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue,
        attributes);
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix
   * #readResult(org.apache.hadoop.hbase.client.Result, java.lang.String)
   */
  public Object readResult(Result result, String qualifier) throws IOException {
    byte[] columnQualifier =
        ColumnHelper.getColumnQualifier(this.columnPrefixBytes, qualifier);
    return column.readResult(result, columnQualifier);
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix
   * #readResults(org.apache.hadoop.hbase.client.Result,
   * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter)
   */
  public <K> Map<K, Object> readResults(Result result,
      KeyConverter<K> keyConverter) throws IOException {
    return column.readResults(result, columnPrefixBytes, keyConverter);
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix
   * #readResultsWithTimestamps(org.apache.hadoop.hbase.client.Result,
   * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter)
   */
  public <K, V> NavigableMap<K, NavigableMap<Long, V>>
      readResultsWithTimestamps(Result result, KeyConverter<K> keyConverter)
      throws IOException {
    return column.readResultsWithTimestamps(result, columnPrefixBytes,
        keyConverter);
  }

  /**
   * Retrieve an {@link EntityColumnPrefix} given a name, or null if there is no
   * match. The following holds true: {@code columnFor(x) == columnFor(y)} if
   * and only if {@code x.equals(y)} or {@code (x == y == null)}. A null
   * argument matches the first constant with a null prefix.
   *
   * @param columnPrefix Name of the column to retrieve
   * @return the corresponding {@link EntityColumnPrefix} or null
   */
  public static final EntityColumnPrefix columnFor(String columnPrefix) {

    // Match column based on value, assume column family matches.
    for (EntityColumnPrefix ecp : EntityColumnPrefix.values()) {
      // Null-safe comparison: CONFIG and METRIC have a null prefix, which
      // previously caused a NullPointerException for any non-matching name
      // once iteration reached them.
      if ((columnPrefix == null) ? (ecp.getColumnPrefix() == null)
          : columnPrefix.equals(ecp.getColumnPrefix())) {
        return ecp;
      }
    }

    // Default to null
    return null;
  }

  /**
   * Retrieve an {@link EntityColumnPrefix} given a name, or null if there is no
   * match. The following holds true: {@code columnFor(a,x) == columnFor(b,y)}
   * if and only if {@code (x == y == null)} or
   * {@code a.equals(b) & x.equals(y)}. A null prefix matches the constant in
   * the given family whose prefix is null (e.g. CONFIG, METRIC).
   *
   * @param columnFamily The columnFamily for which to retrieve the column.
   * @param columnPrefix Name of the column to retrieve
   * @return the corresponding {@link EntityColumnPrefix} or null if both
   *         arguments don't match.
   */
  public static final EntityColumnPrefix columnFor(
      EntityColumnFamily columnFamily, String columnPrefix) {

    for (EntityColumnPrefix ecp : EntityColumnPrefix.values()) {
      // Match on column family and on name, null-safely: the previous code
      // dereferenced ecp.getColumnPrefix() even when it was null, throwing
      // NullPointerException for e.g. columnFor(CONFIGS, "someKey").
      if (ecp.columnFamily.equals(columnFamily)
          && ((columnPrefix == null) ? (ecp.getColumnPrefix() == null)
              : columnPrefix.equals(ecp.getColumnPrefix()))) {
        return ecp;
      }
    }

    // Default to null
    return null;
  }
}
package org.apache.hadoop.yarn.server.timelineservice.storage.entity;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.AppIdKeyConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;

/**
 * Represents a rowkey for the entity table. The key layout is:
 * userName!clusterId!flowName!flowRunId!appId!entityType!entityIdPrefix!
 * entityId. Instances are immutable; encoding/decoding is delegated to the
 * private {@link EntityRowKeyConverter}.
 */
public class EntityRowKey {
  private final String clusterId;
  private final String userId;
  private final String flowName;
  private final Long flowRunId;
  private final String appId;
  private final String entityType;
  private final Long entityIdPrefix;
  private final String entityId;
  // Converter instance used by getRowKey(); stateless apart from its own
  // AppIdKeyConverter, so per-instance construction is cheap.
  private final KeyConverter<EntityRowKey> entityRowKeyConverter =
      new EntityRowKeyConverter();

  /**
   * @param clusterId identifying the cluster.
   * @param userId identifying the user.
   * @param flowName identifying the flow.
   * @param flowRunId identifying the individual run of this flow.
   * @param appId identifying the application.
   * @param entityType type of the entity; may be null to build a prefix.
   * @param entityIdPrefix numeric prefix for the entity id; may be null.
   * @param entityId id of the entity; may be null to build a prefix.
   */
  public EntityRowKey(String clusterId, String userId, String flowName,
      Long flowRunId, String appId, String entityType, Long entityIdPrefix,
      String entityId) {
    this.clusterId = clusterId;
    this.userId = userId;
    this.flowName = flowName;
    this.flowRunId = flowRunId;
    this.appId = appId;
    this.entityType = entityType;
    this.entityIdPrefix = entityIdPrefix;
    this.entityId = entityId;
  }

  public String getClusterId() {
    return clusterId;
  }

  public String getUserId() {
    return userId;
  }

  public String getFlowName() {
    return flowName;
  }

  public Long getFlowRunId() {
    return flowRunId;
  }

  public String getAppId() {
    return appId;
  }

  public String getEntityType() {
    return entityType;
  }

  public String getEntityId() {
    return entityId;
  }

  public Long getEntityIdPrefix() {
    return entityIdPrefix;
  }

  /**
   * Constructs a row key for the entity table as follows:
   * {@code userName!clusterId!flowName!flowRunId!AppId!entityType!entityId}.
   * Typically used while querying a specific entity.
   *
   * @return byte array with the row key.
   */
  public byte[] getRowKey() {
    return entityRowKeyConverter.encode(this);
  }

  /**
   * Given the raw row key as bytes, returns the row key as an object.
   *
   * @param rowKey byte representation of row key.
   * @return An <cite>EntityRowKey</cite> object.
   */
  public static EntityRowKey parseRowKey(byte[] rowKey) {
    return new EntityRowKeyConverter().decode(rowKey);
  }

  /**
   * Encodes and decodes row key for entity table. The row key is of the form :
   * userName!clusterId!flowName!flowRunId!appId!entityType!entityId. flowRunId
   * is a long, appId is encoded/decoded using {@link AppIdKeyConverter} and
   * rest are strings.
   * <p>
   */
  final private static class EntityRowKeyConverter implements
      KeyConverter<EntityRowKey> {

    private final AppIdKeyConverter appIDKeyConverter = new AppIdKeyConverter();

    private EntityRowKeyConverter() {
    }

    /**
     * Entity row key is of the form
     * userName!clusterId!flowName!flowRunId!appId!entityType!entityId w. each
     * segment separated by !. The sizes below indicate sizes of each one of
     * these segments in sequence. clusterId, userName, flowName, entityType and
     * entityId are strings. flowrunId is a long hence 8 bytes in size. app id
     * is represented as 12 bytes with cluster timestamp part of appid being 8
     * bytes (long) and seq id being 4 bytes(int). Strings are variable in size
     * (i.e. end whenever separator is encountered). This is used while decoding
     * and helps in determining where to split.
     */
    private static final int[] SEGMENT_SIZES = {Separator.VARIABLE_SIZE,
        Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
        AppIdKeyConverter.getKeySize(), Separator.VARIABLE_SIZE,
        Bytes.SIZEOF_LONG, Separator.VARIABLE_SIZE };

    /*
     * (non-Javadoc)
     *
     * Encodes EntityRowKey object into a byte array with each component/field
     * in EntityRowKey separated by Separator#QUALIFIERS. This leads to an
     * entity table row key of the form
     * userName!clusterId!flowName!flowRunId!appId!entityType!entityId If
     * entityType in passed EntityRowKey object is null (and the fields
     * preceding it i.e. clusterId, userId and flowName, flowRunId and appId
     * are not null), this returns a row key prefix of the form
     * userName!clusterId!flowName!flowRunId!appId! and if entityId in
     * EntityRowKey is null (other 6 components are not null), this returns a
     * row key prefix of the form
     * userName!clusterId!flowName!flowRunId!appId!entityType! flowRunId is
     * inverted while encoding as it helps maintain a descending order for row
     * keys in entity table.
     *
     * @see org.apache.hadoop.yarn.server.timelineservice.storage.common
     * .KeyConverter#encode(java.lang.Object)
     */
    @Override
    public byte[] encode(EntityRowKey rowKey) {
      // Each string segment is encoded so that separator characters occurring
      // inside the value cannot be confused with the actual separators.
      byte[] user =
          Separator.encode(rowKey.getUserId(), Separator.SPACE, Separator.TAB,
              Separator.QUALIFIERS);
      byte[] cluster =
          Separator.encode(rowKey.getClusterId(), Separator.SPACE,
              Separator.TAB, Separator.QUALIFIERS);
      byte[] flow =
          Separator.encode(rowKey.getFlowName(), Separator.SPACE,
              Separator.TAB, Separator.QUALIFIERS);
      byte[] first = Separator.QUALIFIERS.join(user, cluster, flow);
      // Note that flowRunId is a long, so we can't encode them all at the same
      // time.
      byte[] second =
          Bytes.toBytes(LongConverter.invertLong(rowKey.getFlowRunId()));
      byte[] third = appIDKeyConverter.encode(rowKey.getAppId());
      // Null entityType: produce a scan prefix ending at appId!.
      if (rowKey.getEntityType() == null) {
        return Separator.QUALIFIERS.join(first, second, third,
            Separator.EMPTY_BYTES);
      }
      byte[] entityType =
          Separator.encode(rowKey.getEntityType(), Separator.SPACE,
              Separator.TAB, Separator.QUALIFIERS);

      // Null entityIdPrefix: prefix ends at entityType!.
      if (rowKey.getEntityIdPrefix() == null) {
        return Separator.QUALIFIERS.join(first, second, third, entityType,
            Separator.EMPTY_BYTES);
      }

      byte[] enitityIdPrefix = Bytes.toBytes(rowKey.getEntityIdPrefix());

      // Null entityId: prefix ends at entityIdPrefix!.
      if (rowKey.getEntityId() == null) {
        return Separator.QUALIFIERS.join(first, second, third, entityType,
            enitityIdPrefix, Separator.EMPTY_BYTES);
      }

      byte[] entityId = Separator.encode(rowKey.getEntityId(), Separator.SPACE,
          Separator.TAB, Separator.QUALIFIERS);

      byte[] fourth =
          Separator.QUALIFIERS.join(entityType, enitityIdPrefix, entityId);

      return Separator.QUALIFIERS.join(first, second, third, fourth);
    }

    /*
     * (non-Javadoc)
     *
     * Decodes an application row key of the form
     * userName!clusterId!flowName!flowRunId!appId!entityType!entityId
     * represented in byte format and converts it into an EntityRowKey object.
     * flowRunId is inverted while decoding as it was inverted while encoding.
     *
     * @see
     * org.apache.hadoop.yarn.server.timelineservice.storage.common
     * .KeyConverter#decode(byte[])
     */
    @Override
    public EntityRowKey decode(byte[] rowKey) {
      // A full (non-prefix) key must split into exactly 8 components,
      // matching SEGMENT_SIZES above.
      byte[][] rowKeyComponents =
          Separator.QUALIFIERS.split(rowKey, SEGMENT_SIZES);
      if (rowKeyComponents.length != 8) {
        throw new IllegalArgumentException("the row key is not valid for "
            + "an entity");
      }
      String userId =
          Separator.decode(Bytes.toString(rowKeyComponents[0]),
              Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
      String clusterId =
          Separator.decode(Bytes.toString(rowKeyComponents[1]),
              Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
      String flowName =
          Separator.decode(Bytes.toString(rowKeyComponents[2]),
              Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
      // Un-invert flowRunId to recover the original value.
      Long flowRunId =
          LongConverter.invertLong(Bytes.toLong(rowKeyComponents[3]));
      String appId = appIDKeyConverter.decode(rowKeyComponents[4]);
      String entityType =
          Separator.decode(Bytes.toString(rowKeyComponents[5]),
              Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);

      Long entityPrefixId = Bytes.toLong(rowKeyComponents[6]);

      String entityId =
          Separator.decode(Bytes.toString(rowKeyComponents[7]),
              Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
      return new EntityRowKey(clusterId, userId, flowName, flowRunId, appId,
          entityType, entityPrefixId, entityId);
    }
  }
}
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKeyPrefix.java deleted file mode 100644 index 47a1789..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKeyPrefix.java +++ /dev/null @@ -1,77 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.timelineservice.storage.entity; - -import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix; - -/** - * Represents a partial rowkey without the entityId or without entityType and - * entityId for the entity table. - * - */ -public class EntityRowKeyPrefix extends EntityRowKey implements - RowKeyPrefix<EntityRowKey> { - - /** - * Creates a prefix which generates the following rowKeyPrefixes for the - * entity table: - * {@code userName!clusterId!flowName!flowRunId!AppId!entityType!}. 
- * @param clusterId identifying the cluster - * @param userId identifying the user - * @param flowName identifying the flow - * @param flowRunId identifying the individual run of this flow - * @param appId identifying the application - * @param entityType which entity type - * @param entityIdPrefix for entityId - * @param entityId for an entity - */ - public EntityRowKeyPrefix(String clusterId, String userId, String flowName, - Long flowRunId, String appId, String entityType, Long entityIdPrefix, - String entityId) { - super(clusterId, userId, flowName, flowRunId, appId, entityType, - entityIdPrefix, entityId); - } - - /** - * Creates a prefix which generates the following rowKeyPrefixes for the - * entity table: - * {@code userName!clusterId!flowName!flowRunId!AppId!entityType!entityId}. - * - * @param clusterId identifying the cluster - * @param userId identifying the user - * @param flowName identifying the flow - * @param flowRunId identifying the individual run of this flow - * @param appId identifying the application - */ - public EntityRowKeyPrefix(String clusterId, String userId, String flowName, - Long flowRunId, String appId) { - this(clusterId, userId, flowName, flowRunId, appId, null, null, null); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.application. 
- * RowKeyPrefix#getRowKeyPrefix() - */ - public byte[] getRowKeyPrefix() { - return super.getRowKey(); - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java deleted file mode 100644 index 027c8d5..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java +++ /dev/null @@ -1,161 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.server.timelineservice.storage.entity; - -import java.io.IOException; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HColumnDescriptor; -import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.regionserver.BloomType; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants; - -/** - * The entity table as column families info, config and metrics. Info stores - * information about a timeline entity object config stores configuration data - * of a timeline entity object metrics stores the metrics of a timeline entity - * object - * - * Example entity table record: - * - * <pre> - * |-------------------------------------------------------------------------| - * | Row | Column Family | Column Family| Column Family| - * | key | info | metrics | config | - * |-------------------------------------------------------------------------| - * | userName! | id:entityId | metricId1: | configKey1: | - * | clusterId! | | metricValue1 | configValue1 | - * | flowName! | type:entityType | @timestamp1 | | - * | flowRunId! | | | configKey2: | - * | AppId! | created_time: | metricId1: | configValue2 | - * | entityType!| 1392993084018 | metricValue2 | | - * | idPrefix! 
| | @timestamp2 | | - * | entityId | i!infoKey: | | | - * | | infoValue | metricId1: | | - * | | | metricValue1 | | - * | | r!relatesToKey: | @timestamp2 | | - * | | id3=id4=id5 | | | - * | | | | | - * | | s!isRelatedToKey | | | - * | | id7=id9=id6 | | | - * | | | | | - * | | e!eventId=timestamp=infoKey: | | | - * | | eventInfoValue | | | - * | | | | | - * | | flowVersion: | | | - * | | versionValue | | | - * |-------------------------------------------------------------------------| - * </pre> - */ -public class EntityTable extends BaseTable<EntityTable> { - /** entity prefix. */ - private static final String PREFIX = - YarnConfiguration.TIMELINE_SERVICE_PREFIX + "entity"; - - /** config param name that specifies the entity table name. */ - public static final String TABLE_NAME_CONF_NAME = PREFIX + ".table.name"; - - /** - * config param name that specifies the TTL for metrics column family in - * entity table. - */ - private static final String METRICS_TTL_CONF_NAME = PREFIX - + ".table.metrics.ttl"; - - /** default value for entity table name. */ - public static final String DEFAULT_TABLE_NAME = "timelineservice.entity"; - - /** default TTL is 30 days for metrics timeseries. */ - private static final int DEFAULT_METRICS_TTL = 2592000; - - /** default max number of versions. 
*/ - private static final int DEFAULT_METRICS_MAX_VERSIONS = 1000; - - private static final Log LOG = LogFactory.getLog(EntityTable.class); - - public EntityTable() { - super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable - * (org.apache.hadoop.hbase.client.Admin, - * org.apache.hadoop.conf.Configuration) - */ - public void createTable(Admin admin, Configuration hbaseConf) - throws IOException { - - TableName table = getTableName(hbaseConf); - if (admin.tableExists(table)) { - // do not disable / delete existing table - // similar to the approach taken by map-reduce jobs when - // output directory exists - throw new IOException("Table " + table.getNameAsString() - + " already exists."); - } - - HTableDescriptor entityTableDescp = new HTableDescriptor(table); - HColumnDescriptor infoCF = - new HColumnDescriptor(EntityColumnFamily.INFO.getBytes()); - infoCF.setBloomFilterType(BloomType.ROWCOL); - entityTableDescp.addFamily(infoCF); - - HColumnDescriptor configCF = - new HColumnDescriptor(EntityColumnFamily.CONFIGS.getBytes()); - configCF.setBloomFilterType(BloomType.ROWCOL); - configCF.setBlockCacheEnabled(true); - entityTableDescp.addFamily(configCF); - - HColumnDescriptor metricsCF = - new HColumnDescriptor(EntityColumnFamily.METRICS.getBytes()); - entityTableDescp.addFamily(metricsCF); - metricsCF.setBlockCacheEnabled(true); - // always keep 1 version (the latest) - metricsCF.setMinVersions(1); - metricsCF.setMaxVersions(DEFAULT_METRICS_MAX_VERSIONS); - metricsCF.setTimeToLive(hbaseConf.getInt(METRICS_TTL_CONF_NAME, - DEFAULT_METRICS_TTL)); - entityTableDescp.setRegionSplitPolicyClassName( - "org.apache.hadoop.hbase.regionserver.KeyPrefixRegionSplitPolicy"); - entityTableDescp.setValue("KeyPrefixRegionSplitPolicy.prefix_length", - TimelineHBaseSchemaConstants.USERNAME_SPLIT_KEY_PREFIX_LENGTH); - admin.createTable(entityTableDescp, - 
TimelineHBaseSchemaConstants.getUsernameSplits()); - LOG.info("Status of table creation for " + table.getNameAsString() + "=" - + admin.tableExists(table)); - } - - /** - * @param metricsTTL time to live parameter for the metricss in this table. - * @param hbaseConf configururation in which to set the metrics TTL config - * variable. - */ - public void setMetricsTTL(int metricsTTL, Configuration hbaseConf) { - hbaseConf.setInt(METRICS_TTL_CONF_NAME, metricsTTL); - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java deleted file mode 100644 index bb0e331..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Package org.apache.hadoop.yarn.server.timelineservice.storage.entity - * contains classes related to implementation for entity table. - */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -package org.apache.hadoop.yarn.server.timelineservice.storage.entity; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java deleted file mode 100644 index 4e2cf2d..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java +++ /dev/null @@ -1,63 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.timelineservice.storage.flow; - -import org.apache.hadoop.hbase.util.Bytes; - -/** - * Identifies the compaction dimensions for the data in the {@link FlowRunTable} - * . - */ -public enum AggregationCompactionDimension { - - /** - * the application id. - */ - APPLICATION_ID((byte) 101); - - private byte tagType; - private byte[] inBytes; - - private AggregationCompactionDimension(byte tagType) { - this.tagType = tagType; - this.inBytes = Bytes.toBytes(this.name()); - } - - public Attribute getAttribute(String attributeValue) { - return new Attribute(this.name(), Bytes.toBytes(attributeValue)); - } - - public byte getTagType() { - return tagType; - } - - public byte[] getInBytes() { - return this.inBytes.clone(); - } - - public static AggregationCompactionDimension - getAggregationCompactionDimension(String aggCompactDimStr) { - for (AggregationCompactionDimension aggDim : AggregationCompactionDimension - .values()) { - if (aggDim.name().equals(aggCompactDimStr)) { - return aggDim; - } - } - return null; - } -} 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java deleted file mode 100644 index 40cdd2c..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java +++ /dev/null @@ -1,94 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.timelineservice.storage.flow; - -import org.apache.hadoop.hbase.util.Bytes; - -/** - * Identifies the attributes to be set for puts into the {@link FlowRunTable}. 
- * The numbers used for tagType are prime numbers. - */ -public enum AggregationOperation { - - /** - * When the flow was started. - */ - GLOBAL_MIN((byte) 71), - - /** - * When it ended. - */ - GLOBAL_MAX((byte) 73), - - /** - * The metrics of the flow. - */ - SUM((byte) 79), - - /** - * application running. - */ - SUM_FINAL((byte) 83), - - /** - * Min value as per the latest timestamp - * seen for a given app. - */ - LATEST_MIN((byte) 89), - - /** - * Max value as per the latest timestamp - * seen for a given app. - */ - LATEST_MAX((byte) 97); - - private byte tagType; - private byte[] inBytes; - - private AggregationOperation(byte tagType) { - this.tagType = tagType; - this.inBytes = Bytes.toBytes(this.name()); - } - - public Attribute getAttribute() { - return new Attribute(this.name(), this.inBytes); - } - - public byte getTagType() { - return tagType; - } - - public byte[] getInBytes() { - return this.inBytes.clone(); - } - - /** - * returns the AggregationOperation enum that represents that string. - * @param aggOpStr Aggregation operation. 
- * @return the AggregationOperation enum that represents that string - */ - public static AggregationOperation getAggregationOperation(String aggOpStr) { - for (AggregationOperation aggOp : AggregationOperation.values()) { - if (aggOp.name().equals(aggOpStr)) { - return aggOp; - } - } - return null; - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/Attribute.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/Attribute.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/Attribute.java deleted file mode 100644 index d3de518..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/Attribute.java +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.timelineservice.storage.flow; - -/** - * Defines the attribute tuple to be set for puts into the {@link FlowRunTable}. - */ -public class Attribute { - private final String name; - private final byte[] value; - - public Attribute(String name, byte[] value) { - this.name = name; - this.value = value.clone(); - } - - public String getName() { - return name; - } - - public byte[] getValue() { - return value.clone(); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java deleted file mode 100644 index f9eb5b4..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java +++ /dev/null @@ -1,55 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.timelineservice.storage.flow; - -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; - -/** - * Represents the flow run table column families. - */ -public enum FlowActivityColumnFamily - implements ColumnFamily<FlowActivityTable> { - - /** - * Info column family houses known columns, specifically ones included in - * columnfamily filters. - */ - INFO("i"); - - /** - * Byte representation of this column family. - */ - private final byte[] bytes; - - /** - * @param value - * create a column family with this name. Must be lower case and - * without spaces. - */ - private FlowActivityColumnFamily(String value) { - // column families should be lower case and not contain any spaces. 
- this.bytes = Bytes.toBytes(Separator.SPACE.encode(value)); - } - - public byte[] getBytes() { - return Bytes.copy(bytes); - } - -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java deleted file mode 100644 index 439e0c8..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java +++ /dev/null @@ -1,277 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.server.timelineservice.storage.flow; - -import java.io.IOException; -import java.util.Map; -import java.util.NavigableMap; - -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter; - -/** - * Identifies partially qualified columns for the {@link FlowActivityTable}. - */ -public enum FlowActivityColumnPrefix - implements ColumnPrefix<FlowActivityTable> { - - /** - * To store run ids of the flows. - */ - RUN_ID(FlowActivityColumnFamily.INFO, "r", null); - - private final ColumnHelper<FlowActivityTable> column; - private final ColumnFamily<FlowActivityTable> columnFamily; - - /** - * Can be null for those cases where the provided column qualifier is the - * entire column name. - */ - private final String columnPrefix; - private final byte[] columnPrefixBytes; - - private final AggregationOperation aggOp; - - /** - * Private constructor, meant to be used by the enum definition. - * - * @param columnFamily - * that this column is stored in. - * @param columnPrefix - * for this column. 
- */ - private FlowActivityColumnPrefix( - ColumnFamily<FlowActivityTable> columnFamily, String columnPrefix, - AggregationOperation aggOp) { - this(columnFamily, columnPrefix, aggOp, false); - } - - private FlowActivityColumnPrefix( - ColumnFamily<FlowActivityTable> columnFamily, String columnPrefix, - AggregationOperation aggOp, boolean compoundColQual) { - column = new ColumnHelper<FlowActivityTable>(columnFamily); - this.columnFamily = columnFamily; - this.columnPrefix = columnPrefix; - if (columnPrefix == null) { - this.columnPrefixBytes = null; - } else { - // Future-proof by ensuring the right column prefix hygiene. - this.columnPrefixBytes = Bytes.toBytes(Separator.SPACE - .encode(columnPrefix)); - } - this.aggOp = aggOp; - } - - /** - * @return the column name value - */ - public String getColumnPrefix() { - return columnPrefix; - } - - @Override - public byte[] getColumnPrefixBytes(byte[] qualifierPrefix) { - return ColumnHelper.getColumnQualifier( - this.columnPrefixBytes, qualifierPrefix); - } - - @Override - public byte[] getColumnPrefixBytes(String qualifierPrefix) { - return ColumnHelper.getColumnQualifier( - this.columnPrefixBytes, qualifierPrefix); - } - - public byte[] getColumnPrefixBytes() { - return columnPrefixBytes.clone(); - } - - @Override - public byte[] getColumnFamilyBytes() { - return columnFamily.getBytes(); - } - - @Override - public ValueConverter getValueConverter() { - return column.getValueConverter(); - } - - public AggregationOperation getAttribute() { - return aggOp; - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. 
- * TypedBufferedMutator, byte[], java.lang.Long, java.lang.Object, - * org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute[]) - */ - @Override - public void store(byte[] rowKey, - TypedBufferedMutator<FlowActivityTable> tableMutator, byte[] qualifier, - Long timestamp, Object inputValue, Attribute... attributes) - throws IOException { - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - Attribute[] combinedAttributes = - HBaseTimelineStorageUtils.combineAttributes(attributes, this.aggOp); - column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue, - combinedAttributes); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResult(org.apache.hadoop.hbase.client.Result, java.lang.String) - */ - public Object readResult(Result result, String qualifier) throws IOException { - byte[] columnQualifier = ColumnHelper.getColumnQualifier( - this.columnPrefixBytes, qualifier); - return column.readResult(result, columnQualifier); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResults(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public <K> Map<K, Object> readResults(Result result, - KeyConverter<K> keyConverter) throws IOException { - return column.readResults(result, columnPrefixBytes, keyConverter); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #readResultsWithTimestamps(org.apache.hadoop.hbase.client.Result, - * org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter) - */ - public <K, V> NavigableMap<K, NavigableMap<Long, V>> - 
readResultsWithTimestamps(Result result, KeyConverter<K> keyConverter) - throws IOException { - return column.readResultsWithTimestamps(result, columnPrefixBytes, - keyConverter); - } - - /** - * Retrieve an {@link FlowActivityColumnPrefix} given a name, or null if there - * is no match. The following holds true: {@code columnFor(x) == columnFor(y)} - * if and only if {@code x.equals(y)} or {@code (x == y == null)} - * - * @param columnPrefix - * Name of the column to retrieve - * @return the corresponding {@link FlowActivityColumnPrefix} or null - */ - public static final FlowActivityColumnPrefix columnFor(String columnPrefix) { - - // Match column based on value, assume column family matches. - for (FlowActivityColumnPrefix flowActivityColPrefix : - FlowActivityColumnPrefix.values()) { - // Find a match based only on name. - if (flowActivityColPrefix.getColumnPrefix().equals(columnPrefix)) { - return flowActivityColPrefix; - } - } - // Default to null - return null; - } - - /** - * Retrieve an {@link FlowActivityColumnPrefix} given a name, or null if there - * is no match. The following holds true: - * {@code columnFor(a,x) == columnFor(b,y)} if and only if - * {@code (x == y == null)} or {@code a.equals(b) & x.equals(y)} - * - * @param columnFamily - * The columnFamily for which to retrieve the column. - * @param columnPrefix - * Name of the column to retrieve - * @return the corresponding {@link FlowActivityColumnPrefix} or null if both - * arguments don't match. - */ - public static final FlowActivityColumnPrefix columnFor( - FlowActivityColumnFamily columnFamily, String columnPrefix) { - - // TODO: needs unit test to confirm and need to update javadoc to explain - // null prefix case. - - for (FlowActivityColumnPrefix flowActivityColumnPrefix : - FlowActivityColumnPrefix.values()) { - // Find a match based column family and on name. 
- if (flowActivityColumnPrefix.columnFamily.equals(columnFamily) - && (((columnPrefix == null) && (flowActivityColumnPrefix - .getColumnPrefix() == null)) || (flowActivityColumnPrefix - .getColumnPrefix().equals(columnPrefix)))) { - return flowActivityColumnPrefix; - } - } - // Default to null - return null; - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix - * #store(byte[], - * org.apache.hadoop.yarn.server.timelineservice.storage.common. - * TypedBufferedMutator, java.lang.String, java.lang.Long, java.lang.Object, - * org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute[]) - */ - @Override - public void store(byte[] rowKey, - TypedBufferedMutator<FlowActivityTable> tableMutator, String qualifier, - Long timestamp, Object inputValue, Attribute... attributes) - throws IOException { - // Null check - if (qualifier == null) { - throw new IOException("Cannot store column with null qualifier in " - + tableMutator.getName().getNameAsString()); - } - - byte[] columnQualifier = getColumnPrefixBytes(qualifier); - Attribute[] combinedAttributes = - HBaseTimelineStorageUtils.combineAttributes(attributes, this.aggOp); - column.store(rowKey, tableMutator, columnQualifier, null, inputValue, - combinedAttributes); - } -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java deleted file mode 100644 index bb77e36..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java +++ /dev/null @@ -1,196 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.timelineservice.storage.flow; - -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator; - -/** - * Represents a rowkey for the flow activity table. 
- */ -public class FlowActivityRowKey { - - private final String clusterId; - private final Long dayTs; - private final String userId; - private final String flowName; - private final KeyConverter<FlowActivityRowKey> flowActivityRowKeyConverter = - new FlowActivityRowKeyConverter(); - - /** - * @param clusterId identifying the cluster - * @param dayTs to be converted to the top of the day timestamp - * @param userId identifying user - * @param flowName identifying the flow - */ - public FlowActivityRowKey(String clusterId, Long dayTs, String userId, - String flowName) { - this(clusterId, dayTs, userId, flowName, true); - } - - /** - * @param clusterId identifying the cluster - * @param timestamp when the flow activity happened. May be converted to the - * top of the day depending on the convertDayTsToTopOfDay argument. - * @param userId identifying user - * @param flowName identifying the flow - * @param convertDayTsToTopOfDay if true and timestamp isn't null, then - * timestamp will be converted to the top-of-the day timestamp - */ - protected FlowActivityRowKey(String clusterId, Long timestamp, String userId, - String flowName, boolean convertDayTsToTopOfDay) { - this.clusterId = clusterId; - if (convertDayTsToTopOfDay && (timestamp != null)) { - this.dayTs = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(timestamp); - } else { - this.dayTs = timestamp; - } - this.userId = userId; - this.flowName = flowName; - } - - public String getClusterId() { - return clusterId; - } - - public Long getDayTimestamp() { - return dayTs; - } - - public String getUserId() { - return userId; - } - - public String getFlowName() { - return flowName; - } - - /** - * Constructs a row key for the flow activity table as follows: - * {@code clusterId!dayTimestamp!user!flowName}. - * - * @return byte array for the row key - */ - public byte[] getRowKey() { - return flowActivityRowKeyConverter.encode(this); - } - - /** - * Given the raw row key as bytes, returns the row key as an object. 
- * - * @param rowKey Byte representation of row key. - * @return A <cite>FlowActivityRowKey</cite> object. - */ - public static FlowActivityRowKey parseRowKey(byte[] rowKey) { - return new FlowActivityRowKeyConverter().decode(rowKey); - } - - /** - * Encodes and decodes row key for flow activity table. The row key is of the - * form : clusterId!dayTimestamp!user!flowName. dayTimestamp(top of the day - * timestamp) is a long and rest are strings. - * <p> - */ - final private static class FlowActivityRowKeyConverter implements - KeyConverter<FlowActivityRowKey> { - - private FlowActivityRowKeyConverter() { - } - - /** - * The flow activity row key is of the form - * clusterId!dayTimestamp!user!flowName with each segment separated by !. - * The sizes below indicate sizes of each one of these segements in - * sequence. clusterId, user and flowName are strings. Top of the day - * timestamp is a long hence 8 bytes in size. Strings are variable in size - * (i.e. they end whenever separator is encountered). This is used while - * decoding and helps in determining where to split. - */ - private static final int[] SEGMENT_SIZES = {Separator.VARIABLE_SIZE, - Bytes.SIZEOF_LONG, Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE }; - - /* - * (non-Javadoc) - * - * Encodes FlowActivityRowKey object into a byte array with each - * component/field in FlowActivityRowKey separated by Separator#QUALIFIERS. - * This leads to an flow activity table row key of the form - * clusterId!dayTimestamp!user!flowName. If dayTimestamp in passed - * FlowActivityRowKey object is null and clusterId is not null, then this - * returns a row key prefix as clusterId! and if userId in - * FlowActivityRowKey is null (and the fields preceding it i.e. clusterId - * and dayTimestamp are not null), this returns a row key prefix as - * clusterId!dayTimeStamp! dayTimestamp is inverted while encoding as it - * helps maintain a descending order for row keys in flow activity table. 
- * - * @see org.apache.hadoop.yarn.server.timelineservice.storage.common - * .KeyConverter#encode(java.lang.Object) - */ - @Override - public byte[] encode(FlowActivityRowKey rowKey) { - if (rowKey.getDayTimestamp() == null) { - return Separator.QUALIFIERS.join(Separator.encode( - rowKey.getClusterId(), Separator.SPACE, Separator.TAB, - Separator.QUALIFIERS), Separator.EMPTY_BYTES); - } - if (rowKey.getUserId() == null) { - return Separator.QUALIFIERS.join(Separator.encode( - rowKey.getClusterId(), Separator.SPACE, Separator.TAB, - Separator.QUALIFIERS), Bytes.toBytes(LongConverter - .invertLong(rowKey.getDayTimestamp())), Separator.EMPTY_BYTES); - } - return Separator.QUALIFIERS.join(Separator.encode(rowKey.getClusterId(), - Separator.SPACE, Separator.TAB, Separator.QUALIFIERS), Bytes - .toBytes(LongConverter.invertLong(rowKey.getDayTimestamp())), - Separator.encode(rowKey.getUserId(), Separator.SPACE, Separator.TAB, - Separator.QUALIFIERS), Separator.encode(rowKey.getFlowName(), - Separator.SPACE, Separator.TAB, Separator.QUALIFIERS)); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.common - * .KeyConverter#decode(byte[]) - */ - @Override - public FlowActivityRowKey decode(byte[] rowKey) { - byte[][] rowKeyComponents = - Separator.QUALIFIERS.split(rowKey, SEGMENT_SIZES); - if (rowKeyComponents.length != 4) { - throw new IllegalArgumentException("the row key is not valid for " - + "a flow activity"); - } - String clusterId = - Separator.decode(Bytes.toString(rowKeyComponents[0]), - Separator.QUALIFIERS, Separator.TAB, Separator.SPACE); - Long dayTs = LongConverter.invertLong(Bytes.toLong(rowKeyComponents[1])); - String userId = - Separator.decode(Bytes.toString(rowKeyComponents[2]), - Separator.QUALIFIERS, Separator.TAB, Separator.SPACE); - String flowName = - Separator.decode(Bytes.toString(rowKeyComponents[3]), - Separator.QUALIFIERS, Separator.TAB, Separator.SPACE); - return new FlowActivityRowKey(clusterId, 
dayTs, userId, flowName); - } - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKeyPrefix.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKeyPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKeyPrefix.java deleted file mode 100644 index eb88e54..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKeyPrefix.java +++ /dev/null @@ -1,60 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.server.timelineservice.storage.flow; - -import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix; - -/** - * A prefix partial rowkey for flow activities. - */ -public class FlowActivityRowKeyPrefix extends FlowActivityRowKey implements - RowKeyPrefix<FlowActivityRowKey> { - - /** - * Constructs a row key prefix for the flow activity table as follows: - * {@code clusterId!dayTimestamp!}. - * - * @param clusterId Cluster Id. - * @param dayTs Start of the day timestamp. - */ - public FlowActivityRowKeyPrefix(String clusterId, Long dayTs) { - super(clusterId, dayTs, null, null, false); - } - - /** - * Constructs a row key prefix for the flow activity table as follows: - * {@code clusterId!}. - * - * @param clusterId identifying the cluster - */ - public FlowActivityRowKeyPrefix(String clusterId) { - super(clusterId, null, null, null, false); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.application. 
- * RowKeyPrefix#getRowKeyPrefix() - */ - public byte[] getRowKeyPrefix() { - return super.getRowKey(); - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/47ec7f92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java deleted file mode 100644 index 8a0430c..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java +++ /dev/null @@ -1,108 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.server.timelineservice.storage.flow; - -import java.io.IOException; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HColumnDescriptor; -import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.regionserver.BloomType; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable; - -/** - * The flow activity table has column family info - * Stores the daily activity record for flows - * Useful as a quick lookup of what flows were - * running on a given day - * - * Example flow activity table record: - * - * <pre> - * |-------------------------------------------| - * | Row key | Column Family | - * | | info | - * |-------------------------------------------| - * | clusterId! | r!runid1:version1 | - * | inv Top of | | - * | Day! | r!runid2:version7 | - * | userName! | | - * | flowName | | - * |-------------------------------------------| - * </pre> - */ -public class FlowActivityTable extends BaseTable<FlowActivityTable> { - /** flow activity table prefix. */ - private static final String PREFIX = - YarnConfiguration.TIMELINE_SERVICE_PREFIX + ".flowactivity"; - - /** config param name that specifies the flowactivity table name. */ - public static final String TABLE_NAME_CONF_NAME = PREFIX + ".table.name"; - - /** default value for flowactivity table name. */ - public static final String DEFAULT_TABLE_NAME = - "timelineservice.flowactivity"; - - private static final Log LOG = LogFactory.getLog(FlowActivityTable.class); - - /** default max number of versions. 
*/ - public static final int DEFAULT_METRICS_MAX_VERSIONS = Integer.MAX_VALUE; - - public FlowActivityTable() { - super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME); - } - - /* - * (non-Javadoc) - * - * @see - * org.apache.hadoop.yarn.server.timelineservice.storage.BaseTable#createTable - * (org.apache.hadoop.hbase.client.Admin, - * org.apache.hadoop.conf.Configuration) - */ - public void createTable(Admin admin, Configuration hbaseConf) - throws IOException { - - TableName table = getTableName(hbaseConf); - if (admin.tableExists(table)) { - // do not disable / delete existing table - // similar to the approach taken by map-reduce jobs when - // output directory exists - throw new IOException("Table " + table.getNameAsString() - + " already exists."); - } - - HTableDescriptor flowActivityTableDescp = new HTableDescriptor(table); - HColumnDescriptor infoCF = - new HColumnDescriptor(FlowActivityColumnFamily.INFO.getBytes()); - infoCF.setBloomFilterType(BloomType.ROWCOL); - flowActivityTableDescp.addFamily(infoCF); - infoCF.setMinVersions(1); - infoCF.setMaxVersions(DEFAULT_METRICS_MAX_VERSIONS); - - // TODO: figure the split policy before running in production - admin.createTable(flowActivityTableDescp); - LOG.info("Status of table creation for " + table.getNameAsString() + "=" - + admin.tableExists(table)); - } -} \ No newline at end of file --------------------------------------------------------------------- To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org