http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
new file mode 100644
index 0000000..d052d51
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.reader;
+
+import java.io.IOException;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper;
+import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
+
+/**
+ * Set of utility methods to be used by timeline reader web services.
+ */
+final class TimelineReaderWebServicesUtils {
+  private TimelineReaderWebServicesUtils() {
+  }
+
+  /**
+   * Parse a delimited string and convert it into a set of strings. For
+   * instance, if delimiter is ",", then the string should be represented as
+   * "value1,value2,value3".
+   * @param str delimited string.
+   * @param delimiter string is delimited by this delimiter.
+   * @return set of strings.
+   */
+  static Set<String> parseValuesStr(String str, String delimiter) {
+    if (str == null || str.isEmpty()) {
+      return null;
+    }
+    Set<String> strSet = new HashSet<String>();
+    String[] strs = str.split(delimiter);
+    for (String aStr : strs) {
+      strSet.add(aStr.trim());
+    }
+    return strSet;
+  }
+
+  @SuppressWarnings("unchecked")
+  private static <T> void parseKeyValues(Map<String, T> map, String str,
+      String pairsDelim, String keyValuesDelim, boolean stringValue,
+      boolean multipleValues) {
+    String[] pairs = str.split(pairsDelim);
+    for (String pair : pairs) {
+      if (pair == null || pair.trim().isEmpty()) {
+        continue;
+      }
+      String[] pairStrs = pair.split(keyValuesDelim);
+      if (pairStrs.length < 2) {
+        continue;
+      }
+      if (!stringValue) {
+        try {
+          Object value =
+              GenericObjectMapper.OBJECT_READER.readValue(pairStrs[1].trim());
+          map.put(pairStrs[0].trim(), (T) value);
+        } catch (IOException e) {
+          map.put(pairStrs[0].trim(), (T) pairStrs[1].trim());
+        }
+      } else {
+        String key = pairStrs[0].trim();
+        if (multipleValues) {
+          Set<String> values = new HashSet<String>();
+          for (int i = 1; i < pairStrs.length; i++) {
+            values.add(pairStrs[i].trim());
+          }
+          map.put(key, (T) values);
+        } else {
+          map.put(key, (T) pairStrs[1].trim());
+        }
+      }
+    }
+  }
+
+  /**
+   * Parse a delimited string and convert it into a map of key-values with each
+   * key having a set of values. Both the key and values are interpreted as
+   * strings.
+   * For instance, if pairsDelim is "," and keyValuesDelim is ":", then the
+   * string should be represented as
+   * "key1:value11:value12:value13,key2:value21,key3:value31:value32".
+   * @param str delimited string represented as multiple keys having multiple
+   *     values.
+   * @param pairsDelim key-values pairs are delimited by this delimiter.
+   * @param keyValuesDelim values for a key are delimited by this delimiter.
+   * @return a map of key-values with each key having a set of values.
+   */
+  static Map<String, Set<String>> parseKeyStrValuesStr(String str,
+      String pairsDelim, String keyValuesDelim) {
+    if (str == null) {
+      return null;
+    }
+    Map<String, Set<String>> map = new HashMap<String, Set<String>>();
+    parseKeyValues(map, str, pairsDelim, keyValuesDelim, true, true);
+    return map;
+  }
+
+  /**
+   * Parse a delimited string and convert it into a map of key-value pairs with
+   * both the key and value interpreted as strings.
+   * For instance, if pairsDelim is "," and keyValDelim is ":", then the string
+   * should be represented as "key1:value1,key2:value2,key3:value3".
+   * @param str delimited string represented as key-value pairs.
+   * @param pairsDelim key-value pairs are delimited by this delimiter.
+   * @param keyValDelim key and value are delimited by this delimiter.
+   * @return a map of key-value pairs with both key and value being strings.
+   */
+  static Map<String, String> parseKeyStrValueStr(String str,
+      String pairsDelim, String keyValDelim) {
+    if (str == null) {
+      return null;
+    }
+    Map<String, String> map = new HashMap<String, String>();
+    parseKeyValues(map, str, pairsDelim, keyValDelim, true, false);
+    return map;
+  }
+
+  /**
+   * Parse a delimited string and convert it into a map of key-value pairs with
+   * key being a string and value interpreted as any object.
+   * For instance, if pairsDelim is "," and keyValDelim is ":", then the string
+   * should be represented as "key1:value1,key2:value2,key3:value3".
+   * @param str delimited string represented as key-value pairs.
+   * @param pairsDelim key-value pairs are delimited by this delimiter.
+   * @param keyValDelim key and value are delimited by this delimiter.
+   * @return a map of key-value pairs with key being a string and value any
+   *     object.
+   */
+  static Map<String, Object> parseKeyStrValueObj(String str,
+      String pairsDelim, String keyValDelim) {
+    if (str == null) {
+      return null;
+    }
+    Map<String, Object> map = new HashMap<String, Object>();
+    parseKeyValues(map, str, pairsDelim, keyValDelim, false, false);
+    return map;
+  }
+
+  /**
+   * Interprets passed string as set of fields delimited by passed delimiter.
+   * For instance, if delimiter is ",", then the passed string should be
+   * represented as "METRICS,CONFIGS" where the delimited parts of the string
+   * present in {@link Field}.
+   * @param str passed string.
+   * @param delimiter string delimiter.
+   * @return a set of {@link Field}.
+   */
+  static EnumSet<Field> parseFieldsStr(String str, String delimiter) {
+    if (str == null) {
+      return null;
+    }
+    String[] strs = str.split(delimiter);
+    EnumSet<Field> fieldList = EnumSet.noneOf(Field.class);
+    for (String s : strs) {
+      fieldList.add(Field.valueOf(s.trim().toUpperCase()));
+    }
+    return fieldList;
+  }
+
+  /**
+   * Interpret passed string as a long.
+   * @param str Passed string.
+   * @return long representation if string is not null, null otherwise.
+   */
+  static Long parseLongStr(String str) {
+    return str == null ? null : Long.parseLong(str.trim());
+  }
+
+  /**
+   * Trims the passed string if it is not null.
+   * @param str Passed string.
+   * @return trimmed string if string is not null, null otherwise.
+   */
+  static String parseStr(String str) {
+    return str == null ? null : str.trim();
+  }
+
+  /**
+   * Get UGI from HTTP request.
+   * @param req HTTP request.
+   * @return UGI.
+   */
+  static UserGroupInformation getUser(HttpServletRequest req) {
+    String remoteUser = req.getRemoteUser();
+    UserGroupInformation callerUGI = null;
+    if (remoteUser != null) {
+      callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
+    }
+    return callerUGI;
+  }
+
+  /**
+   * Get username from caller UGI.
+   * @param callerUGI caller UGI.
+   * @return username.
+   */
+  static String getUserName(UserGroupInformation callerUGI) {
+    return ((callerUGI != null) ? callerUGI.getUserName().trim() : "");
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
new file mode 100644
index 0000000..cac3f06
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
@@ -0,0 +1,245 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.reader;
+
+import java.util.List;
+
+/**
+ * Used for encoding/decoding UID which will be used for query by UI.
+ */
+enum TimelineUIDConverter {
+  // Flow UID should contain cluster, user and flow name.
+  FLOW_UID {
+    @Override
+    String encodeUID(TimelineReaderContext context) {
+      if (context == null) {
+        return null;
+      }
+      if (context.getClusterId() == null || context.getUserId() == null ||
+          context.getFlowName() == null) {
+        return null;
+      }
+      String[] flowNameTupleArr = {context.getClusterId(), context.getUserId(),
+          context.getFlowName()};
+      return joinAndEscapeUIDParts(flowNameTupleArr);
+    }
+
+    @Override
+    TimelineReaderContext decodeUID(String uId) throws Exception {
+      if (uId == null) {
+        return null;
+      }
+      List<String> flowNameTupleList = splitUID(uId);
+      // Should have 3 parts i.e. cluster, user and flow name.
+      if (flowNameTupleList.size() != 3) {
+        return null;
+      }
+      return new TimelineReaderContext(flowNameTupleList.get(0),
+          flowNameTupleList.get(1), flowNameTupleList.get(2), null,
+          null, null, null);
+    }
+  },
+
+  // Flowrun UID should contain cluster, user, flow name and flowrun id.
+  FLOWRUN_UID{
+    @Override
+    String encodeUID(TimelineReaderContext context) {
+      if (context == null) {
+        return null;
+      }
+      if (context.getClusterId() == null || context.getUserId() == null ||
+          context.getFlowName() == null || context.getFlowRunId() == null) {
+        return null;
+      }
+      String[] flowRunTupleArr = {context.getClusterId(), context.getUserId(),
+          context.getFlowName(), context.getFlowRunId().toString()};
+      return joinAndEscapeUIDParts(flowRunTupleArr);
+    }
+
+    @Override
+    TimelineReaderContext decodeUID(String uId) throws Exception {
+      if (uId == null) {
+        return null;
+      }
+      List<String> flowRunTupleList = splitUID(uId);
+      // Should have 4 parts i.e. cluster, user, flow name and flowrun id.
+      if (flowRunTupleList.size() != 4) {
+        return null;
+      }
+      return new TimelineReaderContext(flowRunTupleList.get(0),
+          flowRunTupleList.get(1), flowRunTupleList.get(2),
+          Long.parseLong(flowRunTupleList.get(3)), null, null, null);
+    }
+  },
+
+  // Application UID should contain cluster, user, flow name, flowrun id
+  // and app id OR cluster and app id(i.e.without flow context info).
+  APPLICATION_UID{
+    @Override
+    String encodeUID(TimelineReaderContext context) {
+      if (context == null) {
+        return null;
+      }
+      if (context.getClusterId() == null || context.getAppId() == null) {
+        return null;
+      }
+      if (context.getUserId() != null && context.getFlowName() != null &&
+          context.getFlowRunId() != null) {
+        // Flow information exists.
+        String[] appTupleArr = {context.getClusterId(), context.getUserId(),
+            context.getFlowName(), context.getFlowRunId().toString(),
+            context.getAppId()};
+        return joinAndEscapeUIDParts(appTupleArr);
+      } else {
+        // Only cluster and app information exists. Flow info does not exist.
+        String[] appTupleArr = {context.getClusterId(), context.getAppId()};
+        return joinAndEscapeUIDParts(appTupleArr);
+      }
+    }
+
+    @Override
+    TimelineReaderContext decodeUID(String uId) throws Exception {
+      if (uId == null) {
+        return null;
+      }
+      List<String> appTupleList = splitUID(uId);
+      // Should have 5 parts i.e. cluster, user, flow name, flowrun id
+      // and app id OR should have 2 parts i.e. cluster and app id.
+      if (appTupleList.size() == 5) {
+        // Flow information exists.
+        return new TimelineReaderContext(appTupleList.get(0),
+            appTupleList.get(1), appTupleList.get(2),
+            Long.parseLong(appTupleList.get(3)), appTupleList.get(4),
+            null, null);
+      } else if (appTupleList.size() == 2) {
+        // Flow information does not exist.
+        return new TimelineReaderContext(appTupleList.get(0), null, null, null,
+            appTupleList.get(1), null, null);
+      } else {
+        return null;
+      }
+    }
+  },
+
+  // Generic Entity UID should contain cluster, user, flow name, flowrun id,
+  // app id, entity type and entity id OR should contain cluster, appid, entity
+  // type and entity id(i.e.without flow context info).
+  GENERIC_ENTITY_UID {
+    @Override
+    String encodeUID(TimelineReaderContext context) {
+      if (context == null) {
+        return null;
+      }
+      if (context.getClusterId() == null || context.getAppId() == null ||
+          context.getEntityType() == null || context.getEntityId() == null) {
+        return null;
+      }
+      if (context.getUserId() != null && context.getFlowName() != null &&
+          context.getFlowRunId() != null) {
+        // Flow information exists.
+        String[] entityTupleArr = {context.getClusterId(), context.getUserId(),
+            context.getFlowName(), context.getFlowRunId().toString(),
+            context.getAppId(), context.getEntityType(), context.getEntityId()};
+        return joinAndEscapeUIDParts(entityTupleArr);
+      } else {
+        // Only entity and app information exists. Flow info does not exist.
+        String[] entityTupleArr = {context.getClusterId(), context.getAppId(),
+            context.getEntityType(), context.getEntityId()};
+        return joinAndEscapeUIDParts(entityTupleArr);
+      }
+    }
+
+    @Override
+    TimelineReaderContext decodeUID(String uId) throws Exception {
+      if (uId == null) {
+        return null;
+      }
+      List<String> entityTupleList = splitUID(uId);
+      // Should have 7 parts i.e. cluster, user, flow name, flowrun id, app id,
+      // entity type and entity id OR should have 4 parts i.e. cluster, app id,
+      // entity type and entity id.
+      if (entityTupleList.size() == 7) {
+        // Flow information exists.
+        return new TimelineReaderContext(entityTupleList.get(0),
+            entityTupleList.get(1), entityTupleList.get(2),
+            Long.parseLong(entityTupleList.get(3)), entityTupleList.get(4),
+            entityTupleList.get(5), entityTupleList.get(6));
+      } else if (entityTupleList.size() == 4) {
+        // Flow information does not exist.
+        return new TimelineReaderContext(entityTupleList.get(0), null, null,
+            null, entityTupleList.get(1), entityTupleList.get(2),
+            entityTupleList.get(3));
+      } else {
+        return null;
+      }
+    }
+  };
+
+  /**
+   * Delimiter used for UID.
+   */
+  public static final char UID_DELIMITER_CHAR = '!';
+
+  /**
+   * Escape Character used if delimiter or escape character itself is part of
+   * different components of UID.
+   */
+  public static final char UID_ESCAPE_CHAR = '*';
+
+  /**
+   * Split UID using {@link #UID_DELIMITER_CHAR} and {@link #UID_ESCAPE_CHAR}.
+   * @param uid UID to be split.
+   * @return a list of different parts of UID split across delimiter.
+   * @throws IllegalArgumentException if UID is not properly escaped.
+   */
+  private static List<String> splitUID(String uid)
+      throws IllegalArgumentException {
+    return TimelineReaderUtils.split(uid, UID_DELIMITER_CHAR, UID_ESCAPE_CHAR);
+  }
+
+  /**
+   * Join different parts of UID delimited by {@link #UID_DELIMITER_CHAR} with
+   * delimiter and escape character escaped using {@link #UID_ESCAPE_CHAR} if
+   * UID parts contain them.
+   * @param parts an array of UID parts to be joined.
+   * @return a string joined using the delimiter with escape and delimiter
+   *     characters escaped if they are part of the string parts to be joined.
+   *     Returns null if one of the parts is null.
+   */
+  private static String joinAndEscapeUIDParts(String[] parts) {
+    return TimelineReaderUtils.joinAndEscapeStrings(parts, UID_DELIMITER_CHAR,
+        UID_ESCAPE_CHAR);
+  }
+
+  /**
+   * Encodes UID depending on UID implementation.
+   * @param context context to be encoded as a UID.
+   * @return UID represented as a string.
+   */
+  abstract String encodeUID(TimelineReaderContext context);
+
+  /**
+   * Decodes UID depending on UID implementation.
+   * @param uId UID to be decoded.
+   * @return a {@link TimelineReaderContext} object if UID passed can be
+   * decoded, null otherwise.
+   * @throws Exception if UID cannot be decoded.
+   */
+  abstract TimelineReaderContext decodeUID(String uId) throws Exception;
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
index a5a271e..0eeb195 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
@@ -26,6 +26,8 @@ import java.util.Set;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.service.Service;
+import org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity;
+import org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;
@@ -87,10 +89,14 @@ public interface TimelineReader extends Service {
    * @param fieldsToRetrieve
    *    Specifies which fields of the entity object to retrieve(optional), see
    *    {@link Field}. If null, retrieves 4 fields namely entity id,
-   *    entity type and entity created time. All entities will be returned if
+   *    entity type and entity created time. All fields will be returned if
    *    {@link Field#ALL} is specified.
    * @return a {@link TimelineEntity} instance or null. The entity will
    *    contain the metadata plus the given fields to retrieve.
+   *    If entityType is YARN_FLOW_RUN, entity returned is of type
+   *    {@link FlowRunEntity}.
+   *    For all other entity types, entity returned is of type
+   *    {@link TimelineEntity}.
    * @throws IOException
    */
   TimelineEntity getEntity(String userId, String clusterId, String flowName,
@@ -167,12 +173,18 @@ public interface TimelineReader extends Service {
    * @param fieldsToRetrieve
    *    Specifies which fields of the entity object to retrieve(optional), see
    *    {@link Field}. If null, retrieves 4 fields namely entity id,
-   *    entity type and entity created time. All entities will be returned if
+   *    entity type and entity created time. All fields will be returned if
    *    {@link Field#ALL} is specified.
    * @return A set of {@link TimelineEntity} instances of the given entity type
    *    in the given context scope which matches the given predicates
    *    ordered by created time, descending. Each entity will only contain the
    *    metadata(id, type and created time) plus the given fields to retrieve.
+   *    If entityType is YARN_FLOW_ACTIVITY, entities returned are of type
+   *    {@link FlowActivityEntity}.
+   *    If entityType is YARN_FLOW_RUN, entities returned are of type
+   *    {@link FlowRunEntity}.
+   *    For all other entity types, entities returned are of type
+   *    {@link TimelineEntity}.
    * @throws IOException
    */
   Set<TimelineEntity> getEntities(String userId, String clusterId,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
index 990856f..237b9ac 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
@@ -60,6 +60,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumn
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumnPrefix;
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey;
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
+import org.apache.hadoop.yarn.webapp.NotFoundException;
 
 import com.google.common.base.Preconditions;
 
@@ -195,9 +196,9 @@ class GenericEntityReader extends TimelineEntityReader {
          AppToFlowColumn.FLOW_ID.readResult(result).toString(),
          ((Number)AppToFlowColumn.FLOW_RUN_ID.readResult(result)).longValue());
     } else {
-       throw new IOException(
-           "Unable to find the context flow ID and flow run ID for clusterId=" +
-           clusterId + ", appId=" + appId);
+      throw new NotFoundException(
+          "Unable to find the context flow ID and flow run ID for clusterId=" +
+          clusterId + ", appId=" + appId);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java
new file mode 100644
index 0000000..791d6ab
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.reader;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+import java.util.List;
+
+import org.junit.Test;
+
+public class TestTimelineReaderUtils {
+
+  @Test
+  public void testSplitUsingEscapeAndDelimChar() throws Exception {
+    List<String> list =
+        TimelineReaderUtils.split("*!cluster!*!b**o***!xer!oozie**", '!', '*');
+    String[] arr = new String[list.size()];
+    arr = list.toArray(arr);
+    assertArrayEquals(new String[] {"!cluster", "!b*o*!xer", "oozie*"}, arr);
+    list = TimelineReaderUtils.split("*!cluster!*!b**o***!xer!!", '!', '*');
+    arr = new String[list.size()];
+    arr = list.toArray(arr);
+    assertArrayEquals(new String[] {"!cluster", "!b*o*!xer", "", ""}, arr);
+  }
+
+  @Test
+  public void testJoinAndEscapeStrings() throws Exception {
+    assertEquals("*!cluster!*!b**o***!xer!oozie**",
+        TimelineReaderUtils.joinAndEscapeStrings(
+            new String[] { "!cluster", "!b*o*!xer", "oozie*"}, '!', '*'));
+    assertEquals("*!cluster!*!b**o***!xer!!",
+        TimelineReaderUtils.joinAndEscapeStrings(
+            new String[] { "!cluster", "!b*o*!xer", "", ""}, '!', '*'));
+    assertNull(TimelineReaderUtils.joinAndEscapeStrings(
+        new String[] { "!cluster", "!b*o*!xer", null, ""}, '!', '*'));
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
index 1aefdca..b6e23a9 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
@@ -166,7 +166,7 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/entity/cluster1/app1/app/id_1");
+          "timeline/clusters/cluster1/apps/app1/entities/app/id_1");
       ClientResponse resp = getResponse(client, uri);
       TimelineEntity entity = resp.getEntity(TimelineEntity.class);
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -188,8 +188,8 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/entity/cluster1/app1/app/id_1?userid=user1&" +
-          "flowname=flow1&flowrunid=1");
+          "timeline/clusters/cluster1/apps/app1/entities/app/id_1?" +
+          "userid=user1&flowname=flow1&flowrunid=1");
       ClientResponse resp = getResponse(client, uri);
       TimelineEntity entity = resp.getEntity(TimelineEntity.class);
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -208,7 +208,8 @@ public class TestTimelineReaderWebServices {
     try {
       // Fields are case insensitive.
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/entity/cluster1/app1/app/id_1?fields=CONFIGS,Metrics,info");
+          "timeline/clusters/cluster1/apps/app1/entities/app/id_1?" +
+          "fields=CONFIGS,Metrics,info");
       ClientResponse resp = getResponse(client, uri);
       TimelineEntity entity = resp.getEntity(TimelineEntity.class);
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -217,7 +218,10 @@ public class TestTimelineReaderWebServices {
       assertEquals("app", entity.getType());
       assertEquals(3, entity.getConfigs().size());
       assertEquals(3, entity.getMetrics().size());
-      assertEquals(1, entity.getInfo().size());
+      assertTrue("UID should be present",
+          entity.getInfo().containsKey(TimelineReaderManager.UID_KEY));
+      // Includes UID.
+      assertEquals(2, entity.getInfo().size());
       // No events will be returned as events are not part of fields.
       assertEquals(0, entity.getEvents().size());
     } finally {
@@ -230,7 +234,8 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entity/cluster1/app1/app/id_1?fields=ALL");
+          "timeline/clusters/cluster1/apps/app1/entities/app/id_1?" +
+          "fields=ALL");
       ClientResponse resp = getResponse(client, uri);
       TimelineEntity entity = resp.getEntity(TimelineEntity.class);
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -239,7 +244,10 @@ public class TestTimelineReaderWebServices {
       assertEquals("app", entity.getType());
       assertEquals(3, entity.getConfigs().size());
       assertEquals(3, entity.getMetrics().size());
-      assertEquals(1, entity.getInfo().size());
+      assertTrue("UID should be present",
+          entity.getInfo().containsKey(TimelineReaderManager.UID_KEY));
+      // Includes UID.
+      assertEquals(2, entity.getInfo().size());
       assertEquals(2, entity.getEvents().size());
     } finally {
       client.destroy();
@@ -251,7 +259,7 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entity/cluster1/app1/app/id_10");
+          "timeline/clusters/cluster1/apps/app1/entities/app/id_10");
       verifyHttpResponse(client, uri, Status.NOT_FOUND);
     } finally {
       client.destroy();
@@ -263,7 +271,7 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entity/app1/app/id_1");
+          "timeline/apps/app1/entities/app/id_1");
       ClientResponse resp = getResponse(client, uri);
       TimelineEntity entity = resp.getEntity(TimelineEntity.class);
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -272,7 +280,7 @@ public class TestTimelineReaderWebServices {
       assertEquals("app", entity.getType());
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entities/app1/app");
+          "timeline/apps/app1/entities/app");
       resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -289,7 +297,7 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entities/cluster1/app1/app");
+          "timeline/clusters/cluster1/apps/app1/entities/app");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -312,7 +320,7 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entities/cluster1/app1/app?limit=2");
+          "timeline/clusters/cluster1/apps/app1/entities/app?limit=2");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -326,7 +334,7 @@ public class TestTimelineReaderWebServices {
           entities.contains(newEntity("app", "id_4")));
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/" +
-          "entities/cluster1/app1/app?limit=3");
+          "clusters/cluster1/apps/app1/entities/app?limit=3");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -344,8 +352,8 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entities/cluster1/app1/app?createdtimestart=1425016502030&"
-          + "createdtimeend=1425016502060");
+          "timeline/clusters/cluster1/apps/app1/entities/app?" +
+          "createdtimestart=1425016502030&createdtimeend=1425016502060");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -356,7 +364,8 @@ public class TestTimelineReaderWebServices {
           entities.contains(newEntity("app", "id_4")));
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/" +
-          "entities/cluster1/app1/app?createdtimeend=1425016502010");
+          "clusters/cluster1/apps/app1/entities/app?createdtimeend" +
+          "=1425016502010");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -366,7 +375,8 @@ public class TestTimelineReaderWebServices {
           entities.contains(newEntity("app", "id_4")));
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/" +
-          "entities/cluster1/app1/app?createdtimestart=1425016502010");
+          "clusters/cluster1/apps/app1/entities/app?createdtimestart=" +
+          "1425016502010");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -384,7 +394,8 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entities/cluster1/app1/app?relatesto=flow:flow1");
+          "timeline/clusters/cluster1/apps/app1/entities/app?relatesto=" +
+          "flow:flow1");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -395,8 +406,8 @@ public class TestTimelineReaderWebServices {
           entities.contains(newEntity("app", "id_1")));
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/" +
-          "entities/cluster1/app1/app?isrelatedto=type1:tid1_2,type2:" +
-          "tid2_1%60");
+          "clusters/cluster1/apps/app1/entities/app?isrelatedto=" +
+          "type1:tid1_2,type2:tid2_1%60");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -406,8 +417,8 @@ public class TestTimelineReaderWebServices {
           entities.contains(newEntity("app", "id_1")));
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/" +
-          "entities/cluster1/app1/app?isrelatedto=type1:tid1_1:tid1_2" +
-          ",type2:tid2_1%60");
+          "clusters/cluster1/apps/app1/entities/app?isrelatedto=" +
+          "type1:tid1_1:tid1_2,type2:tid2_1%60");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -425,8 +436,8 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entities/cluster1/app1/app?conffilters=config_1:123," +
-          "config_3:abc");
+          "timeline/clusters/cluster1/apps/app1/entities/app?" +
+          "conffilters=config_1:123,config_3:abc");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -447,7 +458,8 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entities/cluster1/app1/app?infofilters=info2:3.5");
+          "timeline/clusters/cluster1/apps/app1/entities/app?" +
+          "infofilters=info2:3.5");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -466,7 +478,8 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entities/cluster1/app1/app?metricfilters=metric3");
+          "timeline/clusters/cluster1/apps/app1/entities/app?" +
+          "metricfilters=metric3");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -487,7 +500,8 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entities/cluster1/app1/app?eventfilters=event_2,event_4");
+          "timeline/clusters/cluster1/apps/app1/entities/app?" +
+          "eventfilters=event_2,event_4");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -506,10 +520,11 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entities/cluster1/app1/app?metricfilters=metric7&" +
-          "isrelatedto=type1:tid1_1;tid1_2,type2:tid2_1%60&relatesto=" +
-          "flow:flow1&eventfilters=event_2,event_4&infofilters=info2:3.5" +
-          "&createdtimestart=1425016502030&createdtimeend=1425016502060");
+          "timeline/clusters/cluster1/apps/app1/entities/app?" +
+          "metricfilters=metric7&isrelatedto=type1:tid1_1;tid1_2,type2:tid2_1" +
+          "%60&relatesto=flow:flow1&eventfilters=event_2,event_4&infofilters=" +
+          "info2:3.5&createdtimestart=1425016502030&createdtimeend=" +
+          "1425016502060");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -526,15 +541,15 @@ public class TestTimelineReaderWebServices {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/entities/cluster1/app1/app?flowrunid=a23b");
+          "timeline/clusters/cluster1/apps/app1/entities/app?flowrunid=a23b");
       verifyHttpResponse(client, uri, Status.BAD_REQUEST);
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/" +
-          "entity/cluster1/app1/app/id_1?flowrunid=2ab15");
+          "clusters/cluster1/apps/app1/entities/app/id_1?flowrunid=2ab15");
       verifyHttpResponse(client, uri, Status.BAD_REQUEST);
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/" +
-          "entities/cluster1/app1/app/?limit=#$561av");
+          "clusters/cluster1/apps/app1/entities/app?limit=#$561av");
       verifyHttpResponse(client, uri, Status.BAD_REQUEST);
     } finally {
       client.destroy();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index 0918d40..9eaa3de 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -28,8 +28,10 @@ import java.net.HttpURLConnection;
 import java.net.URI;
 import java.net.URL;
 import java.text.DateFormat;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
@@ -349,7 +351,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flowrun/user1/cluster1/flow_name/1002345678919");
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/runs/" +
+          "1002345678919");
       ClientResponse resp = getResponse(client, uri);
       FlowRunEntity entity = resp.getEntity(FlowRunEntity.class);
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -366,7 +369,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
 
       // Query without specifying cluster ID.
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flowrun/user1/flow_name/1002345678919");
+          "timeline/users/user1/flows/flow_name/runs/1002345678919");
       resp = getResponse(client, uri);
       entity = resp.getEntity(FlowRunEntity.class);
       assertNotNull(entity);
@@ -390,7 +393,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flowruns/user1/cluster1/flow_name");
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/runs");
       ClientResponse resp = getResponse(client, uri);
       Set<FlowRunEntity> entities =
           resp.getEntity(new GenericType<Set<FlowRunEntity>>(){});
@@ -408,8 +411,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
         assertEquals(0, entity.getMetrics().size());
       }
 
-      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flowruns/user1/cluster1/flow_name?limit=1");
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/" +
+          "clusters/cluster1/users/user1/flows/flow_name/runs?limit=1");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<FlowRunEntity>>(){});
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -424,7 +427,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
       }
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flowruns/user1/cluster1/flow_name?" +
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
           "createdtimestart=1425016501030");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<FlowRunEntity>>(){});
@@ -440,7 +443,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
       }
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flowruns/user1/cluster1/flow_name?" +
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
           "createdtimestart=1425016500999&createdtimeend=1425016501035");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<FlowRunEntity>>(){});
@@ -459,7 +462,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
       }
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flowruns/user1/cluster1/flow_name?" +
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
           "createdtimeend=1425016501030");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<FlowRunEntity>>(){});
@@ -475,7 +478,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
       }
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flowruns/user1/cluster1/flow_name?fields=metrics");
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
+          "fields=metrics");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<FlowRunEntity>>(){});
       assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
@@ -498,11 +502,262 @@ public class TestTimelineReaderWebServicesHBaseStorage {
   }
 
   @Test
+  public void testGetEntitiesByUID() throws Exception {
+    Client client = createClient();
+    try {
+      // Query all flows.
+      URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+          "timeline/flows");
+      ClientResponse resp = getResponse(client, uri);
+      Set<FlowActivityEntity> flowEntities =
+          resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
+      assertNotNull(flowEntities);
+      assertEquals(2, flowEntities.size());
+      List<String> listFlowUIDs = new ArrayList<String>();
+      for (FlowActivityEntity entity : flowEntities) {
+        String flowUID =
+            (String)entity.getInfo().get(TimelineReaderManager.UID_KEY);
+        listFlowUIDs.add(flowUID);
+        assertEquals(TimelineUIDConverter.FLOW_UID.encodeUID(
+            new TimelineReaderContext(entity.getCluster(), entity.getUser(),
+            entity.getFlowName(), null, null, null, null)), flowUID);
+        assertTrue((entity.getId().endsWith("@flow_name") &&
+            entity.getFlowRuns().size() == 2) ||
+            (entity.getId().endsWith("@flow_name2") &&
+            entity.getFlowRuns().size() == 1));
+      }
+
+      // Query flowruns based on UID returned in query above.
+      List<String> listFlowRunUIDs = new ArrayList<String>();
+      for (String flowUID : listFlowUIDs) {
+        uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+            "timeline/flow-uid/" + flowUID + "/runs");
+        resp = getResponse(client, uri);
+        Set<FlowRunEntity> frEntities =
+            resp.getEntity(new GenericType<Set<FlowRunEntity>>(){});
+        assertNotNull(frEntities);
+        for (FlowRunEntity entity : frEntities) {
+          String flowRunUID =
+              (String)entity.getInfo().get(TimelineReaderManager.UID_KEY);
+          listFlowRunUIDs.add(flowRunUID);
+          assertEquals(TimelineUIDConverter.FLOWRUN_UID.encodeUID(
+              new TimelineReaderContext("cluster1", entity.getUser(),
+              entity.getName(), entity.getRunId(), null, null, null)),
+              flowRunUID);
+        }
+      }
+      assertEquals(3, listFlowRunUIDs.size());
+
+      // Query single flowrun based on UIDs' returned in query to get flowruns.
+      for (String flowRunUID : listFlowRunUIDs) {
+        uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+            "timeline/run-uid/" + flowRunUID);
+        resp = getResponse(client, uri);
+        FlowRunEntity entity = resp.getEntity(FlowRunEntity.class);
+        assertNotNull(entity);
+      }
+
+      // Query apps based on UIDs' returned in query to get flowruns.
+      List<String> listAppUIDs = new ArrayList<String>();
+      for (String flowRunUID : listFlowRunUIDs) {
+        TimelineReaderContext context =
+            TimelineUIDConverter.FLOWRUN_UID.decodeUID(flowRunUID);
+        uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+            "timeline/run-uid/" + flowRunUID + "/apps");
+        resp = getResponse(client, uri);
+        Set<TimelineEntity> appEntities =
+            resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
+        assertNotNull(appEntities);
+        for (TimelineEntity entity : appEntities) {
+          String appUID =
+              (String)entity.getInfo().get(TimelineReaderManager.UID_KEY);
+          listAppUIDs.add(appUID);
+          assertEquals(TimelineUIDConverter.APPLICATION_UID.encodeUID(
+              new TimelineReaderContext(context.getClusterId(),
+              context.getUserId(), context.getFlowName(),
+              context.getFlowRunId(), entity.getId(), null, null)), appUID);
+        }
+      }
+      assertEquals(4, listAppUIDs.size());
+
+      // Query single app based on UIDs' returned in query to get apps.
+      for (String appUID : listAppUIDs) {
+        uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+            "timeline/app-uid/" + appUID);
+        resp = getResponse(client, uri);
+        TimelineEntity entity = resp.getEntity(TimelineEntity.class);
+        assertNotNull(entity);
+      }
+
+      // Query entities based on UIDs' returned in query to get apps and
+      // a specific entity type(in this case type1).
+      List<String> listEntityUIDs = new ArrayList<String>();
+      for (String appUID : listAppUIDs) {
+        TimelineReaderContext context =
+            TimelineUIDConverter.APPLICATION_UID.decodeUID(appUID);
+        uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+            "timeline/app-uid/" + appUID + "/entities/type1");
+        resp = getResponse(client, uri);
+        Set<TimelineEntity> entities =
+            resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
+        assertNotNull(entities);
+        for (TimelineEntity entity : entities) {
+          String entityUID =
+              (String)entity.getInfo().get(TimelineReaderManager.UID_KEY);
+          listEntityUIDs.add(entityUID);
+          assertEquals(TimelineUIDConverter.GENERIC_ENTITY_UID.encodeUID(
+              new TimelineReaderContext(context.getClusterId(),
+              context.getUserId(), context.getFlowName(),
+              context.getFlowRunId(), context.getAppId(), "type1",
+              entity.getId())), entityUID);
+        }
+      }
+      assertEquals(2, listEntityUIDs.size());
+
+      // Query single entity based on UIDs' returned in query to get entities.
+      for (String entityUID : listEntityUIDs) {
+        uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+            "timeline/entity-uid/" + entityUID);
+        resp = getResponse(client, uri);
+        TimelineEntity entity = resp.getEntity(TimelineEntity.class);
+        assertNotNull(entity);
+      }
+
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+          "timeline/flow-uid/dummy:flow/runs");
+      verifyHttpResponse(client, uri, Status.BAD_REQUEST);
+
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+          "timeline/run-uid/dummy:flowrun");
+      verifyHttpResponse(client, uri, Status.BAD_REQUEST);
+
+      // Run Id is not a numerical value.
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+          "timeline/run-uid/some:dummy:flow:123v456");
+      verifyHttpResponse(client, uri, Status.BAD_REQUEST);
+
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+          "timeline/run-uid/dummy:flowrun/apps");
+      verifyHttpResponse(client, uri, Status.BAD_REQUEST);
+
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+          "timeline/app-uid/dummy:app");
+      verifyHttpResponse(client, uri, Status.BAD_REQUEST);
+
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+          "timeline/app-uid/dummy:app/entities/type1");
+      verifyHttpResponse(client, uri, Status.BAD_REQUEST);
+
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
+          "timeline/entity-uid/dummy:entity");
+      verifyHttpResponse(client, uri, Status.BAD_REQUEST);
+    } finally {
+      client.destroy();
+    }
+  }
+
+  @Test
+  public void testUIDQueryWithAndWithoutFlowContextInfo() throws Exception {
+    Client client = createClient();
+    try {
+      String appUIDWithFlowInfo =
+          "cluster1!user1!flow_name!1002345678919!application_1111111111_1111";
+      URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/"+
+          "timeline/app-uid/" + appUIDWithFlowInfo);
+      ClientResponse resp = getResponse(client, uri);
+      TimelineEntity appEntity1 = resp.getEntity(TimelineEntity.class);
+      assertNotNull(appEntity1);
+      assertEquals(
+          TimelineEntityType.YARN_APPLICATION.toString(), appEntity1.getType());
+      assertEquals("application_1111111111_1111", appEntity1.getId());
+
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/" +
+          "app-uid/" + appUIDWithFlowInfo + "/entities/type1");
+      resp = getResponse(client, uri);
+      Set<TimelineEntity> entities1 =
+          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
+      assertNotNull(entities1);
+      assertEquals(2, entities1.size());
+      for (TimelineEntity entity : entities1) {
+        assertNotNull(entity.getInfo());
+        assertEquals(1, entity.getInfo().size());
+        String uid =
+            (String) entity.getInfo().get(TimelineReaderManager.UID_KEY);
+        assertNotNull(uid);
+        assertTrue(uid.equals(appUIDWithFlowInfo + "!type1!entity1") ||
+            uid.equals(appUIDWithFlowInfo + "!type1!entity2"));
+      }
+
+      String appUIDWithoutFlowInfo = "cluster1!application_1111111111_1111";
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/"+
+          "app-uid/" + appUIDWithoutFlowInfo);
+      resp = getResponse(client, uri);;
+      TimelineEntity appEntity2 = resp.getEntity(TimelineEntity.class);
+      assertNotNull(appEntity2);
+      assertEquals(
+          TimelineEntityType.YARN_APPLICATION.toString(), appEntity2.getType());
+      assertEquals("application_1111111111_1111", appEntity2.getId());
+
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/" +
+          "app-uid/" + appUIDWithoutFlowInfo + "/entities/type1");
+      resp = getResponse(client, uri);
+      Set<TimelineEntity> entities2 =
+          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
+      assertNotNull(entities2);
+      assertEquals(2, entities2.size());
+      for (TimelineEntity entity : entities2) {
+        assertNotNull(entity.getInfo());
+        assertEquals(1, entity.getInfo().size());
+        String uid =
+            (String) entity.getInfo().get(TimelineReaderManager.UID_KEY);
+        assertNotNull(uid);
+        assertTrue(uid.equals(appUIDWithoutFlowInfo + "!type1!entity1") ||
+            uid.equals(appUIDWithoutFlowInfo + "!type1!entity2"));
+      }
+
+      String entityUIDWithFlowInfo = appUIDWithFlowInfo + "!type1!entity1";
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/"+
+          "entity-uid/" + entityUIDWithFlowInfo);
+      resp = getResponse(client, uri);;
+      TimelineEntity singleEntity1 = resp.getEntity(TimelineEntity.class);
+      assertNotNull(singleEntity1);
+      assertEquals("type1", singleEntity1.getType());
+      assertEquals("entity1", singleEntity1.getId());
+
+      String entityUIDWithoutFlowInfo =
+          appUIDWithoutFlowInfo + "!type1!entity1";
+      uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/timeline/"+
+          "entity-uid/" + entityUIDWithoutFlowInfo);
+      resp = getResponse(client, uri);;
+      TimelineEntity singleEntity2 = resp.getEntity(TimelineEntity.class);
+      assertNotNull(singleEntity2);
+      assertEquals("type1", singleEntity2.getType());
+      assertEquals("entity1", singleEntity2.getId());
+    } finally {
+      client.destroy();
+    }
+  }
+
+  @Test
+  public void testUIDNotProperlyEscaped() throws Exception {
+    Client client = createClient();
+    try {
+      String appUID =
+          "cluster1!user*1!flow_name!1002345678919!application_1111111111_1111";
+      URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/"+
+          "timeline/app-uid/" + appUID);
+      verifyHttpResponse(client, uri, Status.BAD_REQUEST);
+    } finally {
+      client.destroy();
+    }
+  }
+
+  @Test
   public void testGetFlows() throws Exception {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flows/cluster1");
+          "timeline/clusters/cluster1/flows");
       ClientResponse resp = getResponse(client, uri);
       Set<FlowActivityEntity> entities =
           resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
@@ -524,7 +779,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
       assertEquals(2, entities.size());
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-              "timeline/flows/cluster1?limit=1");
+              "timeline/clusters/cluster1/flows?limit=1");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
       assertNotNull(entities);
@@ -532,8 +787,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
 
       DateFormat fmt = TimelineReaderWebServices.DATE_FORMAT.get();
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flows/cluster1?daterange=" + fmt.format(dayTs) + "-" +
-          fmt.format(dayTs + (2*86400000L)));
+          "timeline/clusters/cluster1/flows?daterange=" + fmt.format(dayTs) +
+          "-" + fmt.format(dayTs + (2*86400000L)));
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
       assertNotNull(entities);
@@ -546,7 +801,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
       }
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flows/cluster1?daterange=" +
+          "timeline/clusters/cluster1/flows?daterange=" +
           fmt.format(dayTs + (4*86400000L)));
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
@@ -554,7 +809,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
       assertEquals(0, entities.size());
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flows/cluster1?daterange=-" +
+          "timeline/clusters/cluster1/flows?daterange=-" +
           fmt.format(dayTs + (2*86400000L)));
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
@@ -562,7 +817,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
       assertEquals(2, entities.size());
 
       uri = URI.create("http://localhost:"; + serverPort + "/ws/v2/" +
-          "timeline/flows/cluster1?daterange=" +
+          "timeline/clusters/cluster1/flows?daterange=" +
           fmt.format(dayTs - (2*86400000L)) + "-");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
@@ -570,19 +825,19 @@ public class TestTimelineReaderWebServicesHBaseStorage {
       assertEquals(2, entities.size());
 
       uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flows/cluster1?daterange=20150711:20150714");
+          "timeline/clusters/cluster1/flows?daterange=20150711:20150714");
       verifyHttpResponse(client, uri, Status.BAD_REQUEST);
 
       uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flows/cluster1?daterange=20150714-20150711");
+          "timeline/clusters/cluster1/flows?daterange=20150714-20150711");
       verifyHttpResponse(client, uri, Status.BAD_REQUEST);
 
       uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flows/cluster1?daterange=2015071129-20150712");
+          "timeline/clusters/cluster1/flows?daterange=2015071129-20150712");
       verifyHttpResponse(client, uri, Status.BAD_REQUEST);
 
       uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flows/cluster1?daterange=20150711-2015071243");
+          "timeline/clusters/cluster1/flows?daterange=20150711-2015071243");
       verifyHttpResponse(client, uri, Status.BAD_REQUEST);
     } finally {
       client.destroy();
@@ -594,7 +849,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/app/cluster1/application_1111111111_1111?" +
+          "timeline/clusters/cluster1/apps/application_1111111111_1111?" +
           "userid=user1&fields=ALL&flowname=flow_name&flowrunid=1002345678919");
       ClientResponse resp = getResponse(client, uri);
       TimelineEntity entity = resp.getEntity(TimelineEntity.class);
@@ -612,7 +867,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
       }
 
       uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-              "timeline/app/application_1111111111_2222?userid=user1" +
+              "timeline/apps/application_1111111111_2222?userid=user1" +
               "&fields=metrics&flowname=flow_name&flowrunid=1002345678919");
       resp = getResponse(client, uri);
       entity = resp.getEntity(TimelineEntity.class);
@@ -635,7 +890,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/app/cluster1/application_1111111111_1111?" +
+          "timeline/clusters/cluster1/apps/application_1111111111_1111?" +
           "fields=ALL");
       ClientResponse resp = getResponse(client, uri);
       TimelineEntity entity = resp.getEntity(TimelineEntity.class);
@@ -661,7 +916,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/entity/cluster1/application_1111111111_1111/type1/entity1");
+          "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
+          "entities/type1/entity1");
       ClientResponse resp = getResponse(client, uri);
       TimelineEntity entity = resp.getEntity(TimelineEntity.class);
       assertNotNull(entity);
@@ -677,7 +933,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/entities/cluster1/application_1111111111_1111/type1");
+          "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
+          "entities/type1");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -697,8 +954,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowrunapps/user1/cluster1/flow_name/1002345678919?" +
-          "fields=ALL");
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/runs/" +
+          "1002345678919/apps?fields=ALL");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -714,14 +971,15 @@ public class TestTimelineReaderWebServicesHBaseStorage {
 
       // Query without specifying cluster ID.
       uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowrunapps/user1/flow_name/1002345678919");
+          "timeline/users/user1/flows/flow_name/runs/1002345678919/apps");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
       assertNotNull(entities);
       assertEquals(2, entities.size());
 
       uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowrunapps/user1/flow_name/1002345678919?limit=1");
+          "timeline/users/user1/flows/flow_name/runs/1002345678919/" +
+          "apps?limit=1");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
       assertNotNull(entities);
@@ -736,7 +994,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowapps/user1/cluster1/flow_name?fields=ALL");
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/apps?" +
+          "fields=ALL");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -754,14 +1013,14 @@ public class TestTimelineReaderWebServicesHBaseStorage {
 
       // Query without specifying cluster ID.
       uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowapps/user1/flow_name");
+          "timeline/users/user1/flows/flow_name/apps");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
       assertNotNull(entities);
       assertEquals(3, entities.size());
 
       uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowapps/user1/flow_name?limit=1");
+          "timeline/users/user1/flows/flow_name/apps?limit=1");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
       assertNotNull(entities);
@@ -777,8 +1036,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     try {
       String entityType = TimelineEntityType.YARN_APPLICATION.toString();
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowapps/user1/cluster1/flow_name?eventfilters=" +
-          ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/apps?" +
+          "eventfilters=" + ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -788,8 +1047,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
           newEntity(entityType, "application_1111111111_1111")));
 
       uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowapps/user1/cluster1/flow_name?metricfilters=" +
-          "HDFS_BYTES_READ");
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/apps?" +
+          "metricfilters=HDFS_BYTES_READ");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
       assertNotNull(entities);
@@ -798,8 +1057,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
           newEntity(entityType, "application_1111111111_1111")));
 
       uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowapps/user1/cluster1/flow_name?conffilters=" +
-          "cfg1:value1");
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/apps?" +
+          "conffilters=cfg1:value1");
       resp = getResponse(client, uri);
       entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
       assertNotNull(entities);
@@ -816,7 +1075,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowrun/user1/cluster1/flow_name/1002345678929");
+          "timeline/clusters/cluster1/users/user1/flows/flow_name/runs/" +
+          "1002345678929");
       verifyHttpResponse(client, uri, Status.NOT_FOUND);
     } finally {
       client.destroy();
@@ -828,7 +1088,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flows/cluster2");
+          "timeline/clusters/cluster2/flows");
       ClientResponse resp = getResponse(client, uri);
       Set<FlowActivityEntity> entities =
           resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
@@ -845,8 +1105,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/app/user1/cluster1/flow_name/1002345678919/" +
-          "application_1111111111_1378");
+          "timeline/clusters/cluster1/apps/application_1111111111_1378");
       verifyHttpResponse(client, uri, Status.NOT_FOUND);
     } finally {
       client.destroy();
@@ -858,7 +1117,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowrunapps/user1/cluster2/flow_name/1002345678919");
+          "timeline/clusters/cluster2/users/user1/flows/flow_name/runs/" +
+          "1002345678919/apps");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
@@ -875,7 +1135,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
     Client client = createClient();
     try {
       URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
-          "timeline/flowapps/user1/cluster2/flow_name55");
+          "timeline/clusters/cluster2/users/user1/flows/flow_name55/apps");
       ClientResponse resp = getResponse(client, uri);
       Set<TimelineEntity> entities =
           resp.getEntity(new GenericType<Set<TimelineEntity>>(){});

http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java
new file mode 100644
index 0000000..8d6235d
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.reader;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
+
+import org.junit.Test;
+
+public class TestTimelineUIDConverter {
+
+  @Test
+  public void testUIDEncodingDecoding() throws Exception {
+    TimelineReaderContext context = new TimelineReaderContext(
+        "!cluster", "!b*o*!xer", "oozie*", null, null, null, null);
+    String uid = TimelineUIDConverter.FLOW_UID.encodeUID(context);
+    assertEquals("*!cluster!*!b**o***!xer!oozie**", uid);
+    assertEquals(context, TimelineUIDConverter.FLOW_UID.decodeUID(uid));
+
+    context = new TimelineReaderContext("!cluster*", "!b*o!!x!*er", "*oozie!",
+        123L, null, null, null);
+    uid = TimelineUIDConverter.FLOWRUN_UID.encodeUID(context);
+    assertEquals("*!cluster**!*!b**o*!*!x*!**er!**oozie*!!123", uid);
+    assertEquals(context, TimelineUIDConverter.FLOWRUN_UID.decodeUID(uid));
+
+    context = new TimelineReaderContext("yarn_cluster", "root", "hive_join",
+        1234L, "application_1111111111_1111", null, null);
+    uid = TimelineUIDConverter.APPLICATION_UID.encodeUID(context);
+    assertEquals(
+        "yarn_cluster!root!hive_join!1234!application_1111111111_1111", uid);
+    assertEquals(context, TimelineUIDConverter.APPLICATION_UID.decodeUID(uid));
+    context = new TimelineReaderContext("yarn_cluster", null, null, null,
+        "application_1111111111_1111", null, null);
+    uid = TimelineUIDConverter.APPLICATION_UID.encodeUID(context);
+    assertEquals("yarn_cluster!application_1111111111_1111", uid);
+    assertEquals(context, TimelineUIDConverter.APPLICATION_UID.decodeUID(uid));
+
+    context = new TimelineReaderContext("yarn_cluster", "root", "hive_join",
+        1234L, "application_1111111111_1111", "YARN_CONTAINER",
+        "container_1111111111_1111_01_000001");
+    uid = TimelineUIDConverter.GENERIC_ENTITY_UID.encodeUID(context);
+    assertEquals("yarn_cluster!root!hive_join!1234!application_1111111111_1111!"
+        + "YARN_CONTAINER!container_1111111111_1111_01_000001", uid);
+    assertEquals(
+        context, TimelineUIDConverter.GENERIC_ENTITY_UID.decodeUID(uid));
+    context = new TimelineReaderContext("yarn_cluster",null, null, null,
+        "application_1111111111_1111", "YARN_CONTAINER",
+        "container_1111111111_1111_01_000001");
+    uid = TimelineUIDConverter.GENERIC_ENTITY_UID.encodeUID(context);
+    assertEquals("yarn_cluster!application_1111111111_1111!YARN_CONTAINER!" +
+        "container_1111111111_1111_01_000001", uid);
+    assertEquals(
+        context, TimelineUIDConverter.GENERIC_ENTITY_UID.decodeUID(uid));
+  }
+
+  @Test
+  public void testUIDNotProperlyEscaped() throws Exception {
+    try {
+      TimelineUIDConverter.FLOW_UID.decodeUID("*!cluster!*!b*o***!xer!oozie**");
+      fail("UID not properly escaped. Exception should have been thrown.");
+    } catch (IllegalArgumentException e) {
+    }
+
+    try {
+      TimelineUIDConverter.FLOW_UID.decodeUID("*!cluster!*!b**o***!xer!oozie*");
+      fail("UID not properly escaped. Exception should have been thrown.");
+    } catch (IllegalArgumentException e) {
+    }
+
+    try {
+      TimelineUIDConverter.FLOW_UID.decodeUID("*!cluster!*!b**o***xer!oozie*");
+      fail("UID not properly escaped. Exception should have been thrown.");
+    } catch (IllegalArgumentException e) {
+    }
+    assertNull(TimelineUIDConverter.
+        FLOW_UID.decodeUID("!cluster!*!b**o***!xer!oozie**"));
+    assertNull(TimelineUIDConverter.
+        FLOW_UID.decodeUID("*!cluster!*!b**o**!xer!oozie**"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java
index 330adb3..58d5e61 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java
@@ -127,7 +127,7 @@ public class TestPhoenixOfflineAggregationWriterImpl extends BaseTest {
     TimelineEntities te = new TimelineEntities();
     te.addEntity(getTestAggregationTimelineEntity());
     TimelineCollectorContext context = new TimelineCollectorContext("cluster_1",
-        "user1", "testFlow", null, 0, null);
+        "user1", "testFlow", null, 0L, null);
     storage.writeAggregatedEntity(context, te,
         aggregationInfo);
 


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to