http://git-wip-us.apache.org/repos/asf/hadoop/blob/9a925cb8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java
deleted file mode 100644
index 5beb189..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java
+++ /dev/null
@@ -1,246 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.timelineservice.storage.common;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey;
-import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey;
-import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKey;
-import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKey;
-import org.junit.Test;
-
-
-public class TestRowKeys {
-
-  private final static String QUALIFIER_SEP = Separator.QUALIFIERS.getValue();
-  private final static byte[] QUALIFIER_SEP_BYTES = Bytes
-      .toBytes(QUALIFIER_SEP);
-  private final static String CLUSTER = "cl" + QUALIFIER_SEP + "uster";
-  private final static String USER = QUALIFIER_SEP + "user";
-  private final static String FLOW_NAME = "dummy_" + QUALIFIER_SEP + "flow"
-      + QUALIFIER_SEP;
-  private final static Long FLOW_RUN_ID;
-  private final static String APPLICATION_ID;
-  static {
-    long runid = Long.MAX_VALUE - 900L;
-    byte[] longMaxByteArr = Bytes.toBytes(Long.MAX_VALUE);
-    byte[] byteArr = Bytes.toBytes(runid);
-    int sepByteLen = QUALIFIER_SEP_BYTES.length;
-    if (sepByteLen <= byteArr.length) {
-      for (int i = 0; i < sepByteLen; i++) {
-        byteArr[i] = (byte) (longMaxByteArr[i] - QUALIFIER_SEP_BYTES[i]);
-      }
-    }
-    FLOW_RUN_ID = Bytes.toLong(byteArr);
-    long clusterTs = System.currentTimeMillis();
-    byteArr = Bytes.toBytes(clusterTs);
-    if (sepByteLen <= byteArr.length) {
-      for (int i = 0; i < sepByteLen; i++) {
-        byteArr[byteArr.length - sepByteLen + i] =
-            (byte) (longMaxByteArr[byteArr.length - sepByteLen + i] -
-                QUALIFIER_SEP_BYTES[i]);
-      }
-    }
-    clusterTs = Bytes.toLong(byteArr);
-    int seqId = 222;
-    APPLICATION_ID = ApplicationId.newInstance(clusterTs, seqId).toString();
-  }
-
-  private static void verifyRowPrefixBytes(byte[] byteRowKeyPrefix) {
-    int sepLen = QUALIFIER_SEP_BYTES.length;
-    for (int i = 0; i < sepLen; i++) {
-      assertTrue(
-          "Row key prefix not encoded properly.",
-          byteRowKeyPrefix[byteRowKeyPrefix.length - sepLen + i] ==
-              QUALIFIER_SEP_BYTES[i]);
-    }
-  }
-
-  @Test
-  public void testApplicationRowKey() {
-    byte[] byteRowKey =
-        new ApplicationRowKey(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID,
-            APPLICATION_ID).getRowKey();
-    ApplicationRowKey rowKey = ApplicationRowKey.parseRowKey(byteRowKey);
-    assertEquals(CLUSTER, rowKey.getClusterId());
-    assertEquals(USER, rowKey.getUserId());
-    assertEquals(FLOW_NAME, rowKey.getFlowName());
-    assertEquals(FLOW_RUN_ID, rowKey.getFlowRunId());
-    assertEquals(APPLICATION_ID, rowKey.getAppId());
-
-    byte[] byteRowKeyPrefix =
-        new ApplicationRowKeyPrefix(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID)
-            .getRowKeyPrefix();
-    byte[][] splits =
-        Separator.QUALIFIERS.split(byteRowKeyPrefix,
-            new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-                Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-                Separator.VARIABLE_SIZE});
-    assertEquals(5, splits.length);
-    assertEquals(0, splits[4].length);
-    assertEquals(FLOW_NAME,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[2])));
-    assertEquals(FLOW_RUN_ID,
-        (Long) LongConverter.invertLong(Bytes.toLong(splits[3])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-
-    byteRowKeyPrefix =
-        new ApplicationRowKeyPrefix(CLUSTER, USER, FLOW_NAME).getRowKeyPrefix();
-    splits =
-        Separator.QUALIFIERS.split(byteRowKeyPrefix, new int[] {
-            Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-            Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE });
-    assertEquals(4, splits.length);
-    assertEquals(0, splits[3].length);
-    assertEquals(FLOW_NAME,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[2])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-  }
-
-  /**
-   * Tests the converters indirectly through the public methods of the
-   * corresponding rowkey.
-   */
-  @Test
-  public void testAppToFlowRowKey() {
-    byte[] byteRowKey = new AppToFlowRowKey(CLUSTER,
-        APPLICATION_ID).getRowKey();
-    AppToFlowRowKey rowKey = AppToFlowRowKey.parseRowKey(byteRowKey);
-    assertEquals(CLUSTER, rowKey.getClusterId());
-    assertEquals(APPLICATION_ID, rowKey.getAppId());
-  }
-
-  @Test
-  public void testEntityRowKey() {
-    String entityId = "!ent!ity!!id!";
-    String entityType = "entity!Type";
-    byte[] byteRowKey =
-        new EntityRowKey(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID, APPLICATION_ID,
-            entityType, entityId).getRowKey();
-    EntityRowKey rowKey = EntityRowKey.parseRowKey(byteRowKey);
-    assertEquals(CLUSTER, rowKey.getClusterId());
-    assertEquals(USER, rowKey.getUserId());
-    assertEquals(FLOW_NAME, rowKey.getFlowName());
-    assertEquals(FLOW_RUN_ID, rowKey.getFlowRunId());
-    assertEquals(APPLICATION_ID, rowKey.getAppId());
-    assertEquals(entityType, rowKey.getEntityType());
-    assertEquals(entityId, rowKey.getEntityId());
-
-    byte[] byteRowKeyPrefix =
-        new EntityRowKeyPrefix(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID,
-            APPLICATION_ID, entityType).getRowKeyPrefix();
-    byte[][] splits =
-        Separator.QUALIFIERS.split(
-            byteRowKeyPrefix,
-            new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-                Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-                AppIdKeyConverter.getKeySize(), Separator.VARIABLE_SIZE,
-                Separator.VARIABLE_SIZE});
-    assertEquals(7, splits.length);
-    assertEquals(0, splits[6].length);
-    assertEquals(APPLICATION_ID, new AppIdKeyConverter().decode(splits[4]));
-    assertEquals(entityType,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[5])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-
-    byteRowKeyPrefix =
-        new EntityRowKeyPrefix(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID,
-            APPLICATION_ID).getRowKeyPrefix();
-    splits =
-        Separator.QUALIFIERS.split(
-            byteRowKeyPrefix,
-            new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-                Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-                AppIdKeyConverter.getKeySize(), Separator.VARIABLE_SIZE});
-    assertEquals(6, splits.length);
-    assertEquals(0, splits[5].length);
-    AppIdKeyConverter appIdKeyConverter = new AppIdKeyConverter();
-    assertEquals(APPLICATION_ID, appIdKeyConverter.decode(splits[4]));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-  }
-
-  @Test
-  public void testFlowActivityRowKey() {
-    Long ts = 1459900830000L;
-    Long dayTimestamp = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(ts);
-    byte[] byteRowKey =
-        new FlowActivityRowKey(CLUSTER, ts, USER, FLOW_NAME).getRowKey();
-    FlowActivityRowKey rowKey = FlowActivityRowKey.parseRowKey(byteRowKey);
-    assertEquals(CLUSTER, rowKey.getClusterId());
-    assertEquals(dayTimestamp, rowKey.getDayTimestamp());
-    assertEquals(USER, rowKey.getUserId());
-    assertEquals(FLOW_NAME, rowKey.getFlowName());
-
-    byte[] byteRowKeyPrefix =
-        new FlowActivityRowKeyPrefix(CLUSTER).getRowKeyPrefix();
-    byte[][] splits =
-        Separator.QUALIFIERS.split(byteRowKeyPrefix, new int[] {
-            Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE });
-    assertEquals(2, splits.length);
-    assertEquals(0, splits[1].length);
-    assertEquals(CLUSTER,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[0])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-
-    byteRowKeyPrefix =
-        new FlowActivityRowKeyPrefix(CLUSTER, ts).getRowKeyPrefix();
-    splits =
-        Separator.QUALIFIERS.split(byteRowKeyPrefix,
-            new int[] {Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-                Separator.VARIABLE_SIZE});
-    assertEquals(3, splits.length);
-    assertEquals(0, splits[2].length);
-    assertEquals(CLUSTER,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[0])));
-    assertEquals(ts,
-        (Long) LongConverter.invertLong(Bytes.toLong(splits[1])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-  }
-
-  @Test
-  public void testFlowRunRowKey() {
-    byte[] byteRowKey =
-        new FlowRunRowKey(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID).getRowKey();
-    FlowRunRowKey rowKey = FlowRunRowKey.parseRowKey(byteRowKey);
-    assertEquals(CLUSTER, rowKey.getClusterId());
-    assertEquals(USER, rowKey.getUserId());
-    assertEquals(FLOW_NAME, rowKey.getFlowName());
-    assertEquals(FLOW_RUN_ID, rowKey.getFlowRunId());
-
-    byte[] byteRowKeyPrefix =
-        new FlowRunRowKey(CLUSTER, USER, FLOW_NAME, null).getRowKey();
-    byte[][] splits =
-        Separator.QUALIFIERS.split(byteRowKeyPrefix, new int[] {
-            Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-            Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE });
-    assertEquals(4, splits.length);
-    assertEquals(0, splits[3].length);
-    assertEquals(FLOW_NAME,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[2])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-  }
-
-}
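
The static initializer above goes out of its way to build a flow run id and a cluster timestamp whose eight-byte encodings contain the separator byte, so the tests prove that fixed-size long fields survive Separator.split(). The assertions also rely on longs being stored inverted, so newer runs sort first under HBase's ascending byte order. Below is a minimal sketch of that inversion trick, assuming LongConverter.invertLong(v) is Long.MAX_VALUE - v, as the round-trip assertions above imply; the class and method names in the sketch are illustrative, not part of the patch:

    import org.apache.hadoop.hbase.util.Bytes;

    public final class InvertedLongSketch {
      // Assumed to mirror LongConverter.invertLong: it is its own inverse,
      // and it reverses ordering for non-negative inputs.
      static long invert(long value) {
        return Long.MAX_VALUE - value;
      }

      public static void main(String[] args) {
        long olderRun = 1_000L;
        long newerRun = 2_000L;
        byte[] olderKey = Bytes.toBytes(invert(olderRun));
        byte[] newerKey = Bytes.toBytes(invert(newerRun));
        // The newer run encodes to the lexicographically smaller key,
        // so an HBase scan returns it first.
        System.out.println(Bytes.compareTo(newerKey, olderKey) < 0); // true
        // Round trip: applying the inversion twice restores the value.
        System.out.println(invert(Bytes.toLong(newerKey)) == newerRun); // true
      }
    }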

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9a925cb8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java
deleted file mode 100644
index 7d37206..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.yarn.server.timelineservice.storage.common;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.Test;
-
-import com.google.common.collect.Iterables;
-
-public class TestSeparator {
-
-  private static String villain = "Dr. Heinz Doofenshmirtz";
-  private static String special =
-      ".   *   |   ?   +   \t   (   )   [   ]   {   }   ^   $  \\ \"  %";
-
-  /**
-   *
-   */
-  @Test
-  public void testEncodeDecodeString() {
-
-    for (Separator separator : Separator.values()) {
-      testEncodeDecode(separator, "");
-      testEncodeDecode(separator, " ");
-      testEncodeDecode(separator, "!");
-      testEncodeDecode(separator, "?");
-      testEncodeDecode(separator, "&");
-      testEncodeDecode(separator, "+");
-      testEncodeDecode(separator, "\t");
-      testEncodeDecode(separator, "Dr.");
-      testEncodeDecode(separator, "Heinz");
-      testEncodeDecode(separator, "Doofenshmirtz");
-      testEncodeDecode(separator, villain);
-      testEncodeDecode(separator, special);
-
-      assertNull(separator.encode(null));
-
-    }
-  }
-
-  private void testEncodeDecode(Separator separator, String token) {
-    String encoded = separator.encode(token);
-    String decoded = separator.decode(encoded);
-    String msg = "token:" + token + " separator:" + separator + ".";
-    assertEquals(msg, token, decoded);
-  }
-
-  @Test
-  public void testEncodeDecode() {
-    testEncodeDecode("Dr.", Separator.QUALIFIERS);
-    testEncodeDecode("Heinz", Separator.QUALIFIERS, Separator.QUALIFIERS);
-    testEncodeDecode("Doofenshmirtz", Separator.QUALIFIERS, null,
-        Separator.QUALIFIERS);
-    testEncodeDecode("&Perry", Separator.QUALIFIERS, Separator.VALUES, null);
-    testEncodeDecode("the ", Separator.QUALIFIERS, Separator.SPACE);
-    testEncodeDecode("Platypus...", (Separator) null);
-    testEncodeDecode("The what now ?!?", Separator.QUALIFIERS,
-        Separator.VALUES, Separator.SPACE);
-
-  }
-  @Test
-  public void testEncodedValues() {
-    testEncodeDecode("Double-escape %2$ and %9$ or %%2$ or %%3$, nor  %%%2$" +
-        "= no problem!",
-        Separator.QUALIFIERS, Separator.VALUES, Separator.SPACE, Separator.TAB);
-  }
-
-  @Test
-  public void testSplits() {
-    byte[] maxLongBytes = Bytes.toBytes(Long.MAX_VALUE);
-    byte[] maxIntBytes = Bytes.toBytes(Integer.MAX_VALUE);
-    for (Separator separator : Separator.values()) {
-      String str1 = "cl" + separator.getValue() + "us";
-      String str2 = separator.getValue() + "rst";
-      byte[] sepByteArr = Bytes.toBytes(separator.getValue());
-      byte[] longVal1Arr = Bytes.add(sepByteArr, Bytes.copy(maxLongBytes,
-          sepByteArr.length, Bytes.SIZEOF_LONG - sepByteArr.length));
-      byte[] intVal1Arr = Bytes.add(sepByteArr, Bytes.copy(maxIntBytes,
-          sepByteArr.length, Bytes.SIZEOF_INT - sepByteArr.length));
-      byte[] arr = separator.join(
-          Bytes.toBytes(separator.encode(str1)), longVal1Arr,
-          Bytes.toBytes(separator.encode(str2)), intVal1Arr);
-      int[] sizes = {Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-          Separator.VARIABLE_SIZE, Bytes.SIZEOF_INT};
-      byte[][] splits = separator.split(arr, sizes);
-      assertEquals(4, splits.length);
-      assertEquals(str1, separator.decode(Bytes.toString(splits[0])));
-      assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[1]));
-      assertEquals(str2, separator.decode(Bytes.toString(splits[2])));
-      assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[3]));
-
-      longVal1Arr = Bytes.add(Bytes.copy(maxLongBytes, 0, Bytes.SIZEOF_LONG -
-          sepByteArr.length), sepByteArr);
-      intVal1Arr = Bytes.add(Bytes.copy(maxIntBytes, 0, Bytes.SIZEOF_INT -
-          sepByteArr.length), sepByteArr);
-      arr = separator.join(Bytes.toBytes(separator.encode(str1)), longVal1Arr,
-          Bytes.toBytes(separator.encode(str2)), intVal1Arr);
-      splits = separator.split(arr, sizes);
-      assertEquals(4, splits.length);
-      assertEquals(str1, separator.decode(Bytes.toString(splits[0])));
-      assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[1]));
-      assertEquals(str2, separator.decode(Bytes.toString(splits[2])));
-      assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[3]));
-
-      longVal1Arr = Bytes.add(sepByteArr, Bytes.copy(maxLongBytes,
-          sepByteArr.length, 4 - sepByteArr.length), sepByteArr);
-      longVal1Arr = Bytes.add(longVal1Arr, Bytes.copy(maxLongBytes, 4, 3 -
-              sepByteArr.length), sepByteArr);
-      arr = separator.join(Bytes.toBytes(separator.encode(str1)), longVal1Arr,
-          Bytes.toBytes(separator.encode(str2)), intVal1Arr);
-      splits = separator.split(arr, sizes);
-      assertEquals(4, splits.length);
-      assertEquals(str1, separator.decode(Bytes.toString(splits[0])));
-      assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[1]));
-      assertEquals(str2, separator.decode(Bytes.toString(splits[2])));
-      assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[3]));
-
-      arr = separator.join(Bytes.toBytes(separator.encode(str1)),
-          Bytes.toBytes(separator.encode(str2)), intVal1Arr, longVal1Arr);
-      int[] sizes1 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-          Bytes.SIZEOF_INT, Bytes.SIZEOF_LONG};
-      splits = separator.split(arr, sizes1);
-      assertEquals(4, splits.length);
-      assertEquals(str1, separator.decode(Bytes.toString(splits[0])));
-      assertEquals(str2, separator.decode(Bytes.toString(splits[1])));
-      assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[2]));
-      assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[3]));
-
-      try {
-        int[] sizes2 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-            Bytes.SIZEOF_INT, 7};
-        splits = separator.split(arr, sizes2);
-        fail("Exception should have been thrown.");
-      } catch (IllegalArgumentException e) {}
-
-      try {
-        int[] sizes2 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, 2,
-            Bytes.SIZEOF_LONG};
-        splits = separator.split(arr, sizes2);
-        fail("Exception should have been thrown.");
-      } catch (IllegalArgumentException e) {}
-    }
-  }
-
-  /**
- * Simple test to encode and decode using the same separators and confirm that
-   * we end up with the same as what we started with.
-   *
-   * @param token
-   * @param separators
-   */
-  private static void testEncodeDecode(String token, Separator... separators) {
-    byte[] encoded = Separator.encode(token, separators);
-    String decoded = Separator.decode(encoded, separators);
-    assertEquals(token, decoded);
-  }
-
-  @Test
-  public void testJoinStripped() {
-    List<String> stringList = new ArrayList<String>(0);
-    stringList.add("nothing");
-
-    String joined = Separator.VALUES.joinEncoded(stringList);
-    Iterable<String> split = Separator.VALUES.splitEncoded(joined);
-    assertTrue(Iterables.elementsEqual(stringList, split));
-
-    stringList = new ArrayList<String>(3);
-    stringList.add("a");
-    stringList.add("b?");
-    stringList.add("c");
-
-    joined = Separator.VALUES.joinEncoded(stringList);
-    split = Separator.VALUES.splitEncoded(joined);
-    assertTrue(Iterables.elementsEqual(stringList, split));
-
-    String[] stringArray1 = {"else"};
-    joined = Separator.VALUES.joinEncoded(stringArray1);
-    split = Separator.VALUES.splitEncoded(joined);
-    assertTrue(Iterables.elementsEqual(Arrays.asList(stringArray1), split));
-
-    String[] stringArray2 = {"d", "e?", "f"};
-    joined = Separator.VALUES.joinEncoded(stringArray2);
-    split = Separator.VALUES.splitEncoded(joined);
-    assertTrue(Iterables.elementsEqual(Arrays.asList(stringArray2), split));
-
-    List<String> empty = new ArrayList<String>(0);
-    split = Separator.VALUES.splitEncoded(null);
-    assertTrue(Iterables.elementsEqual(empty, split));
-
-  }
-
-}
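
The round-trip tests above all reduce to one contract: each value is escaped before joining, so the raw separator can be used to split, and decoding restores the original tokens, including tokens that contain the separator itself. A hypothetical sketch of that contract follows; the method names match the ones the test calls, but the separator character and escape token are stand-ins, not the production Separator enum's actual values:

    import java.util.ArrayList;
    import java.util.List;

    public final class SeparatorSketch {
      private static final String SEP = "!";      // illustrative separator
      private static final String QUOTED = "%0$"; // illustrative escape token

      // Escape separator occurrences inside a token before joining.
      static String encode(String token) {
        return token.replace(SEP, QUOTED);
      }

      static String decode(String encoded) {
        return encoded.replace(QUOTED, SEP);
      }

      static String joinEncoded(List<String> items) {
        StringBuilder sb = new StringBuilder();
        for (String item : items) {
          if (sb.length() > 0) {
            sb.append(SEP);
          }
          sb.append(encode(item));
        }
        return sb.toString();
      }

      static List<String> splitEncoded(String joined) {
        List<String> out = new ArrayList<>();
        // Limit -1 keeps trailing empty tokens, as a faithful round trip must.
        for (String piece : joined.split("\\Q" + SEP + "\\E", -1)) {
          out.add(decode(piece));
        }
        return out;
      }

      public static void main(String[] args) {
        List<String> in = List.of("cl!uster", "user", "flow!name");
        // true: separators inside tokens survive the join/split round trip
        System.out.println(splitEncoded(joinEncoded(in)).equals(in));
      }
    }

As testEncodedValues above suggests, input that already looks like an escape token (the "%2$"-style literals) is the hard case the production encoder must handle; this simplified sketch sidesteps it.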

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9a925cb8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/pom.xml
index eed5523..8698433 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/pom.xml
@@ -43,6 +43,7 @@
     <module>hadoop-yarn-server-applicationhistoryservice</module>
     <module>hadoop-yarn-server-timeline-pluginstorage</module>
     <module>hadoop-yarn-server-timelineservice</module>
+    <module>hadoop-yarn-server-timelineservice-hbase</module>
     <module>hadoop-yarn-server-timelineservice-hbase-tests</module>
   </modules>
 </project>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9a925cb8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
index e801e14..61aa6ed 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
@@ -172,7 +172,7 @@ Once you have an Apache HBase cluster ready to use for this purpose, perform the
 First, add the timeline service jar to the HBase classpath in all HBase machines in the cluster. It
 is needed for the coprocessor as well as the schema creator. For example,
 is needed for the coprocessor as well as the schema creator. For example,
 
-    cp hadoop-yarn-server-timelineservice-3.0.0-alpha1-SNAPSHOT.jar /usr/hbase/lib/
+    cp hadoop-yarn-server-timelineservice-hbase-3.0.0-alpha1-SNAPSHOT.jar /usr/hbase/lib/
 
 Then, enable the coprocessor that handles the aggregation. To enable it, add the following entry in
 region servers' `hbase-site.xml` file (generally located in the `conf` directory) as follows:
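
The hunk ends at "as follows:", so the entry itself sits outside the diff context and is unchanged by this patch; only the jar name above changes. As a hedged sketch of what such an entry looks like, assuming the aggregation coprocessor is org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunCoprocessor (that class name is an assumption here, not shown by this patch):

    <property>
      <name>hbase.coprocessor.region.classes</name>
      <value>org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunCoprocessor</value>
    </property>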

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9a925cb8/hadoop-yarn-project/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/pom.xml b/hadoop-yarn-project/pom.xml
index 92fbfd5..2e7214a 100644
--- a/hadoop-yarn-project/pom.xml
+++ b/hadoop-yarn-project/pom.xml
@@ -74,6 +74,10 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-server-web-proxy</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-timelineservice-hbase</artifactId>
+    </dependency>
   </dependencies>
 
   <build>

