http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
deleted file mode 100644
index 3064d2d..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import java.util.Collections;
-import java.util.Set;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-
-import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.server.webapp.WebServices;
-import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptInfo;
-import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptsInfo;
-import org.apache.hadoop.yarn.server.webapp.dao.AppInfo;
-import org.apache.hadoop.yarn.server.webapp.dao.AppsInfo;
-import org.apache.hadoop.yarn.server.webapp.dao.ContainerInfo;
-import org.apache.hadoop.yarn.server.webapp.dao.ContainersInfo;
-import org.apache.hadoop.yarn.webapp.BadRequestException;
-
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
-
-@Singleton
-@Path("/ws/v1/applicationhistory")
-public class AHSWebServices extends WebServices {
-
-  @Inject
-  public AHSWebServices(ApplicationBaseProtocol appBaseProt) {
-    super(appBaseProt);
-  }
-
-  @GET
-  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
-  public AppsInfo get(@Context HttpServletRequest req,
-      @Context HttpServletResponse res) {
-    return getApps(req, res, null, Collections.<String> emptySet(), null, null,
-      null, null, null, null, null, null, Collections.<String> emptySet());
-  }
-
-  @GET
-  @Path("/apps")
-  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
-  @Override
-  public AppsInfo getApps(@Context HttpServletRequest req,
-      @Context HttpServletResponse res, @QueryParam("state") String stateQuery,
-      @QueryParam("states") Set<String> statesQuery,
-      @QueryParam("finalStatus") String finalStatusQuery,
-      @QueryParam("user") String userQuery,
-      @QueryParam("queue") String queueQuery,
-      @QueryParam("limit") String count,
-      @QueryParam("startedTimeBegin") String startedBegin,
-      @QueryParam("startedTimeEnd") String startedEnd,
-      @QueryParam("finishedTimeBegin") String finishBegin,
-      @QueryParam("finishedTimeEnd") String finishEnd,
-      @QueryParam("applicationTypes") Set<String> applicationTypes) {
-    init(res);
-    validateStates(stateQuery, statesQuery);
-    return super.getApps(req, res, stateQuery, statesQuery, finalStatusQuery,
-      userQuery, queueQuery, count, startedBegin, startedEnd, finishBegin,
-      finishEnd, applicationTypes);
-  }
-
-  @GET
-  @Path("/apps/{appid}")
-  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
-  @Override
-  public AppInfo getApp(@Context HttpServletRequest req,
-      @Context HttpServletResponse res, @PathParam("appid") String appId) {
-    init(res);
-    return super.getApp(req, res, appId);
-  }
-
-  @GET
-  @Path("/apps/{appid}/appattempts")
-  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
-  @Override
-  public AppAttemptsInfo getAppAttempts(@Context HttpServletRequest req,
-      @Context HttpServletResponse res, @PathParam("appid") String appId) {
-    init(res);
-    return super.getAppAttempts(req, res, appId);
-  }
-
-  @GET
-  @Path("/apps/{appid}/appattempts/{appattemptid}")
-  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
-  @Override
-  public AppAttemptInfo getAppAttempt(@Context HttpServletRequest req,
-      @Context HttpServletResponse res, @PathParam("appid") String appId,
-      @PathParam("appattemptid") String appAttemptId) {
-    init(res);
-    return super.getAppAttempt(req, res, appId, appAttemptId);
-  }
-
-  @GET
-  @Path("/apps/{appid}/appattempts/{appattemptid}/containers")
-  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
-  @Override
-  public ContainersInfo getContainers(@Context HttpServletRequest req,
-      @Context HttpServletResponse res, @PathParam("appid") String appId,
-      @PathParam("appattemptid") String appAttemptId) {
-    init(res);
-    return super.getContainers(req, res, appId, appAttemptId);
-  }
-
-  @GET
-  @Path("/apps/{appid}/appattempts/{appattemptid}/containers/{containerid}")
-  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
-  @Override
-  public ContainerInfo getContainer(@Context HttpServletRequest req,
-      @Context HttpServletResponse res, @PathParam("appid") String appId,
-      @PathParam("appattemptid") String appAttemptId,
-      @PathParam("containerid") String containerId) {
-    init(res);
-    return super.getContainer(req, res, appId, appAttemptId, containerId);
-  }
-
-  private static void
-      validateStates(String stateQuery, Set<String> statesQuery) {
-    // stateQuery is deprecated.
-    if (stateQuery != null && !stateQuery.isEmpty()) {
-      statesQuery.add(stateQuery);
-    }
-    Set<String> appStates = parseQueries(statesQuery, true);
-    for (String appState : appStates) {
-      switch (YarnApplicationState.valueOf(appState.toUpperCase())) {
-        case FINISHED:
-        case FAILED:
-        case KILLED:
-          continue;
-        default:
-          throw new BadRequestException("Invalid application-state " + appState
-              + " specified. It should be a final state");
-      }
-    }
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSController.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSController.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSController.java
new file mode 100644
index 0000000..0bf962e
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSController.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import org.apache.hadoop.yarn.webapp.Controller;
+
+import com.google.inject.Inject;
+
+public class AMSController extends Controller {
+
+  @Inject
+  AMSController(RequestContext ctx) {
+    super(ctx);
+  }
+
+  @Override
+  public void index() {
+    setTitle("Ambari Metrics Service");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSWebApp.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSWebApp.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSWebApp.java
new file mode 100644
index 0000000..2f6eec7
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSWebApp.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebApp;
+import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
+
+public class AMSWebApp extends WebApp implements YarnWebParams {
+  
+  private final TimelineMetricStore timelineMetricStore;
+
+  public AMSWebApp(TimelineMetricStore timelineMetricStore) {
+    this.timelineMetricStore = timelineMetricStore;
+  }
+
+  @Override
+  public void setup() {
+    bind(YarnJacksonJaxbJsonProvider.class);
+    bind(TimelineWebServices.class);
+    bind(GenericExceptionHandler.class);
+    bind(TimelineMetricStore.class).toInstance(timelineMetricStore);
+    route("/", AMSController.class);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppAttemptPage.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppAttemptPage.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppAttemptPage.java
deleted file mode 100644
index 63b44bd..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppAttemptPage.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import static org.apache.hadoop.yarn.util.StringHelper.join;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
-
-import org.apache.hadoop.yarn.server.webapp.AppAttemptBlock;
-import org.apache.hadoop.yarn.webapp.SubView;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
-
-public class AppAttemptPage extends AHSView {
-
-  @Override
-  protected void preHead(Page.HTML<_> html) {
-    commonPreHead(html);
-
-    String appAttemptId = $(YarnWebParams.APPLICATION_ATTEMPT_ID);
-    set(
-      TITLE,
-      appAttemptId.isEmpty() ? "Bad request: missing application attempt ID"
-          : join("Application Attempt ",
-            $(YarnWebParams.APPLICATION_ATTEMPT_ID)));
-
-    set(DATATABLES_ID, "containers");
-    set(initID(DATATABLES, "containers"), containersTableInit());
-    setTableStyles(html, "containers", ".queue {width:6em}", ".ui {width:8em}");
-  }
-
-  @Override
-  protected Class<? extends SubView> content() {
-    return AppAttemptBlock.class;
-  }
-
-  private String containersTableInit() {
-    return tableInit().append(", 'aaData': containersTableData")
-      .append(", bDeferRender: true").append(", bProcessing: true")
-
-      .append("\n, aoColumnDefs: ").append(getContainersTableColumnDefs())
-
-      // Sort by id upon page load
-      .append(", aaSorting: [[0, 'desc']]}").toString();
-  }
-
-  protected String getContainersTableColumnDefs() {
-    StringBuilder sb = new StringBuilder();
-    return sb.append("[\n").append("{'sType':'numeric', 'aTargets': [0]")
-      .append(", 'mRender': parseHadoopID }]").toString();
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppPage.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppPage.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppPage.java
deleted file mode 100644
index 96ca659..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppPage.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import static org.apache.hadoop.yarn.util.StringHelper.join;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
-
-import org.apache.hadoop.yarn.server.webapp.AppBlock;
-import org.apache.hadoop.yarn.webapp.SubView;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
-
-public class AppPage extends AHSView {
-
-  @Override
-  protected void preHead(Page.HTML<_> html) {
-    commonPreHead(html);
-
-    String appId = $(YarnWebParams.APPLICATION_ID);
-    set(
-      TITLE,
-      appId.isEmpty() ? "Bad request: missing application ID" : join(
-        "Application ", $(YarnWebParams.APPLICATION_ID)));
-
-    set(DATATABLES_ID, "attempts");
-    set(initID(DATATABLES, "attempts"), attemptsTableInit());
-    setTableStyles(html, "attempts", ".queue {width:6em}", ".ui {width:8em}");
-  }
-
-  @Override
-  protected Class<? extends SubView> content() {
-    return AppBlock.class;
-  }
-
-  private String attemptsTableInit() {
-    return tableInit().append(", 'aaData': attemptsTableData")
-      .append(", bDeferRender: true").append(", bProcessing: true")
-
-      .append("\n, aoColumnDefs: ").append(getAttemptsTableColumnDefs())
-
-      // Sort by id upon page load
-      .append(", aaSorting: [[0, 'desc']]}").toString();
-  }
-
-  protected String getAttemptsTableColumnDefs() {
-    StringBuilder sb = new StringBuilder();
-    return sb.append("[\n").append("{'sType':'numeric', 'aTargets': [0]")
-      .append(", 'mRender': parseHadoopID }")
-
-      .append("\n, {'sType':'numeric', 'aTargets': [1]")
-      .append(", 'mRender': renderHadoopDate }]").toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ContainerPage.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ContainerPage.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ContainerPage.java
deleted file mode 100644
index 1be8a26..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ContainerPage.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import static org.apache.hadoop.yarn.util.StringHelper.join;
-
-import org.apache.hadoop.yarn.server.webapp.ContainerBlock;
-import org.apache.hadoop.yarn.webapp.SubView;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
-
-public class ContainerPage extends AHSView {
-
-  @Override
-  protected void preHead(Page.HTML<_> html) {
-    commonPreHead(html);
-
-    String containerId = $(YarnWebParams.CONTAINER_ID);
-    set(TITLE, containerId.isEmpty() ? "Bad request: missing container ID"
-        : join("Container ", $(YarnWebParams.CONTAINER_ID)));
-  }
-
-  @Override
-  protected Class<? extends SubView> content() {
-    return ContainerBlock.class;
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/NavBlock.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/NavBlock.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/NavBlock.java
deleted file mode 100644
index e84ddec..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/NavBlock.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-
-public class NavBlock extends HtmlBlock {
-
-  @Override
-  public void render(Block html) {
-    html.
-        div("#nav").
-            h3("Application History").
-                ul().
-                    li().a(url("apps"), "Applications").
-                        ul().
-                            li().a(url("apps",
-                                YarnApplicationState.FINISHED.toString()),
-                                YarnApplicationState.FINISHED.toString()).
-                            _().
-                            li().a(url("apps",
-                                YarnApplicationState.FAILED.toString()),
-                                YarnApplicationState.FAILED.toString()).
-                            _().
-                            li().a(url("apps",
-                                YarnApplicationState.KILLED.toString()),
-                                YarnApplicationState.KILLED.toString()).
-                            _().
-                        _().
-                    _().
-                _().
-            _();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
index dc401e6..2930b33 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
@@ -18,44 +18,26 @@
 
 package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
 
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.metrics2.sink.timeline.AggregationResult;
-import org.apache.hadoop.metrics2.sink.timeline.ContainerMetric;
-import org.apache.hadoop.metrics2.sink.timeline.PrecisionLimitExceededException;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
-import org.apache.hadoop.metrics2.sink.timeline.TopNConfig;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
-import org.apache.hadoop.metrics2.sink.timeline.Precision;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.EntityIdentifier;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
-import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
-import org.apache.hadoop.yarn.webapp.BadRequestException;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
 import javax.ws.rs.POST;
 import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.WebApplicationException;
@@ -66,40 +48,40 @@ import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeSet;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
 
-import static org.apache.hadoop.yarn.util.StringHelper.CSV_JOINER;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.metrics2.sink.timeline.AggregationResult;
+import org.apache.hadoop.metrics2.sink.timeline.ContainerMetric;
+import org.apache.hadoop.metrics2.sink.timeline.Precision;
+import org.apache.hadoop.metrics2.sink.timeline.PrecisionLimitExceededException;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.hadoop.metrics2.sink.timeline.TopNConfig;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
+import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
+import org.apache.hadoop.yarn.webapp.BadRequestException;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
 
 @Singleton
 @Path("/ws/v1/timeline")
-//TODO: support XML serialization/deserialization
 public class TimelineWebServices {
-
   private static final Log LOG = LogFactory.getLog(TimelineWebServices.class);
-
-  private TimelineStore store;
+  
   private TimelineMetricStore timelineMetricStore;
 
   @Inject
-  public TimelineWebServices(TimelineStore store,
-                             TimelineMetricStore timelineMetricStore) {
-    this.store = store;
+  public TimelineWebServices(TimelineMetricStore timelineMetricStore) {
     this.timelineMetricStore = timelineMetricStore;
   }
 
@@ -139,125 +121,7 @@ public class TimelineWebServices {
       @Context HttpServletRequest req,
       @Context HttpServletResponse res) {
     init(res);
-    return new AboutInfo("Timeline API");
-  }
-
-  /**
-   * Return a list of entities that match the given parameters.
-   */
-  @GET
-  @Path("/{entityType}")
-  @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
-  public TimelineEntities getEntities(
-      @Context HttpServletRequest req,
-      @Context HttpServletResponse res,
-      @PathParam("entityType") String entityType,
-      @QueryParam("primaryFilter") String primaryFilter,
-      @QueryParam("secondaryFilter") String secondaryFilter,
-      @QueryParam("windowStart") String windowStart,
-      @QueryParam("windowEnd") String windowEnd,
-      @QueryParam("fromId") String fromId,
-      @QueryParam("fromTs") String fromTs,
-      @QueryParam("limit") String limit,
-      @QueryParam("fields") String fields) {
-    init(res);
-    TimelineEntities entities = null;
-    try {
-      entities = store.getEntities(
-        parseStr(entityType),
-        parseLongStr(limit),
-        parseLongStr(windowStart),
-        parseLongStr(windowEnd),
-        parseStr(fromId),
-        parseLongStr(fromTs),
-        parsePairStr(primaryFilter, ":"),
-        parsePairsStr(secondaryFilter, ",", ":"),
-        parseFieldsStr(fields, ","));
-    } catch (NumberFormatException e) {
-      throw new BadRequestException(
-          "windowStart, windowEnd or limit is not a numeric value.");
-    } catch (IllegalArgumentException e) {
-      throw new BadRequestException("requested invalid field.");
-    } catch (IOException e) {
-      LOG.error("Error getting entities", e);
-      throw new WebApplicationException(e,
-          Response.Status.INTERNAL_SERVER_ERROR);
-    }
-    if (entities == null) {
-      return new TimelineEntities();
-    }
-    return entities;
-  }
-
-  /**
-   * Return a single entity of the given entity type and Id.
-   */
-  @GET
-  @Path("/{entityType}/{entityId}")
-  @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
-  public TimelineEntity getEntity(
-      @Context HttpServletRequest req,
-      @Context HttpServletResponse res,
-      @PathParam("entityType") String entityType,
-      @PathParam("entityId") String entityId,
-      @QueryParam("fields") String fields) {
-    init(res);
-    TimelineEntity entity = null;
-    try {
-      entity =
-          store.getEntity(parseStr(entityId), parseStr(entityType),
-            parseFieldsStr(fields, ","));
-    } catch (IllegalArgumentException e) {
-      throw new BadRequestException(
-          "requested invalid field.");
-    } catch (IOException e) {
-      LOG.error("Error getting entity", e);
-      throw new WebApplicationException(e,
-          Response.Status.INTERNAL_SERVER_ERROR);
-    }
-    if (entity == null) {
-      throw new WebApplicationException(Response.Status.NOT_FOUND);
-    }
-    return entity;
-  }
-
-  /**
-   * Return the events that match the given parameters.
-   */
-  @GET
-  @Path("/{entityType}/events")
-  @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
-  public TimelineEvents getEvents(
-      @Context HttpServletRequest req,
-      @Context HttpServletResponse res,
-      @PathParam("entityType") String entityType,
-      @QueryParam("entityId") String entityId,
-      @QueryParam("eventType") String eventType,
-      @QueryParam("windowStart") String windowStart,
-      @QueryParam("windowEnd") String windowEnd,
-      @QueryParam("limit") String limit) {
-    init(res);
-    TimelineEvents events = null;
-    try {
-      events = store.getEntityTimelines(
-        parseStr(entityType),
-        parseArrayStr(entityId, ","),
-        parseLongStr(limit),
-        parseLongStr(windowStart),
-        parseLongStr(windowEnd),
-        parseArrayStr(eventType, ","));
-    } catch (NumberFormatException e) {
-      throw new BadRequestException(
-          "windowStart, windowEnd or limit is not a numeric value.");
-    } catch (IOException e) {
-      LOG.error("Error getting entity timelines", e);
-      throw new WebApplicationException(e,
-          Response.Status.INTERNAL_SERVER_ERROR);
-    }
-    if (events == null) {
-      return new TimelineEvents();
-    }
-    return events;
+    return new AboutInfo("AMS API");
   }
 
   /**
@@ -559,42 +423,6 @@ public class TimelineWebServices {
     return timelineMetricStore.getLiveInstances();
   }
 
-  /**
-   * Store the given entities into the timeline store, and return the errors
-   * that happen during storing.
-   */
-  @POST
-  @Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
-  public TimelinePutResponse postEntities(
-      @Context HttpServletRequest req,
-      @Context HttpServletResponse res,
-      TimelineEntities entities) {
-    init(res);
-    if (entities == null) {
-      return new TimelinePutResponse();
-    }
-    try {
-      List<EntityIdentifier> entityIDs = new ArrayList<EntityIdentifier>();
-      for (TimelineEntity entity : entities.getEntities()) {
-        EntityIdentifier entityID =
-            new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
-        entityIDs.add(entityID);
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("Storing the entity " + entityID + ", JSON-style content: "
-              + TimelineUtils.dumpTimelineRecordtoJSON(entity));
-        }
-      }
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Storing entities: " + CSV_JOINER.join(entityIDs));
-      }
-      return store.put(entities);
-    } catch (IOException e) {
-      LOG.error("Error putting entities", e);
-      throw new WebApplicationException(e,
-          Response.Status.INTERNAL_SERVER_ERROR);
-    }
-  }
-
   private void init(HttpServletResponse response) {
     response.setContentType(null);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
deleted file mode 100644
index ec9b49d..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerState;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
-
-public class ApplicationHistoryStoreTestUtils {
-
-  protected ApplicationHistoryStore store;
-
-  protected void writeApplicationStartData(ApplicationId appId)
-      throws IOException {
-    store.applicationStarted(ApplicationStartData.newInstance(appId,
-      appId.toString(), "test type", "test queue", "test user", 0, 0));
-  }
-
-  protected void writeApplicationFinishData(ApplicationId appId)
-      throws IOException {
-    store.applicationFinished(ApplicationFinishData.newInstance(appId, 0,
-      appId.toString(), FinalApplicationStatus.UNDEFINED,
-      YarnApplicationState.FINISHED));
-  }
-
-  protected void writeApplicationAttemptStartData(
-      ApplicationAttemptId appAttemptId) throws IOException {
-    store.applicationAttemptStarted(ApplicationAttemptStartData.newInstance(
-      appAttemptId, appAttemptId.toString(), 0,
-      ContainerId.newContainerId(appAttemptId, 1)));
-  }
-
-  protected void writeApplicationAttemptFinishData(
-      ApplicationAttemptId appAttemptId) throws IOException {
-    store.applicationAttemptFinished(ApplicationAttemptFinishData.newInstance(
-      appAttemptId, appAttemptId.toString(), "test tracking url",
-      FinalApplicationStatus.UNDEFINED, YarnApplicationAttemptState.FINISHED));
-  }
-
-  protected void writeContainerStartData(ContainerId containerId)
-      throws IOException {
-    store.containerStarted(ContainerStartData.newInstance(containerId,
-      Resource.newInstance(0, 0), NodeId.newInstance("localhost", 0),
-      Priority.newInstance(containerId.getId()), 0));
-  }
-
-  protected void writeContainerFinishData(ContainerId containerId)
-      throws IOException {
-    store.containerFinished(ContainerFinishData.newInstance(containerId, 0,
-      containerId.toString(), 0, ContainerState.COMPLETE));
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
deleted file mode 100644
index f93ac5e..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-import java.util.List;
-
-import junit.framework.Assert;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerReport;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
-// Timeline service client support is not enabled for AMBARI_METRICS
-@Ignore
-public class TestApplicationHistoryClientService extends
-    ApplicationHistoryStoreTestUtils {
-
-  ApplicationHistoryServer historyServer = null;
-  String expectedLogUrl = null;
-
-  @Before
-  public void setup() {
-    historyServer = new ApplicationHistoryServer();
-    Configuration config = new YarnConfiguration();
-    expectedLogUrl = WebAppUtils.getHttpSchemePrefix(config) +
-        WebAppUtils.getAHSWebAppURLWithoutScheme(config) +
-        "/applicationhistory/logs/localhost:0/container_0_0001_01_000001/" +
-        "container_0_0001_01_000001/test user";
-    config.setClass(YarnConfiguration.APPLICATION_HISTORY_STORE,
-      MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class);
-    historyServer.init(config);
-    historyServer.start();
-    store =
-        ((ApplicationHistoryManagerImpl) historyServer.getApplicationHistory())
-          .getHistoryStore();
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    historyServer.stop();
-  }
-
-  @Test
-  public void testApplicationReport() throws IOException, YarnException {
-    ApplicationId appId = null;
-    appId = ApplicationId.newInstance(0, 1);
-    writeApplicationStartData(appId);
-    writeApplicationFinishData(appId);
-    GetApplicationReportRequest request =
-        GetApplicationReportRequest.newInstance(appId);
-    GetApplicationReportResponse response =
-        historyServer.getClientService().getClientHandler()
-          .getApplicationReport(request);
-    ApplicationReport appReport = response.getApplicationReport();
-    Assert.assertNotNull(appReport);
-    Assert.assertEquals("application_0_0001", appReport.getApplicationId()
-      .toString());
-    Assert.assertEquals("test type", appReport.getApplicationType().toString());
-    Assert.assertEquals("test queue", appReport.getQueue().toString());
-  }
-
-  @Test
-  public void testApplications() throws IOException, YarnException {
-    ApplicationId appId = null;
-    appId = ApplicationId.newInstance(0, 1);
-    writeApplicationStartData(appId);
-    writeApplicationFinishData(appId);
-    ApplicationId appId1 = ApplicationId.newInstance(0, 2);
-    writeApplicationStartData(appId1);
-    writeApplicationFinishData(appId1);
-    GetApplicationsRequest request = GetApplicationsRequest.newInstance();
-    GetApplicationsResponse response =
-        historyServer.getClientService().getClientHandler()
-          .getApplications(request);
-    List<ApplicationReport> appReport = response.getApplicationList();
-    Assert.assertNotNull(appReport);
-    Assert.assertEquals(appId, appReport.get(0).getApplicationId());
-    Assert.assertEquals(appId1, appReport.get(1).getApplicationId());
-  }
-
-  @Test
-  public void testApplicationAttemptReport() throws IOException, YarnException {
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    writeApplicationAttemptStartData(appAttemptId);
-    writeApplicationAttemptFinishData(appAttemptId);
-    GetApplicationAttemptReportRequest request =
-        GetApplicationAttemptReportRequest.newInstance(appAttemptId);
-    GetApplicationAttemptReportResponse response =
-        historyServer.getClientService().getClientHandler()
-          .getApplicationAttemptReport(request);
-    ApplicationAttemptReport attemptReport =
-        response.getApplicationAttemptReport();
-    Assert.assertNotNull(attemptReport);
-    Assert.assertEquals("appattempt_0_0001_000001", attemptReport
-      .getApplicationAttemptId().toString());
-  }
-
-  @Test
-  public void testApplicationAttempts() throws IOException, YarnException {
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    ApplicationAttemptId appAttemptId1 =
-        ApplicationAttemptId.newInstance(appId, 2);
-    writeApplicationAttemptStartData(appAttemptId);
-    writeApplicationAttemptFinishData(appAttemptId);
-    writeApplicationAttemptStartData(appAttemptId1);
-    writeApplicationAttemptFinishData(appAttemptId1);
-    GetApplicationAttemptsRequest request =
-        GetApplicationAttemptsRequest.newInstance(appId);
-    GetApplicationAttemptsResponse response =
-        historyServer.getClientService().getClientHandler()
-          .getApplicationAttempts(request);
-    List<ApplicationAttemptReport> attemptReports =
-        response.getApplicationAttemptList();
-    Assert.assertNotNull(attemptReports);
-    Assert.assertEquals(appAttemptId, attemptReports.get(0)
-      .getApplicationAttemptId());
-    Assert.assertEquals(appAttemptId1, attemptReports.get(1)
-      .getApplicationAttemptId());
-  }
-
-  @Test
-  public void testContainerReport() throws IOException, YarnException {
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    writeApplicationStartData(appId);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
-    writeContainerStartData(containerId);
-    writeContainerFinishData(containerId);
-    writeApplicationFinishData(appId);
-    GetContainerReportRequest request =
-        GetContainerReportRequest.newInstance(containerId);
-    GetContainerReportResponse response =
-        historyServer.getClientService().getClientHandler()
-          .getContainerReport(request);
-    ContainerReport container = response.getContainerReport();
-    Assert.assertNotNull(container);
-    Assert.assertEquals(containerId, container.getContainerId());
-    Assert.assertEquals(expectedLogUrl, container.getLogUrl());
-  }
-
-  @Test
-  public void testContainers() throws IOException, YarnException {
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    writeApplicationStartData(appId);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
-    ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 2);
-    writeContainerStartData(containerId);
-    writeContainerFinishData(containerId);
-    writeContainerStartData(containerId1);
-    writeContainerFinishData(containerId1);
-    writeApplicationFinishData(appId);
-    GetContainersRequest request =
-        GetContainersRequest.newInstance(appAttemptId);
-    GetContainersResponse response =
-        historyServer.getClientService().getClientHandler()
-          .getContainers(request);
-    List<ContainerReport> containers = response.getContainerList();
-    Assert.assertNotNull(containers);
-    Assert.assertEquals(containerId, containers.get(1).getContainerId());
-    Assert.assertEquals(containerId1, containers.get(0).getContainerId());
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java
deleted file mode 100644
index aad23d9..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
-public class TestApplicationHistoryManagerImpl extends
-    ApplicationHistoryStoreTestUtils {
-  ApplicationHistoryManagerImpl applicationHistoryManagerImpl = null;
-
-  @Before
-  public void setup() throws Exception {
-    Configuration config = new Configuration();
-    config.setClass(YarnConfiguration.APPLICATION_HISTORY_STORE,
-      MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class);
-    applicationHistoryManagerImpl = new ApplicationHistoryManagerImpl();
-    applicationHistoryManagerImpl.init(config);
-    applicationHistoryManagerImpl.start();
-    store = applicationHistoryManagerImpl.getHistoryStore();
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    applicationHistoryManagerImpl.stop();
-  }
-
-  @Test
-  @Ignore
-  public void testApplicationReport() throws IOException, YarnException {
-    ApplicationId appId = null;
-    appId = ApplicationId.newInstance(0, 1);
-    writeApplicationStartData(appId);
-    writeApplicationFinishData(appId);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    writeApplicationAttemptStartData(appAttemptId);
-    writeApplicationAttemptFinishData(appAttemptId);
-    ApplicationReport appReport =
-        applicationHistoryManagerImpl.getApplication(appId);
-    Assert.assertNotNull(appReport);
-    Assert.assertEquals(appId, appReport.getApplicationId());
-    Assert.assertEquals(appAttemptId,
-      appReport.getCurrentApplicationAttemptId());
-    Assert.assertEquals(appAttemptId.toString(), appReport.getHost());
-    Assert.assertEquals("test type", appReport.getApplicationType().toString());
-    Assert.assertEquals("test queue", appReport.getQueue().toString());
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
deleted file mode 100644
index 03205e7..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
+++ /dev/null
@@ -1,267 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.service.Service.STATE;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.HBaseTimelineMetricsService;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.availability.MetricCollectorHAController;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultPhoenixDataSource;
-import org.apache.zookeeper.ClientCnxn;
-import org.easymock.EasyMock;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-import org.junit.runner.RunWith;
-import org.powermock.api.easymock.PowerMock;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.Statement;
-
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_SITE_CONFIGURATION_FILE;
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.anyString;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.fail;
-import static org.powermock.api.easymock.PowerMock.expectNew;
-import static org.powermock.api.easymock.PowerMock.mockStatic;
-import static org.powermock.api.easymock.PowerMock.replayAll;
-import static org.powermock.api.easymock.PowerMock.verifyAll;
-import static org.powermock.api.support.membermodification.MemberMatcher.method;
-import static org.powermock.api.support.membermodification.MemberModifier.suppress;
-
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({ PhoenixHBaseAccessor.class, HBaseTimelineMetricsService.class, UserGroupInformation.class,
-  ClientCnxn.class, DefaultPhoenixDataSource.class, ConnectionFactory.class,
-  TimelineMetricConfiguration.class, ApplicationHistoryServer.class })
-@PowerMockIgnore( {"javax.management.*"})
-public class TestApplicationHistoryServer {
-
-  ApplicationHistoryServer historyServer = null;
-  Configuration metricsConf = null;
-
-  @Rule
-  public TemporaryFolder folder = new TemporaryFolder();
-
-  @Before
-  @SuppressWarnings("all")
-  public void setup() throws URISyntaxException, IOException {
-    folder.create();
-    File hbaseSite = folder.newFile("hbase-site.xml");
-    File amsSite = folder.newFile("ams-site.xml");
-
-    FileUtils.writeStringToFile(hbaseSite, "<configuration>\n" +
-      "  <property>\n" +
-      "    <name>hbase.defaults.for.version.skip</name>\n" +
-      "    <value>true</value>\n" +
-      "  </property>" +
-      "  <property> " +
-      "    <name>hbase.zookeeper.quorum</name>\n" +
-      "    <value>localhost</value>\n" +
-      "  </property>" +
-      "</configuration>");
-
-    FileUtils.writeStringToFile(amsSite, "<configuration>\n" +
-      "  <property>\n" +
-      "    <name>test</name>\n" +
-      "    <value>testReady</value>\n" +
-      "  </property>\n" +
-      "  <property>\n" +
-      "    <name>timeline.metrics.host.aggregator.hourly.disabled</name>\n" +
-      "    <value>true</value>\n" +
-      "    <description>\n" +
-      "      Disable host based hourly aggregations.\n" +
-      "    </description>\n" +
-      "  </property>\n" +
-      "  <property>\n" +
-      "    <name>timeline.metrics.host.aggregator.minute.disabled</name>\n" +
-      "    <value>true</value>\n" +
-      "    <description>\n" +
-      "      Disable host based minute aggregations.\n" +
-      "    </description>\n" +
-      "  </property>\n" +
-      "  <property>\n" +
-      "    <name>timeline.metrics.cluster.aggregator.hourly.disabled</name>\n" 
+
-      "    <value>true</value>\n" +
-      "    <description>\n" +
-      "      Disable cluster based hourly aggregations.\n" +
-      "    </description>\n" +
-      "  </property>\n" +
-      "  <property>\n" +
-      "    <name>timeline.metrics.cluster.aggregator.minute.disabled</name>\n" 
+
-      "    <value>true</value>\n" +
-      "    <description>\n" +
-      "      Disable cluster based minute aggregations.\n" +
-      "    </description>\n" +
-      "  </property>" +
-      "</configuration>");
-
-    ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
-
-    // Add the conf dir to the classpath
-    // Chain the current thread classloader
-    URLClassLoader urlClassLoader = null;
-    try {
-      urlClassLoader = new URLClassLoader(new URL[] {
-        folder.getRoot().toURI().toURL() }, currentClassLoader);
-    } catch (MalformedURLException e) {
-      e.printStackTrace();
-    }
-
-    Thread.currentThread().setContextClassLoader(urlClassLoader);
-    metricsConf = new Configuration(false);
-    metricsConf.addResource(Thread.currentThread().getContextClassLoader()
-      .getResource(METRICS_SITE_CONFIGURATION_FILE).toURI().toURL());
-    assertNotNull(metricsConf.get("test"));
-  }
-
-  // simple test init/start/stop ApplicationHistoryServer. Status should change.
-  @Ignore
-  @Test(timeout = 50000)
-  public void testStartStopServer() throws Exception {
-    Configuration config = new YarnConfiguration();
-    UserGroupInformation ugi =
-      UserGroupInformation.createUserForTesting("ambari", new String[] 
{"ambari"});
-
-    mockStatic(UserGroupInformation.class);
-    expect(UserGroupInformation.getCurrentUser()).andReturn(ugi).anyTimes();
-    expect(UserGroupInformation.isSecurityEnabled()).andReturn(false).anyTimes();
-    config.set(YarnConfiguration.APPLICATION_HISTORY_STORE,
-      "org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore");
-    Configuration hbaseConf = new Configuration();
-    hbaseConf.set("hbase.zookeeper.quorum", "localhost");
-
-    TimelineMetricConfiguration metricConfiguration = PowerMock.createNiceMock(TimelineMetricConfiguration.class);
-    expectNew(TimelineMetricConfiguration.class).andReturn(metricConfiguration);
-    expect(metricConfiguration.getHbaseConf()).andReturn(hbaseConf);
-    Configuration metricsConf = new Configuration();
-    expect(metricConfiguration.getMetricsConf()).andReturn(metricsConf).anyTimes();
-    expect(metricConfiguration.isTimelineMetricsServiceWatcherDisabled()).andReturn(true);
-    expect(metricConfiguration.getTimelineMetricsServiceHandlerThreadCount()).andReturn(20).anyTimes();
-    expect(metricConfiguration.getWebappAddress()).andReturn("localhost:9990").anyTimes();
-    expect(metricConfiguration.getTimelineServiceRpcAddress()).andReturn("localhost:10299").anyTimes();
-    expect(metricConfiguration.getClusterZKQuorum()).andReturn("localhost").anyTimes();
-    expect(metricConfiguration.getClusterZKClientPort()).andReturn("2181").anyTimes();
-
-    Connection connection = createNiceMock(Connection.class);
-    Statement stmt = createNiceMock(Statement.class);
-    PreparedStatement preparedStatement = createNiceMock(PreparedStatement.class);
-    ResultSet rs = createNiceMock(ResultSet.class);
-    mockStatic(DriverManager.class);
-    expect(DriverManager.getConnection("jdbc:phoenix:localhost:2181:/ams-hbase-unsecure"))
-      .andReturn(connection).anyTimes();
-    expect(connection.createStatement()).andReturn(stmt).anyTimes();
-    expect(connection.prepareStatement(anyString())).andReturn(preparedStatement).anyTimes();
-    suppress(method(Statement.class, "executeUpdate", String.class));
-    expect(preparedStatement.executeQuery()).andReturn(rs).anyTimes();
-    expect(rs.next()).andReturn(false).anyTimes();
-    preparedStatement.close();
-    expectLastCall().anyTimes();
-    connection.close();
-    expectLastCall();
-
-    MetricCollectorHAController haControllerMock = PowerMock.createMock(MetricCollectorHAController.class);
-    expectNew(MetricCollectorHAController.class, metricConfiguration)
-      .andReturn(haControllerMock);
-
-    haControllerMock.initializeHAController();
-    expectLastCall().once();
-    expect(haControllerMock.isInitialized()).andReturn(false).anyTimes();
-
-    org.apache.hadoop.hbase.client.Connection conn = createNiceMock(org.apache.hadoop.hbase.client.Connection.class);
-    mockStatic(ConnectionFactory.class);
-    expect(ConnectionFactory.createConnection((Configuration) anyObject())).andReturn(conn);
-    expect(conn.getAdmin()).andReturn(null);
-
-    EasyMock.replay(connection, stmt, preparedStatement, rs);
-    replayAll();
-
-    historyServer = new ApplicationHistoryServer();
-    historyServer.init(config);
-
-    verifyAll();
-
-    assertEquals(STATE.INITED, historyServer.getServiceState());
-    assertEquals(4, historyServer.getServices().size());
-    ApplicationHistoryClientService historyService =
-      historyServer.getClientService();
-    assertNotNull(historyServer.getClientService());
-    assertEquals(STATE.INITED, historyService.getServiceState());
-
-    historyServer.start();
-    assertEquals(STATE.STARTED, historyServer.getServiceState());
-    assertEquals(STATE.STARTED, historyService.getServiceState());
-    historyServer.stop();
-    assertEquals(STATE.STOPPED, historyServer.getServiceState());
-  }
-
-  // test launch method
-  @Ignore
-  @Test(timeout = 60000)
-  public void testLaunch() throws Exception {
-
-    UserGroupInformation ugi =
-      UserGroupInformation.createUserForTesting("ambari", new 
String[]{"ambari"});
-    mockStatic(UserGroupInformation.class);
-    expect(UserGroupInformation.getCurrentUser()).andReturn(ugi).anyTimes();
-    
expect(UserGroupInformation.isSecurityEnabled()).andReturn(false).anyTimes();
-
-    ExitUtil.disableSystemExit();
-    try {
-      historyServer = ApplicationHistoryServer.launchAppHistoryServer(new String[0]);
-    } catch (ExitUtil.ExitException e) {
-      assertEquals(0, e.status);
-      ExitUtil.resetFirstExitException();
-      fail();
-    }
-  }
-
-  @After
-  public void stop() {
-    if (historyServer != null) {
-      historyServer.stop();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
deleted file mode 100644
index 543c25b..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
+++ /dev/null
@@ -1,233 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-import java.net.URI;
-
-import junit.framework.Assert;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RawLocalFileSystem;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestFileSystemApplicationHistoryStore extends
-    ApplicationHistoryStoreTestUtils {
-
-  private FileSystem fs;
-  private Path fsWorkingPath;
-
-  @Before
-  public void setup() throws Exception {
-    fs = new RawLocalFileSystem();
-    Configuration conf = new Configuration();
-    fs.initialize(new URI("/"), conf);
-    fsWorkingPath = new Path("Test");
-    fs.delete(fsWorkingPath, true);
-    conf.set(YarnConfiguration.FS_APPLICATION_HISTORY_STORE_URI, fsWorkingPath.toString());
-    store = new FileSystemApplicationHistoryStore();
-    store.init(conf);
-    store.start();
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    store.stop();
-    fs.delete(fsWorkingPath, true);
-    fs.close();
-  }
-
-  @Test
-  public void testReadWriteHistoryData() throws IOException {
-    testWriteHistoryData(5);
-    testReadHistoryData(5);
-  }
-
-  private void testWriteHistoryData(int num) throws IOException {
-    testWriteHistoryData(num, false, false);
-  }
-  
-  private void testWriteHistoryData(
-      int num, boolean missingContainer, boolean missingApplicationAttempt)
-          throws IOException {
-    // write application history data
-    for (int i = 1; i <= num; ++i) {
-      ApplicationId appId = ApplicationId.newInstance(0, i);
-      writeApplicationStartData(appId);
-
-      // write application attempt history data
-      for (int j = 1; j <= num; ++j) {
-        ApplicationAttemptId appAttemptId =
-            ApplicationAttemptId.newInstance(appId, j);
-        writeApplicationAttemptStartData(appAttemptId);
-
-        if (missingApplicationAttempt && j == num) {
-          continue;
-        }
-        // write container history data
-        for (int k = 1; k <= num; ++k) {
-          ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
-          writeContainerStartData(containerId);
-          if (missingContainer && k == num) {
-            continue;
-          }
-          writeContainerFinishData(containerId);
-        }
-        writeApplicationAttemptFinishData(appAttemptId);
-      }
-      writeApplicationFinishData(appId);
-    }
-  }
-
-  private void testReadHistoryData(int num) throws IOException {
-    testReadHistoryData(num, false, false);
-  }
-  
-  private void testReadHistoryData(
-      int num, boolean missingContainer, boolean missingApplicationAttempt)
-          throws IOException {
-    // read application history data
-    Assert.assertEquals(num, store.getAllApplications().size());
-    for (int i = 1; i <= num; ++i) {
-      ApplicationId appId = ApplicationId.newInstance(0, i);
-      ApplicationHistoryData appData = store.getApplication(appId);
-      Assert.assertNotNull(appData);
-      Assert.assertEquals(appId.toString(), appData.getApplicationName());
-      Assert.assertEquals(appId.toString(), appData.getDiagnosticsInfo());
-
-      // read application attempt history data
-      Assert.assertEquals(num, store.getApplicationAttempts(appId).size());
-      for (int j = 1; j <= num; ++j) {
-        ApplicationAttemptId appAttemptId =
-            ApplicationAttemptId.newInstance(appId, j);
-        ApplicationAttemptHistoryData attemptData =
-            store.getApplicationAttempt(appAttemptId);
-        Assert.assertNotNull(attemptData);
-        Assert.assertEquals(appAttemptId.toString(), attemptData.getHost());
-        
-        if (missingApplicationAttempt && j == num) {
-          Assert.assertNull(attemptData.getDiagnosticsInfo());
-          continue;
-        } else {
-          Assert.assertEquals(appAttemptId.toString(),
-              attemptData.getDiagnosticsInfo());
-        }
-
-        // read container history data
-        Assert.assertEquals(num, store.getContainers(appAttemptId).size());
-        for (int k = 1; k <= num; ++k) {
-          ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
-          ContainerHistoryData containerData = store.getContainer(containerId);
-          Assert.assertNotNull(containerData);
-          Assert.assertEquals(Priority.newInstance(containerId.getId()),
-            containerData.getPriority());
-          if (missingContainer && k == num) {
-            Assert.assertNull(containerData.getDiagnosticsInfo());
-          } else {
-            Assert.assertEquals(containerId.toString(),
-                containerData.getDiagnosticsInfo());
-          }
-        }
-        ContainerHistoryData masterContainer =
-            store.getAMContainer(appAttemptId);
-        Assert.assertNotNull(masterContainer);
-        Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
-          masterContainer.getContainerId());
-      }
-    }
-  }
-
-  @Test
-  public void testWriteAfterApplicationFinish() throws IOException {
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    writeApplicationStartData(appId);
-    writeApplicationFinishData(appId);
-    // write application attempt history data
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    try {
-      writeApplicationAttemptStartData(appAttemptId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is not opened"));
-    }
-    try {
-      writeApplicationAttemptFinishData(appAttemptId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is not opened"));
-    }
-    // write container history data
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
-    try {
-      writeContainerStartData(containerId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is not opened"));
-    }
-    try {
-      writeContainerFinishData(containerId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is not opened"));
-    }
-  }
-
-  @Test
-  public void testMassiveWriteContainerHistoryData() throws IOException {
-    long mb = 1024 * 1024;
-    long usedDiskBefore = fs.getContentSummary(fsWorkingPath).getLength() / mb;
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    writeApplicationStartData(appId);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    for (int i = 1; i <= 1000; ++i) {
-      ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
-      writeContainerStartData(containerId);
-      writeContainerFinishData(containerId);
-    }
-    writeApplicationFinishData(appId);
-    long usedDiskAfter = fs.getContentSummary(fsWorkingPath).getLength() / mb;
-    Assert.assertTrue((usedDiskAfter - usedDiskBefore) < 20);
-  }
-
-  @Test
-  public void testMissingContainerHistoryData() throws IOException {
-    testWriteHistoryData(3, true, false);
-    testReadHistoryData(3, true, false);
-  }
-  
-  @Test
-  public void testMissingApplicationAttemptHistoryData() throws IOException {
-    testWriteHistoryData(3, false, true);
-    testReadHistoryData(3, false, true);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8329f46b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
deleted file mode 100644
index b4da01a..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-
-import junit.framework.Assert;
-
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
-public class TestMemoryApplicationHistoryStore extends
-    ApplicationHistoryStoreTestUtils {
-
-  @Before
-  public void setup() {
-    store = new MemoryApplicationHistoryStore();
-  }
-
-  @Test
-  public void testReadWriteApplicationHistory() throws Exception {
-    // Out of order
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    try {
-      writeApplicationFinishData(appId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains(
-        "is stored before the start information"));
-    }
-    // Normal
-    int numApps = 5;
-    for (int i = 1; i <= numApps; ++i) {
-      appId = ApplicationId.newInstance(0, i);
-      writeApplicationStartData(appId);
-      writeApplicationFinishData(appId);
-    }
-    Assert.assertEquals(numApps, store.getAllApplications().size());
-    for (int i = 1; i <= numApps; ++i) {
-      appId = ApplicationId.newInstance(0, i);
-      ApplicationHistoryData data = store.getApplication(appId);
-      Assert.assertNotNull(data);
-      Assert.assertEquals(appId.toString(), data.getApplicationName());
-      Assert.assertEquals(appId.toString(), data.getDiagnosticsInfo());
-    }
-    // Write again
-    appId = ApplicationId.newInstance(0, 1);
-    try {
-      writeApplicationStartData(appId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-    try {
-      writeApplicationFinishData(appId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-  }
-
-  @Test
-  public void testReadWriteApplicationAttemptHistory() throws Exception {
-    // Out of order
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    try {
-      writeApplicationAttemptFinishData(appAttemptId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains(
-        "is stored before the start information"));
-    }
-    // Normal
-    int numAppAttempts = 5;
-    writeApplicationStartData(appId);
-    for (int i = 1; i <= numAppAttempts; ++i) {
-      appAttemptId = ApplicationAttemptId.newInstance(appId, i);
-      writeApplicationAttemptStartData(appAttemptId);
-      writeApplicationAttemptFinishData(appAttemptId);
-    }
-    Assert.assertEquals(numAppAttempts, store.getApplicationAttempts(appId)
-      .size());
-    for (int i = 1; i <= numAppAttempts; ++i) {
-      appAttemptId = ApplicationAttemptId.newInstance(appId, i);
-      ApplicationAttemptHistoryData data =
-          store.getApplicationAttempt(appAttemptId);
-      Assert.assertNotNull(data);
-      Assert.assertEquals(appAttemptId.toString(), data.getHost());
-      Assert.assertEquals(appAttemptId.toString(), data.getDiagnosticsInfo());
-    }
-    writeApplicationFinishData(appId);
-    // Write again
-    appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
-    try {
-      writeApplicationAttemptStartData(appAttemptId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-    try {
-      writeApplicationAttemptFinishData(appAttemptId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-  }
-
-  @Test
-  public void testReadWriteContainerHistory() throws Exception {
-    // Out of order
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
-    try {
-      writeContainerFinishData(containerId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains(
-        "is stored before the start information"));
-    }
-    // Normal
-    writeApplicationAttemptStartData(appAttemptId);
-    int numContainers = 5;
-    for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newContainerId(appAttemptId, i);
-      writeContainerStartData(containerId);
-      writeContainerFinishData(containerId);
-    }
-    Assert
-      .assertEquals(numContainers, store.getContainers(appAttemptId).size());
-    for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newContainerId(appAttemptId, i);
-      ContainerHistoryData data = store.getContainer(containerId);
-      Assert.assertNotNull(data);
-      Assert.assertEquals(Priority.newInstance(containerId.getId()),
-        data.getPriority());
-      Assert.assertEquals(containerId.toString(), data.getDiagnosticsInfo());
-    }
-    ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId);
-    Assert.assertNotNull(masterContainer);
-    Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
-      masterContainer.getContainerId());
-    writeApplicationAttemptFinishData(appAttemptId);
-    // Write again
-    containerId = ContainerId.newContainerId(appAttemptId, 1);
-    try {
-      writeContainerStartData(containerId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-    try {
-      writeContainerFinishData(containerId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-  }
-
-  @Test
-  @Ignore
-  public void testMassiveWriteContainerHistory() throws IOException {
-    long mb = 1024 * 1024;
-    Runtime runtime = Runtime.getRuntime();
-    long usedMemoryBefore = (runtime.totalMemory() - runtime.freeMemory()) / mb;
-    int numContainers = 100000;
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    for (int i = 1; i <= numContainers; ++i) {
-      ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
-      writeContainerStartData(containerId);
-      writeContainerFinishData(containerId);
-    }
-    long usedMemoryAfter = (runtime.totalMemory() - runtime.freeMemory()) / mb;
-    Assert.assertTrue((usedMemoryAfter - usedMemoryBefore) < 200);
-  }
-
-}
