http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/MetricsLoadSimulator.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/MetricsLoadSimulator.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/MetricsLoadSimulator.java
deleted file mode 100644
index e85c7a5..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/MetricsLoadSimulator.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Sample Usage:
- * <pre>
- * $ java -cp "dependency/*":LoadSimulator-1.0-SNAPSHOT.jar \
- * org.apache.ambari.metrics.MetricsLoadSimulator \
- * -h "bartosz.laptop" -n 2 -m "162.216.148.45" -c 10000 -s 30000</pre>
- */
-public class MetricsLoadSimulator {
-  private final static Logger LOG = 
LoggerFactory.getLogger(MetricsLoadSimulator
-    .class);
-
-  public static void main(String[] args) throws IOException, 
InterruptedException {
-    Map<String, String> mapArgs = parseArgs(args);
-
-    LoadRunner loadRunner = new LoadRunner(
-      mapArgs.get("hostName"),
-      Integer.valueOf(mapArgs.get("numberOfHosts")),
-      mapArgs.get("metricsHostName"),
-      mapArgs.get("minHostIndex") == null ? 0 : 
Integer.valueOf(mapArgs.get("minHostIndex")),
-      Integer.valueOf(mapArgs.get("collectInterval")),
-      Integer.valueOf(mapArgs.get("sendInterval")),
-      Boolean.valueOf(mapArgs.get("master"))
-    );
-
-    loadRunner.start();
-  }
-
-  /*
-   Another entry point to the test to be called from 
./jmetertest/AMSJMeterLoadTest.java
-   */
-  public static void startTest(Map<String,String> mapArgs) {
-
-    LoadRunner loadRunner = new LoadRunner(
-            mapArgs.get("hostName"),
-            Integer.valueOf(mapArgs.get("numberOfHosts")),
-            mapArgs.get("metricsHostName"),
-            mapArgs.get("minHostIndex") == null ? 0 : 
Integer.valueOf(mapArgs.get("minHostIndex")),
-            Integer.valueOf(mapArgs.get("collectInterval")),
-            Integer.valueOf(mapArgs.get("sendInterval")),
-            Boolean.valueOf(mapArgs.get("master"))
-    );
-
-    loadRunner.start();
-  }
-
-  private static Map<String, String> parseArgs(String[] args) {
-    Map<String, String> mapProps = new HashMap<String, String>();
-    mapProps.put("hostName", "host");
-    mapProps.put("numberOfHosts", "20");
-    mapProps.put("trafficType", "burst");
-    mapProps.put("metricsHostName", "localhost");
-    mapProps.put("collectInterval", "10000");
-    mapProps.put("sendInterval", "60000");
-
-    if (args.length == 0) {
-      printUsage();
-      throw new RuntimeException("Unexpected argument, See usage message.");
-    } else {
-      for (int i = 0; i < args.length; i += 2) {
-        String arg = args[i];
-        if (arg.equals("-h")) {
-          mapProps.put("hostName", args[i + 1]);
-        } else if (arg.equals("-n")) {
-          mapProps.put("numberOfHosts", args[i + 1]);
-        } else if (arg.equals("-t")) {
-          mapProps.put("trafficType", args[i + 1]);
-        } else if (arg.equals("-m")) {
-          mapProps.put("metricsHostName", args[i + 1]);
-        } else if (arg.equals("-c")) {
-          mapProps.put("collectInterval", args[i + 1]);
-        } else if (arg.equals("-s")) {
-          mapProps.put("sendInterval", args[i + 1]);
-        } else if (arg.equals("-M")) {
-          mapProps.put("master", args[i + 1]);
-        } else if (arg.equals("-d")) {
-          // a dummy switch - it says that we agree with defaults
-        } else {
-          printUsage();
-          throw new RuntimeException("Unexpected argument, See usage 
message.");
-        }
-      }
-    }
-
-    LOG.info("Recognized options: baseHostName={} hosts#={} trafficMode={} " +
-        "metricsHostName={} collectIntervalMillis={} sendIntervalMillis={} " +
-        "simulateMaster={}",
-      mapProps.get("hostName"),
-      Integer.valueOf(mapProps.get("numberOfHosts")),
-      mapProps.get("trafficType"),
-      mapProps.get("metricsHostName"),
-      Integer.valueOf(mapProps.get("collectInterval")),
-      Integer.valueOf(mapProps.get("sendInterval")),
-      Boolean.valueOf(mapProps.get("master"))
-    );
-
-    return mapProps;
-  }
-
-  public static void printUsage() {
-    System.err.println("Usage: java MetricsLoadSimulator [OPTIONS]");
-    System.err.println("Options: ");
-    System.err.println("[-h hostName] [-n numberOfHosts] "
-      + "[-t trafficMode {burst, staggered}] [-m metricsHostName] "
-      + "[-c collectIntervalMillis {10 sec}] [-s sendIntervalMillis {60 sec}]"
-      + "[-M simulateMaster {true, false}] ");
-    System.err.println();
-    System.err.println("When you select a master, then one simulated host will 
play");
-    System.err.println("a role of a master, and the rest will be slaves. 
Otherwise");
-    System.err.println("all simulation threads (single thread is for single 
host)");
-    System.err.println("will be slave hosts");
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/MetricsSenderWorker.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/MetricsSenderWorker.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/MetricsSenderWorker.java
deleted file mode 100644
index 71f2bc5..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/MetricsSenderWorker.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator;
-
-
-import java.io.IOException;
-import java.util.concurrent.Callable;
-
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data.AppMetrics;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data.HostMetricsGenerator;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.net.MetricsSender;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.net.RestMetricsSender;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util.Json;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- */
-public class MetricsSenderWorker implements Callable<String> {
-  private final static Logger LOG = 
LoggerFactory.getLogger(RestMetricsSender.class);
-
-  MetricsSender sender;
-  HostMetricsGenerator hmg;
-
-  public MetricsSenderWorker(MetricsSender sender, HostMetricsGenerator 
metricsGenerator) {
-    this.sender = sender;
-    hmg = metricsGenerator;
-  }
-
-  @Override
-  public String call() throws Exception {
-    AppMetrics hostMetrics = hmg.createMetrics();
-
-    try {
-      String request = new Json().serialize(hostMetrics); //inject?
-      String response = sender.pushMetrics(request);
-
-      return response;
-    } catch (IOException e) {
-      LOG.error("Error while pushing metrics: ", e);
-      throw e;
-    }
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/AppID.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/AppID.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/AppID.java
deleted file mode 100644
index 56c3571..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/AppID.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.data;
-
-public enum AppID {
-  HOST("HOST"),
-  NAMENODE("namenode"),
-  RESOURCEMANAGER("resourcemanager"),
-  DATANODE("datanode"),
-  NODEMANAGER("nodemanager"),
-  MASTER_HBASE("hbase"),
-  SLAVE_HBASE("hbase"),
-  NIMBUS("nimbus"),
-  HIVEMETASTORE("hivemetastore"),
-  HIVESERVER2("hiveserver2"),
-  KAFKA_BROKER("kafka_broker");
-
-  public static final AppID[] MASTER_APPS = {HOST, NAMENODE, RESOURCEMANAGER, 
MASTER_HBASE, KAFKA_BROKER, NIMBUS, HIVEMETASTORE, HIVESERVER2};
-  public static final AppID[] SLAVE_APPS = {HOST, DATANODE, NODEMANAGER, 
SLAVE_HBASE};
-
-  private String id;
-
-  private AppID(String id) {
-    this.id = id;
-  }
-
-  public String getId() {
-    return id;
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/AppMetrics.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/AppMetrics.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/AppMetrics.java
deleted file mode 100644
index d9cec2b..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/AppMetrics.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.data;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-/**
- * AppMetrics is a class that helps to create properly initialized metrics for
- * current app. It also holds the
- * metrics and can be serialized to json.
- */
-public class AppMetrics {
-
-  private final Collection<Metric> metrics = new ArrayList<Metric>();
-  private final transient ApplicationInstance applicationId;
-  private final transient long startTime;
-
-  public AppMetrics(ApplicationInstance applicationId, long startTime) {
-    this.applicationId = applicationId;
-    this.startTime = startTime;
-  }
-
-  public Metric createMetric(String metricName) {
-    return new Metric(applicationId, metricName, startTime);
-  }
-
-  public void addMetric(Metric metric) {
-    metrics.add(metric);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/ApplicationInstance.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/ApplicationInstance.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/ApplicationInstance.java
deleted file mode 100644
index d99ecc9..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/ApplicationInstance.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.data;
-
-/**
- * AppId is a helper class that encapsulates the common part of metrics 
message.
- * It contains hostName, appId and instanceId. It is immutable,
- * and it can not hold null values.
- */
-public final class ApplicationInstance {
-
-  private final transient String hostName;
-  private final transient AppID appId;
-  private final transient String instanceId;
-
-  /**
-   * @param hostname
-   * @param appId
-   * @param instanceId
-   */
-  public ApplicationInstance(String hostname, AppID appId, String instanceId) {
-    if (hostname == null || appId == null || instanceId == null)
-      throw new IllegalArgumentException("ApplicationInstance can not be " +
-        "instantiated with null values");
-
-    this.hostName = hostname;
-    this.appId = appId;
-    this.instanceId = instanceId;
-  }
-
-  public String getInstanceId() {
-    return instanceId;
-  }
-
-  public AppID getAppId() {
-    return appId;
-  }
-
-  public String getHostName() {
-    return hostName;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/HostMetricsGenerator.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/HostMetricsGenerator.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/HostMetricsGenerator.java
deleted file mode 100644
index f628f2c..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/HostMetricsGenerator.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data;
-
-
-import java.util.HashMap;
-import java.util.Map;
-
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util.RandomMetricsProvider;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util.TimeStampProvider;
-
-/**
- */
-public class HostMetricsGenerator {
-
-  private Map<String, RandomMetricsProvider> metricDataProviders = new 
HashMap<String, RandomMetricsProvider>();
-  private final TimeStampProvider tsp;
-  private final ApplicationInstance id;
-
-  public HostMetricsGenerator(ApplicationInstance id,
-                              TimeStampProvider timeStamps,
-                              Map<String, RandomMetricsProvider> 
metricDataProviders) {
-    this.id = id;
-    this.tsp = timeStamps;
-    this.metricDataProviders = metricDataProviders;
-  }
-
-  public AppMetrics createMetrics() {
-    long[] timestamps = tsp.timestampsForNextInterval();
-    AppMetrics appMetrics = new AppMetrics(id, timestamps[0]);
-
-    for (Map.Entry<String, RandomMetricsProvider> entry : 
metricDataProviders.entrySet()) {
-      String metricName = entry.getKey();
-      RandomMetricsProvider metricData = entry.getValue();
-
-      Metric metric = appMetrics.createMetric(metricName);
-      for (long timestamp : timestamps) {
-        metric.putMetric(timestamp, String.valueOf(metricData.next()));
-      }
-      appMetrics.addMetric(metric);
-    }
-
-    return appMetrics;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/Metric.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/Metric.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/Metric.java
deleted file mode 100644
index f274263..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/Metric.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-public class Metric {
-
-  private String instanceid;
-  private String hostname;
-  private Map<String, String> metrics = new LinkedHashMap<String, String>();
-  private String starttime;
-  private String appid;
-  private String metricname;
-
-  // i don't like this ctor, but it has to be public for json deserialization
-  public Metric() {
-  }
-
-  public Metric(ApplicationInstance app, String metricName, long startTime) {
-    this.hostname = app.getHostName();
-    this.appid = app.getAppId().getId();
-    this.instanceid = app.getInstanceId();
-    this.metricname = metricName;
-    this.starttime = Long.toString(startTime);
-  }
-
-  public void putMetric(long timestamp, String value) {
-    metrics.put(Long.toString(timestamp), value);
-  }
-
-  public String getInstanceid() {
-    return instanceid;
-  }
-
-  public String getHostname() {
-    return hostname;
-  }
-
-  public Map<String, String> getMetrics() {
-    return metrics;
-  }
-
-  public String getStarttime() {
-    return starttime;
-  }
-
-  public String getAppid() {
-    return appid;
-  }
-
-  public String getMetricname() {
-    return metricname;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/MetricsGeneratorConfigurer.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/MetricsGeneratorConfigurer.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/MetricsGeneratorConfigurer.java
deleted file mode 100644
index b315541..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/MetricsGeneratorConfigurer.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data;
-
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.HashMap;
-import java.util.Map;
-
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util.RandomMetricsProvider;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util.TimeStampProvider;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * MetricsGeneratorConfigurer is a factory that reads metrics definition from 
a file,
- * and returns an Single HostMetricsGenerator. Check createMetricsForHost 
method
- * for details.
- */
-public class MetricsGeneratorConfigurer {
-
-  private final static Logger LOG = LoggerFactory.getLogger
-    (MetricsGeneratorConfigurer.class);
-
-  /**
-   * Creates HostMetricsGenerator configured with metric names loaded from 
file.
-   *
-   * @param id         ApplicationInstance descriptor, will be used to create
-   *                   HostMetricsGenerator, cannot be null
-   * @param timeStamps configured TimeStampProvider that can provide next
-   *                   timestamp, cannot be null
-   * @return HostMetricsGenerator with given ApplicationInstance id and 
configured
-   * mapping of
-   * metric names to data providers
-   */
-  public static HostMetricsGenerator createMetricsForHost(
-    ApplicationInstance id,
-    TimeStampProvider timeStamps) {
-    return new HostMetricsGenerator(id, timeStamps, 
readMetrics(id.getAppId()));
-  }
-
-  private static Map<String, RandomMetricsProvider> readMetrics(AppID type) {
-    InputStream input = null;
-    Map<String, RandomMetricsProvider> metrics =
-      new HashMap<String, RandomMetricsProvider>();
-    String fileName = "metrics_def/" + type.toString() + ".dat";
-
-    try {
-      LOG.info("Loading " + fileName);
-
-      input = MetricsGeneratorConfigurer.class.getClassLoader()
-        .getResourceAsStream(fileName);
-
-      BufferedReader reader = new BufferedReader(new InputStreamReader(input));
-
-      String line;
-      while ((line = reader.readLine()) != null) {
-        metrics.put(line.trim(), new RandomMetricsProvider(100, 200));
-      }
-
-    } catch (IOException e) {
-      LOG.error("Cannot read file " + fileName + " for appID " + 
type.toString(), e);
-    } finally {
-      if (input != null) {
-        try {
-          input.close();
-        } catch (IOException ex) {
-          // intentionally left blank, here we cannot do anything
-        }
-      }
-    }
-
-    return metrics;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/MetricsSender.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/MetricsSender.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/MetricsSender.java
deleted file mode 100644
index 35c0fc3..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/MetricsSender.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.net;
-
-/**
- * MetricSender should provides a simple way of pushing metrics to some 
service.
- */
-public interface MetricsSender {
-  /**
-   * Push metrics to the metric service (e.g. a metrics storage system).
-   *
-   * @param payload the payload to be sent to metrics service
-   * @return response message either acknowledgement or error
-   */
-  String pushMetrics(String payload);
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
deleted file mode 100644
index 8eb3fec..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.net;
-
-import java.io.IOException;
-import java.util.concurrent.TimeUnit;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Stopwatch;
-
-/**
- * Implements MetricsSender and provides a way of pushing metrics to 
application metrics history service using REST
- * endpoint.
- */
-public class RestMetricsSender implements MetricsSender {
-  private final static Logger LOG = 
LoggerFactory.getLogger(RestMetricsSender.class);
-
-  private final static String COLLECTOR_URL = 
"http://%s/ws/v1/timeline/metrics";;
-  private final String collectorServiceAddress;
-
-  /**
-   * Creates unconnected RestMetricsSender with endpoint configured as
-   * http://${metricsHost}:6188/ws/v1/timeline/metrics,
-   * where ${metricsHost} is specified by metricHost param.
-   *
-   * @param metricsHost the hostname that will be used to access application 
metrics history service.
-   */
-  public RestMetricsSender(String metricsHost) {
-    collectorServiceAddress = String.format(COLLECTOR_URL, metricsHost);
-  }
-
-  /**
-   * Push metrics to the REST endpoint. Connection is always open and closed 
on every call.
-   *
-   * @param payload the payload with metrics to be sent to metrics service
-   * @return response message either acknowledgement or error, empty on 
exception
-   */
-  @Override
-  public String pushMetrics(String payload) {
-    String responseString = "";
-    UrlService svc = null;
-    Stopwatch timer = new Stopwatch().start();
-
-    try {
-      LOG.info("server: {}", collectorServiceAddress);
-
-      svc = getConnectedUrlService();
-      responseString = svc.send(payload);
-
-      timer.stop();
-      LOG.info("http response time: " + timer.elapsed(TimeUnit.MILLISECONDS)
-        + " ms");
-
-      if (responseString.length() > 0) {
-        LOG.debug("POST response from server: " + responseString);
-      }
-    } catch (IOException e) {
-      LOG.error("", e);
-    } finally {
-      if (svc != null) {
-        svc.disconnect();
-      }
-    }
-
-    return responseString;
-  }
-
-  /**
-   * Relaxed to protected for testing.
-   */
-  protected UrlService getConnectedUrlService() throws IOException {
-    return UrlService.newConnection(collectorServiceAddress);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/StdOutMetricsSender.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/StdOutMetricsSender.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/StdOutMetricsSender.java
deleted file mode 100644
index aeb4ca8..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/StdOutMetricsSender.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.net;
-
-import java.io.PrintStream;
-
-/**
- * StdOutMetricsSender dumps metrics to defined PrintStream out. It is useful 
for testing.
- */
-public class StdOutMetricsSender implements MetricsSender {
-  public final PrintStream out;
-  private String metricsHostName;
-
-  /**
-   * Creates new StdOutMetricsSender with specified hostname (only used in 
messages) and sends output to System.out
-   *
-   * @param metricsHostName a name used in printed messages
-   */
-  public StdOutMetricsSender(String metricsHostName) {
-    this(metricsHostName, System.out);
-  }
-
-  /**
-   * Creates new StdOutMetricsSender with specified hostname (only used in 
messages) and PrintStream which is used as
-   * an output.
-   *
-   * @param metricsHostName a name used in printed messages
-   * @param out             PrintStream that the Sender will write to, can be 
System.out
-   */
-  public StdOutMetricsSender(String metricsHostName, PrintStream out) {
-    this.metricsHostName = metricsHostName;
-    this.out = out;
-  }
-
-  @Override
-  public String pushMetrics(String payload) {
-    out.println("Sending to " + metricsHostName + ": " + payload);
-
-    return "OK";
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/UrlService.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/UrlService.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/UrlService.java
deleted file mode 100644
index 7402438..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/UrlService.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.net;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.OutputStreamWriter;
-import java.net.HttpURLConnection;
-import java.net.URL;
-
/**
 * Thin wrapper around HttpURLConnection for POSTing a JSON payload to a URL
 * and reading back the response body.
 */
public class UrlService {

  public static final int CONNECT_TIMEOUT = 20000;
  public static final int READ_TIMEOUT = 20000;
  private final String address;
  private HttpURLConnection conn;

  private UrlService(String address) {
    this.address = address;
  }

  /**
   * Returns a new UrlService connected to the specified address.
   *
   * @param address the URL of the service endpoint
   * @return a connected UrlService
   * @throws IOException if the address is malformed or the connection cannot
   *                     be opened
   */
  public static UrlService newConnection(String address) throws IOException {
    UrlService svc = new UrlService(address);
    svc.connect();

    return svc;
  }

  /**
   * Opens an HTTP connection to the configured address, set up for a JSON POST.
   *
   * @return the configured connection
   * @throws IOException if the URL is malformed or opening the connection fails
   */
  public HttpURLConnection connect() throws IOException {
    URL url = new URL(address);
    conn = (HttpURLConnection) url.openConnection();

    //TODO: make timeouts configurable
    conn.setConnectTimeout(CONNECT_TIMEOUT);
    conn.setReadTimeout(READ_TIMEOUT);
    conn.setDoInput(true);
    conn.setDoOutput(true);
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Content-Type", "application/json");
    conn.setRequestProperty("Accept", "*/*");

    return conn;
  }

  /**
   * Writes the payload to the connection and returns the response body.
   *
   * @param payload the request body to POST
   * @return the response body, concatenated without line separators
   * @throws IOException if writing or reading fails
   * @throws IllegalStateException if the service was never connected
   */
  public String send(String payload) throws IOException {
    if (conn == null) {
      throw new IllegalStateException("Cannot use unconnected UrlService");
    }
    write(payload);

    return read();
  }

  private String read() throws IOException {
    StringBuilder response = new StringBuilder();

    // try-with-resources guarantees the stream is closed even if readLine
    // throws; an explicit charset avoids depending on the platform default.
    try (BufferedReader br = new BufferedReader(new InputStreamReader(
        conn.getInputStream(), "UTF-8"))) {
      String line;
      while ((line = br.readLine()) != null) {
        response.append(line);
      }
    }

    return response.toString();
  }

  private void write(String payload) throws IOException {
    // try-with-resources closes (and therefore flushes) the writer even on
    // failure, so the connection is not left with a dangling output stream.
    try (OutputStreamWriter writer = new OutputStreamWriter(conn.getOutputStream(),
        "UTF-8")) {
      writer.write(payload);
    }
  }

  /** Closes the underlying connection; safe to call even if never connected. */
  public void disconnect() {
    if (conn != null) {
      conn.disconnect();
    }
  }
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/Json.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/Json.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/Json.java
deleted file mode 100644
index 982f48c..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/Json.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.util;
-
-import java.io.IOException;
-
-import org.codehaus.jackson.annotate.JsonAutoDetect;
-import org.codehaus.jackson.annotate.JsonMethod;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.SerializationConfig;
-
-/**
- * Small wrapper that configures the ObjectMapper with some defaults.
- */
-public class Json {
-  private ObjectMapper myObjectMapper;
-
-  /**
-   * Creates default Json ObjectMapper that maps fields.
-   */
-  public Json() {
-    this(false);
-  }
-
-  /**
-   * Creates a Json ObjectMapper that maps fields and optionally pretty prints 
the
-   * serialized objects.
-   *
-   * @param pretty a flag - if true the output will be pretty printed.
-   */
-  public Json(boolean pretty) {
-    myObjectMapper = new ObjectMapper();
-    myObjectMapper.setVisibility(JsonMethod.FIELD, 
JsonAutoDetect.Visibility.ANY);
-    if (pretty) {
-      myObjectMapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, 
true);
-    }
-  }
-
-  public String serialize(Object o) throws IOException {
-    return myObjectMapper.writeValueAsString(o);
-  }
-
-  public <T> T deserialize(String content, Class<T> paramClass) throws 
IOException {
-    return myObjectMapper.readValue(content, paramClass);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/RandomMetricsProvider.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/RandomMetricsProvider.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/RandomMetricsProvider.java
deleted file mode 100644
index 7910711..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/RandomMetricsProvider.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.util;
-
-import java.util.Random;
-
/**
 * Supplies pseudo-random metric values uniformly distributed in [min, max).
 */
public class RandomMetricsProvider {

  private final double min;
  private final double max;
  private final Random rnd;

  /**
   * @param min inclusive lower bound of generated values
   * @param max exclusive upper bound of generated values
   * @throws IllegalArgumentException if max is smaller than min
   */
  public RandomMetricsProvider(double min, double max) {
    // Validate up front; otherwise next() would silently produce values
    // outside the requested range.
    if (max < min) {
      throw new IllegalArgumentException(
        "max (" + max + ") must not be smaller than min (" + min + ")");
    }
    this.min = min;
    this.max = max;
    this.rnd = new Random();
  }

  /**
   * @return the next pseudo-random value in [min, max)
   */
  public double next() {
    return rnd.nextDouble() * (max - min) + min;
  }
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TimeStampProvider.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TimeStampProvider.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TimeStampProvider.java
deleted file mode 100644
index ad7ec86..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TimeStampProvider.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.util;
-
/**
 * Produces a strictly increasing sequence of timestamps that advances by a
 * fixed step, and can hand out all timestamps covering one send interval.
 */
public class TimeStampProvider {
  private final int step;
  private final int interval;
  // Primed one step behind startTime so the first next() yields startTime.
  private long clock;

  public TimeStampProvider(long startTime, int timeStep, int sendInterval) {
    this.step = timeStep;
    this.interval = sendInterval;
    this.clock = startTime - timeStep;
  }

  /** @return the next timestamp, one step after the previous one */
  public long next() {
    clock += step;
    return clock;
  }

  /** @return all timestamps covering the configured send interval */
  public long[] timestampsForNextInterval() {
    return timestampsForInterval(interval);
  }

  // One timestamp per whole step that fits into the given interval.
  private long[] timestampsForInterval(int sendInterval) {
    final int count = sendInterval / step;
    final long[] stamps = new long[count];

    for (int i = 0; i < count; i++) {
      stamps[i] = next();
    }

    return stamps;
  }
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricsService.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricsService.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricsService.java
deleted file mode 100644
index c148a43..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricsService.java
+++ /dev/null
@@ -1,594 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.DEFAULT_TOPN_HOSTS_LIMIT;
-import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.USE_GROUPBY_AGGREGATOR_QUERIES;
-import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.availability.AggregationTaskRunner.ACTUAL_AGGREGATOR_NAMES;
-
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.UnknownHostException;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.commons.collections.MapUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metrics2.sink.timeline.AggregationResult;
-import org.apache.hadoop.metrics2.sink.timeline.ContainerMetric;
-import org.apache.hadoop.metrics2.sink.timeline.MetricHostAggregate;
-import org.apache.hadoop.metrics2.sink.timeline.Precision;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
-import 
org.apache.hadoop.metrics2.sink.timeline.TimelineMetricWithAggregatedValues;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.apache.hadoop.metrics2.sink.timeline.TopNConfig;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregator;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregatorFactory;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.availability.AggregationTaskRunner.AGGREGATOR_NAME;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.availability.MetricCollectorHAController;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricHostMetadata;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.function.SeriesAggregateFunction;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.function.TimelineMetricsSeriesAggregateFunction;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.function.TimelineMetricsSeriesAggregateFunctionFactory;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.ConditionBuilder;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.TopNCondition;
-
-import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.Multimap;
-
-public class HBaseTimelineMetricsService extends AbstractService implements 
TimelineMetricStore {
-
-  static final Log LOG = LogFactory.getLog(HBaseTimelineMetricsService.class);
-  private final TimelineMetricConfiguration configuration;
-  private TimelineMetricDistributedCache cache;
-  private PhoenixHBaseAccessor hBaseAccessor;
-  private static volatile boolean isInitialized = false;
-  private final ScheduledExecutorService watchdogExecutorService = 
Executors.newSingleThreadScheduledExecutor();
-  private final Map<AGGREGATOR_NAME, ScheduledExecutorService> 
scheduledExecutors = new HashMap<>();
-  private final ConcurrentHashMap<String, Long> postedAggregatedMap = new 
ConcurrentHashMap<>();
-  private TimelineMetricMetadataManager metricMetadataManager;
-  private Integer defaultTopNHostsLimit;
-  private MetricCollectorHAController haController;
-  private boolean containerMetricsDisabled = false;
-
  /**
   * Construct the service.
   *
   * @param configuration metrics configuration used to drive initialization
   *                      (HA mode, aggregation, watchdog settings)
   */
  public HBaseTimelineMetricsService(TimelineMetricConfiguration configuration) {
    super(HBaseTimelineMetricsService.class.getName());
    this.configuration = configuration;
  }
-
  /**
   * Service lifecycle hook: runs the parent init, then brings up the metrics
   * subsystem (schema, metadata, aggregators, watchdog).
   */
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    super.serviceInit(conf);
    initializeSubsystem();
  }
-
  /**
   * Starts the distributed cache node used for in-memory aggregation.
   *
   * @return a new Ignite-backed distributed cache
   */
  private TimelineMetricDistributedCache startCacheNode() throws MalformedURLException, URISyntaxException {
    //TODO: make the cache implementation configurable instead of hard-coding Ignite
    return new TimelineMetricsIgniteCache();
  }
-
-
  /**
   * One-time initialization of the whole metrics subsystem, in order:
   * HBase/Phoenix schema, metadata manager, retention policies, optional HA
   * controller, metric filters, the cluster- and host-level aggregator
   * threads, and the store watchdog. Subsequent calls are no-ops once
   * {@code isInitialized} is set.
   *
   * NOTE(review): {@code isInitialized} is a static flag but this method is
   * synchronized on the instance, so two distinct instances would not be
   * mutually excluded — presumably the service is a singleton; confirm.
   */
  private synchronized void initializeSubsystem() {
    if (!isInitialized) {
      hBaseAccessor = new PhoenixHBaseAccessor(null);
      // Initialize schema
      hBaseAccessor.initMetricSchema();
      // Initialize metadata from store
      try {
        metricMetadataManager = new TimelineMetricMetadataManager(hBaseAccessor);
      } catch (MalformedURLException | URISyntaxException e) {
        throw new ExceptionInInitializerError("Unable to initialize metadata manager");
      }
      metricMetadataManager.initializeMetadata();
      // Initialize policies before TTL update
      hBaseAccessor.initPoliciesAndTTL();
      // Start HA service: bring up the controller unless distributed mode is off
      if (!configuration.isDistributedCollectorModeDisabled()) {
        haController = new MetricCollectorHAController(configuration);
        try {
          haController.initializeHAController();
        } catch (Exception e) {
          LOG.error(e);
          throw new MetricsSystemInitializationException("Unable to " +
            "initialize HA controller", e);
        }
      } else {
        LOG.info("Distributed collector mode disabled");
      }

      // Initialize whitelisting & blacklisting if needed
      TimelineMetricsFilter.initializeMetricFilter(configuration);

      Configuration metricsConf = null;
      try {
        metricsConf = configuration.getMetricsConf();
      } catch (Exception e) {
        throw new ExceptionInInitializerError("Cannot initialize configuration.");
      }

      // Optional in-memory aggregation cache node
      if (configuration.isCollectorInMemoryAggregationEnabled()) {
        try {
          cache = startCacheNode();
        } catch (Exception e) {
          throw new MetricsSystemInitializationException("Unable to " +
              "start cache node", e);
        }
      }

      defaultTopNHostsLimit = Integer.parseInt(metricsConf.get(DEFAULT_TOPN_HOSTS_LIMIT, "20"));
      if (Boolean.parseBoolean(metricsConf.get(USE_GROUPBY_AGGREGATOR_QUERIES, "true"))) {
        LOG.info("Using group by aggregators for aggregating host and cluster metrics.");
      }

      // Start the cluster aggregator second
      TimelineMetricAggregator secondClusterAggregator =
        TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(
          hBaseAccessor, metricsConf, metricMetadataManager, haController, cache);
      scheduleAggregatorThread(secondClusterAggregator);

      // Start the minute cluster aggregator
      TimelineMetricAggregator minuteClusterAggregator =
        TimelineMetricAggregatorFactory.createTimelineClusterAggregatorMinute(
          hBaseAccessor, metricsConf, metricMetadataManager, haController);
      scheduleAggregatorThread(minuteClusterAggregator);

      // Start the hourly cluster aggregator
      TimelineMetricAggregator hourlyClusterAggregator =
        TimelineMetricAggregatorFactory.createTimelineClusterAggregatorHourly(
          hBaseAccessor, metricsConf, metricMetadataManager, haController);
      scheduleAggregatorThread(hourlyClusterAggregator);

      // Start the daily cluster aggregator
      TimelineMetricAggregator dailyClusterAggregator =
        TimelineMetricAggregatorFactory.createTimelineClusterAggregatorDaily(
          hBaseAccessor, metricsConf, metricMetadataManager, haController);
      scheduleAggregatorThread(dailyClusterAggregator);

      // Start the minute host aggregator; with host in-memory aggregation
      // enabled, the collector only aggregates hosts that did not post their
      // own aggregates (tracked via postedAggregatedMap).
      if (configuration.isHostInMemoryAggregationEnabled()) {
        LOG.info("timeline.metrics.host.inmemory.aggregation is set to True, switching to filtering host minute aggregation on collector");
        TimelineMetricAggregator minuteHostAggregator =
          TimelineMetricAggregatorFactory.createFilteringTimelineMetricAggregatorMinute(
            hBaseAccessor, metricsConf, metricMetadataManager, haController, postedAggregatedMap);
        scheduleAggregatorThread(minuteHostAggregator);
      } else {
        TimelineMetricAggregator minuteHostAggregator =
          TimelineMetricAggregatorFactory.createTimelineMetricAggregatorMinute(
            hBaseAccessor, metricsConf, metricMetadataManager, haController);
        scheduleAggregatorThread(minuteHostAggregator);
      }

      // Start the hourly host aggregator
      TimelineMetricAggregator hourlyHostAggregator =
        TimelineMetricAggregatorFactory.createTimelineMetricAggregatorHourly(
          hBaseAccessor, metricsConf, metricMetadataManager, haController);
      scheduleAggregatorThread(hourlyHostAggregator);

      // Start the daily host aggregator
      TimelineMetricAggregator dailyHostAggregator =
        TimelineMetricAggregatorFactory.createTimelineMetricAggregatorDaily(
          hBaseAccessor, metricsConf, metricMetadataManager, haController);
      scheduleAggregatorThread(dailyHostAggregator);

      if (!configuration.isTimelineMetricsServiceWatcherDisabled()) {
        int initDelay = configuration.getTimelineMetricsServiceWatcherInitDelay();
        int delay = configuration.getTimelineMetricsServiceWatcherDelay();
        // Start the watchdog
        watchdogExecutorService.scheduleWithFixedDelay(
          new TimelineMetricStoreWatcher(this, configuration),
          initDelay, delay, TimeUnit.SECONDS);
        LOG.info("Started watchdog for timeline metrics store with initial " +
          "delay = " + initDelay + ", delay = " + delay);
      }
      containerMetricsDisabled = configuration.isContainerMetricsDisabled();
      isInitialized = true;
    }

  }
-
  /**
   * Service lifecycle hook: delegates to the parent stop. No subsystem-specific
   * teardown is performed here (aggregator executors are not shut down).
   */
  @Override
  protected void serviceStop() throws Exception {
    super.serviceStop();
  }
-
-  @Override
-  public TimelineMetrics getTimelineMetrics(List<String> metricNames,
-      List<String> hostnames, String applicationId, String instanceId,
-      Long startTime, Long endTime, Precision precision, Integer limit,
-      boolean groupedByHosts, TopNConfig topNConfig, String 
seriesAggregateFunction) throws SQLException, IOException {
-
-    if (metricNames == null || metricNames.isEmpty()) {
-      throw new IllegalArgumentException("No metric name filter specified.");
-    }
-    if ((startTime == null && endTime != null)
-        || (startTime != null && endTime == null)) {
-      throw new IllegalArgumentException("Open ended query not supported ");
-    }
-    if (limit != null && limit > PhoenixHBaseAccessor.RESULTSET_LIMIT){
-      throw new IllegalArgumentException("Limit too big");
-    }
-
-    TimelineMetricsSeriesAggregateFunction seriesAggrFunctionInstance = null;
-    if (!StringUtils.isEmpty(seriesAggregateFunction)) {
-      SeriesAggregateFunction func = 
SeriesAggregateFunction.getFunction(seriesAggregateFunction);
-      seriesAggrFunctionInstance = 
TimelineMetricsSeriesAggregateFunctionFactory.newInstance(func);
-    }
-
-    Multimap<String, List<Function>> metricFunctions =
-      parseMetricNamesToAggregationFunctions(metricNames);
-
-    List<byte[]> uuids = 
metricMetadataManager.getUuids(metricFunctions.keySet(), hostnames, 
applicationId, instanceId);
-
-    ConditionBuilder conditionBuilder = new ConditionBuilder(new 
ArrayList<String>(metricFunctions.keySet()))
-      .hostnames(hostnames)
-      .appId(applicationId)
-      .instanceId(instanceId)
-      .startTime(startTime)
-      .endTime(endTime)
-      .precision(precision)
-      .limit(limit)
-      .grouped(groupedByHosts)
-      .uuid(uuids);
-
-    if (topNConfig != null) {
-      if (TopNCondition.isTopNHostCondition(metricNames, hostnames) ^ //Only 1 
condition should be true.
-        TopNCondition.isTopNMetricCondition(metricNames, hostnames)) {
-        conditionBuilder.topN(topNConfig.getTopN());
-        conditionBuilder.isBottomN(topNConfig.getIsBottomN());
-        Function.ReadFunction readFunction = 
Function.ReadFunction.getFunction(topNConfig.getTopNFunction());
-        Function function = new Function(readFunction, null);
-        conditionBuilder.topNFunction(function);
-      } else {
-        LOG.info("Invalid Input for TopN query. Ignoring TopN Request.");
-      }
-    } else if (startTime != null && hostnames != null && hostnames.size() > 
defaultTopNHostsLimit) {
-      // if (timeseries query AND hostnames passed AND size(hostnames) > limit)
-      LOG.info("Requesting data for more than " + defaultTopNHostsLimit +  " 
Hosts. " +
-        "Defaulting to Top " + defaultTopNHostsLimit);
-      conditionBuilder.topN(defaultTopNHostsLimit);
-      conditionBuilder.isBottomN(false);
-    }
-
-    Condition condition = conditionBuilder.build();
-
-    TimelineMetrics metrics;
-
-    if (hostnames == null || hostnames.isEmpty()) {
-      metrics = hBaseAccessor.getAggregateMetricRecords(condition, 
metricFunctions);
-    } else {
-      metrics = hBaseAccessor.getMetricRecords(condition, metricFunctions);
-    }
-
-    metrics = postProcessMetrics(metrics);
-
-    if (metrics.getMetrics().size() == 0) {
-      return metrics;
-    }
-
-    return seriesAggregateMetrics(seriesAggrFunctionInstance, metrics);
-  }
-
-  private TimelineMetrics postProcessMetrics(TimelineMetrics metrics) {
-    List<TimelineMetric> metricsList = metrics.getMetrics();
-
-    for (TimelineMetric metric : metricsList){
-      String name = metric.getMetricName();
-      if (name.contains("._rate")){
-        updateValuesAsRate(metric.getMetricValues(), false);
-      } else if (name.contains("._diff")) {
-        updateValuesAsRate(metric.getMetricValues(), true);
-      }
-    }
-
-    return metrics;
-  }
-
-  private TimelineMetrics 
seriesAggregateMetrics(TimelineMetricsSeriesAggregateFunction 
seriesAggrFuncInstance,
-      TimelineMetrics metrics) {
-    if (seriesAggrFuncInstance != null) {
-      TimelineMetric appliedMetric = seriesAggrFuncInstance.apply(metrics);
-      metrics.setMetrics(Collections.singletonList(appliedMetric));
-    }
-    return metrics;
-  }
-
-  static Map<Long, Double> updateValuesAsRate(Map<Long, Double> metricValues, 
boolean isDiff) {
-    Long prevTime = null;
-    Double prevVal = null;
-    long step;
-    Double diff;
-
-    for(Iterator<Map.Entry<Long, Double>> it = 
metricValues.entrySet().iterator(); it.hasNext(); ) {
-      Map.Entry<Long, Double> timeValueEntry = it.next();
-      Long currTime = timeValueEntry.getKey();
-      Double currVal = timeValueEntry.getValue();
-
-      if (prevTime != null) {
-        step = currTime - prevTime;
-        diff = currVal - prevVal;
-        if (diff < 0) {
-          it.remove(); //Discard calculating rate when the metric counter has 
been reset.
-        } else {
-          Double rate = isDiff ? diff : (diff / 
TimeUnit.MILLISECONDS.toSeconds(step));
-          timeValueEntry.setValue(rate);
-        }
-      } else {
-        it.remove();
-      }
-
-      prevTime = currTime;
-      prevVal = currVal;
-    }
-
-    return metricValues;
-  }
-
-  static Multimap<String, List<Function>> 
parseMetricNamesToAggregationFunctions(List<String> metricNames) {
-    Multimap<String, List<Function>> metricsFunctions = 
ArrayListMultimap.create();
-
-    for (String metricName : metricNames){
-      Function function = Function.DEFAULT_VALUE_FUNCTION;
-      String cleanMetricName = metricName;
-
-      try {
-        function = Function.fromMetricName(metricName);
-        int functionStartIndex = metricName.indexOf("._");
-        if (functionStartIndex > 0) {
-          cleanMetricName = metricName.substring(0, functionStartIndex);
-        }
-      } catch (Function.FunctionFormatException ffe){
-        // unknown function so
-        // fallback to VALUE, and fullMetricName
-      }
-
-      List<Function>  functionsList = new ArrayList<>();
-      functionsList.add(function);
-      metricsFunctions.put(cleanMetricName, functionsList);
-    }
-
-    return metricsFunctions;
-  }
-
-  @Override
-  public TimelinePutResponse putMetrics(TimelineMetrics metrics) throws 
SQLException, IOException {
-    // Error indicated by the Sql exception
-    TimelinePutResponse response = new TimelinePutResponse();
-
-    hBaseAccessor.insertMetricRecordsWithMetadata(metricMetadataManager, 
metrics, false);
-
-    if (configuration.isCollectorInMemoryAggregationEnabled()) {
-      cache.putMetrics(metrics.getMetrics(), metricMetadataManager);
-    }
-
-    return response;
-  }
-
-  @Override
-  public TimelinePutResponse putContainerMetrics(List<ContainerMetric> metrics)
-      throws SQLException, IOException {
-
-    if (containerMetricsDisabled) {
-      LOG.debug("Ignoring submitted container metrics according to 
configuration. Values will not be stored.");
-      return new TimelinePutResponse();
-    }
-
-    hBaseAccessor.insertContainerMetrics(metrics);
-    return new TimelinePutResponse();
-  }
-
-  @Override
-  public Map<String, List<TimelineMetricMetadata>> 
getTimelineMetricMetadata(String appId, String metricPattern,
-                                                                             
boolean includeBlacklistedMetrics) throws SQLException, IOException {
-    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
-      metricMetadataManager.getMetadataCache();
-
-    boolean filterByAppId = StringUtils.isNotEmpty(appId);
-    boolean filterByMetricName = StringUtils.isNotEmpty(metricPattern);
-    Pattern metricFilterPattern = null;
-    if (filterByMetricName) {
-      metricFilterPattern = Pattern.compile(metricPattern);
-    }
-
-    // Group Metadata by AppId
-    Map<String, List<TimelineMetricMetadata>> metadataByAppId = new 
HashMap<>();
-    for (TimelineMetricMetadata metricMetadata : metadata.values()) {
-
-      if (!includeBlacklistedMetrics && !metricMetadata.isWhitelisted()) {
-        continue;
-      }
-
-      String currentAppId = metricMetadata.getAppId();
-      if (filterByAppId && !currentAppId.equals(appId)) {
-        continue;
-      }
-
-      if (filterByMetricName) {
-        Matcher m = 
metricFilterPattern.matcher(metricMetadata.getMetricName());
-        if (!m.find()) {
-          continue;
-        }
-      }
-
-      List<TimelineMetricMetadata> metadataList = 
metadataByAppId.get(currentAppId);
-      if (metadataList == null) {
-        metadataList = new ArrayList<>();
-        metadataByAppId.put(currentAppId, metadataList);
-      }
-
-      metadataList.add(metricMetadata);
-    }
-
-    return metadataByAppId;
-  }
-
-  @Override
-  public byte[] getUuid(String metricName, String appId, String instanceId, 
String hostname) throws SQLException, IOException {
-    return metricMetadataManager.getUuid(metricName, appId, instanceId, 
hostname);
-  }
-
-  @Override
-  public Map<String, Set<String>> getHostAppsMetadata() throws SQLException, 
IOException {
-    Map<String, TimelineMetricHostMetadata> hostsMetadata = 
metricMetadataManager.getHostedAppsCache();
-    Map<String, Set<String>> hostAppMap = new HashMap<>();
-    for (String hostname : hostsMetadata.keySet()) {
-      hostAppMap.put(hostname, 
hostsMetadata.get(hostname).getHostedApps().keySet());
-    }
-    return hostAppMap;
-  }
-
-  @Override
-  public TimelinePutResponse putHostAggregatedMetrics(AggregationResult 
aggregationResult) throws SQLException, IOException {
-    Map<TimelineMetric, MetricHostAggregate> aggregateMap = new HashMap<>();
-    String hostname = null;
-    for (TimelineMetricWithAggregatedValues entry : 
aggregationResult.getResult()) {
-      aggregateMap.put(entry.getTimelineMetric(), entry.getMetricAggregate());
-      hostname = hostname == null ? entry.getTimelineMetric().getHostName() : 
hostname;
-      break;
-    }
-    long timestamp = aggregationResult.getTimeInMilis();
-    postedAggregatedMap.put(hostname, timestamp);
-    if (LOG.isDebugEnabled()) {
-      LOG.debug(String.format("Adding host %s to aggregated by in-memory 
aggregator. Timestamp : %s", hostname, timestamp));
-    }
-    hBaseAccessor.saveHostAggregateRecords(aggregateMap, 
PhoenixTransactSQL.METRICS_AGGREGATE_MINUTE_TABLE_NAME);
-
-
-    return new TimelinePutResponse();
-  }
-
-  @Override
-  public Map<String, Map<String,Set<String>>> getInstanceHostsMetadata(String 
instanceId, String appId) throws SQLException, IOException {
-    Map<String, Set<String>> hostedApps = getHostAppsMetadata();
-    Map<String, Set<String>> instanceHosts = 
metricMetadataManager.getHostedInstanceCache();
-    if (configuration.getTimelineMetricsMultipleClusterSupport()) {
-      instanceHosts = metricMetadataManager.getHostedInstanceCache();
-    }
-    Map<String, Map<String, Set<String>>> instanceAppHosts = new HashMap<>();
-
-    if (MapUtils.isEmpty(instanceHosts)) {
-      Map<String, Set<String>> appHostMap = new HashMap<String, Set<String>>();
-      for (String host : hostedApps.keySet()) {
-        for (String app : hostedApps.get(host)) {
-          if (!appHostMap.containsKey(app)) {
-            appHostMap.put(app, new HashSet<String>());
-          }
-          appHostMap.get(app).add(host);
-        }
-      }
-      instanceAppHosts.put("", appHostMap);
-    } else {
-      for (String instance : instanceHosts.keySet()) {
-
-        if (StringUtils.isNotEmpty(instanceId) && 
!instance.equals(instanceId)) {
-          continue;
-        }
-        Map<String, Set<String>> appHostMap = new  HashMap<String, 
Set<String>>();
-        instanceAppHosts.put(instance, appHostMap);
-
-        Set<String> hostsWithInstance = instanceHosts.get(instance);
-        for (String host : hostsWithInstance) {
-          for (String app : hostedApps.get(host)) {
-            if (StringUtils.isNotEmpty(appId) && !app.equals(appId)) {
-              continue;
-            }
-
-            if (!appHostMap.containsKey(app)) {
-              appHostMap.put(app, new HashSet<String>());
-            }
-            appHostMap.get(app).add(host);
-          }
-        }
-      }
-    }
-
-    return instanceAppHosts;
-  }
-
-  @Override
-  public List<String> getLiveInstances() {
-
-    List<String> instances = null;
-    try {
-        if (haController == null) {
-          // Always return current host as live (embedded operation mode)
-          return 
Collections.singletonList(configuration.getInstanceHostnameFromEnv());
-        }
-        instances = haController.getLiveInstanceHostNames();
-        if (instances == null || instances.isEmpty()) {
-          // fallback
-          instances = 
Collections.singletonList(configuration.getInstanceHostnameFromEnv());
-        }
-      } catch (UnknownHostException e) {
-        LOG.debug("Exception on getting hostname from env.", e);
-      }
-    return instances;
-  }
-
-  private void scheduleAggregatorThread(final TimelineMetricAggregator 
aggregator) {
-    if (!aggregator.isDisabled()) {
-      ScheduledExecutorService executorService = 
Executors.newSingleThreadScheduledExecutor(
-        new ThreadFactory() {
-          @Override
-          public Thread newThread(Runnable r) {
-            return new Thread(r, 
ACTUAL_AGGREGATOR_NAMES.get(aggregator.getName()));
-          }
-        }
-      );
-      scheduledExecutors.put(aggregator.getName(), executorService);
-      executorService.scheduleAtFixedRate(aggregator,
-        0l,
-        aggregator.getSleepIntervalMillis(),
-        TimeUnit.MILLISECONDS);
-      LOG.info("Scheduled aggregator thread " + aggregator.getName() + " every 
" +
-        + aggregator.getSleepIntervalMillis() + " milliseconds.");
-    } else {
-      LOG.info("Skipped scheduling " + aggregator.getName() + " since it is 
disabled.");
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsCacheCommitterThread.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsCacheCommitterThread.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsCacheCommitterThread.java
deleted file mode 100644
index d858b84..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsCacheCommitterThread.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-public class MetricsCacheCommitterThread implements Runnable {
-
-    private static final Log LOG = 
LogFactory.getLog(MetricsCacheCommitterThread.class);
-    private static PhoenixHBaseAccessor phoenixHBaseAccessor;
-
-    public MetricsCacheCommitterThread(PhoenixHBaseAccessor 
phoenixHBaseAccessor) {
-        this.phoenixHBaseAccessor = phoenixHBaseAccessor;
-    }
-    @Override
-    public void run() {
-        LOG.debug("Checking if metrics cache is empty");
-        if (!phoenixHBaseAccessor.isInsertCacheEmpty()) {
-            phoenixHBaseAccessor.commitMetricsFromCache();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsSystemInitializationException.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsSystemInitializationException.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsSystemInitializationException.java
deleted file mode 100644
index 7bd4a32..0000000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsSystemInitializationException.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .timeline;
-
-/**
- * RuntimeException for initialization of metrics schema. It is 
RuntimeException
- * since this is a not recoverable situation, and should be handled by main or
- * service method followed by shutdown.
- */
-public class MetricsSystemInitializationException extends RuntimeException {
-  public MetricsSystemInitializationException() {
-  }
-
-  public MetricsSystemInitializationException(String msg) {
-    super(msg);
-  }
-
-  public MetricsSystemInitializationException(Throwable t) {
-    super(t);
-  }
-
-  public MetricsSystemInitializationException(String msg, Throwable t) {
-    super(msg, t);
-  }
-
-}

Reply via email to