http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UgiJerseyBinding.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UgiJerseyBinding.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UgiJerseyBinding.java
deleted file mode 100644
index b3fdef9..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UgiJerseyBinding.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.rest;
-
-import com.google.common.base.Preconditions;
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.UniformInterfaceException;
-import com.sun.jersey.api.client.config.ClientConfig;
-import com.sun.jersey.api.client.config.DefaultClientConfig;
-import com.sun.jersey.api.json.JSONConfiguration;
-import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
-import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.URL;
-
-/**
- * Class to bond to a Jersey client, for UGI integration and SPNEGO.
- * <p>
- *   Usage: create an instance, then when creating a Jersey <code>Client</code>
- *   pass in to the constructor the handler provided by {@link #getHandler()}
- *
- * see <a href="https://jersey.java.net/apidocs/1.17/jersey/com/sun/jersey/client/urlconnection/HttpURLConnectionFactory.html">Jersey docs</a>
- */
-public class UgiJerseyBinding implements
-    HttpURLConnectionFactory {
-  private static final Logger log =
-      LoggerFactory.getLogger(UgiJerseyBinding.class);
-
-  private final UrlConnectionOperations operations;
-  private final URLConnectionClientHandler handler;
-
-  /**
-   * Construct an instance
-   * @param operations operations instance
-   */
-  @SuppressWarnings("ThisEscapedInObjectConstruction")
-  public UgiJerseyBinding(UrlConnectionOperations operations) {
-    Preconditions.checkArgument(operations != null, "Null operations");
-    this.operations = operations;
-    handler = new URLConnectionClientHandler(this);
-  }
-
-  /**
-   * Create an instance off the configuration. The SPNEGO policy
-   * is derived from the current UGI settings.
-   * @param conf config
-   */
-  public UgiJerseyBinding(Configuration conf) {
-    this(new UrlConnectionOperations(conf));
-  }
-
-  /**
-   * Get a URL connection. 
-   * @param url URL to connect to
-   * @return the connection
-   * @throws IOException any problem. {@link AuthenticationException} 
-   * errors are wrapped
-   */
-  @Override
-  public HttpURLConnection getHttpURLConnection(URL url) throws IOException {
-    try {
-      // open a connection handling status codes and so redirections
-      // but as it opens a connection, it's less useful than you think.
-
-      return operations.openConnection(url);
-    } catch (AuthenticationException e) {
-      throw new IOException(e);
-    }
-  }
-
-  public UrlConnectionOperations getOperations() {
-    return operations;
-  }
-
-  public URLConnectionClientHandler getHandler() {
-    return handler;
-  }
-  
-  /**
-   * Get the SPNEGO flag (as found in the operations instance
-   * @return the spnego policy
-   */
-  public boolean isUseSpnego() {
-    return operations.isUseSpnego();
-  }
-
-
-  /**
-   * Uprate error codes 400 and up into faults; 
-   * <p>
-   * see {@link ExceptionConverter#convertJerseyException(String, String, UniformInterfaceException)}
-   */
-  public static IOException uprateFaults(HttpVerb verb, String url,
-      UniformInterfaceException ex)
-      throws IOException {
-    return ExceptionConverter.convertJerseyException(verb.getVerb(),
-        url, ex);
-  }
-
-  /**
-   * Create the standard Jersey client Config
-   * @return the recommended Jersey Client config
-   */
-  public ClientConfig createJerseyClientConfig() {
-    ClientConfig clientConfig = new DefaultClientConfig();
-    clientConfig.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, true);
-    return clientConfig;
-  }
-
-  /**
-   * Create a jersey client bonded to this handler, using the
-   * supplied client config
-   * @param clientConfig client configuratin
-   * @return a new client instance to use
-   */
-  public Client createJerseyClient(ClientConfig clientConfig) {
-    return new Client(getHandler(), clientConfig);
-  }
-
-  /**
-   * Create a jersey client bonded to this handler, using the
-   * client config created with {@link #createJerseyClientConfig()}
-   * @return a new client instance to use
-   */
-  public Client createJerseyClient() {
-    return createJerseyClient(createJerseyClientConfig());
-  }
-
-}
-
-

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UrlConnectionOperations.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UrlConnectionOperations.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UrlConnectionOperations.java
deleted file mode 100644
index d7f768e..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UrlConnectionOperations.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.rest;
-
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.URL;
-
-/**
- * Operations on the JDK UrlConnection class.
- *
- */
-public class UrlConnectionOperations extends Configured  {
-  private static final Logger log =
-      LoggerFactory.getLogger(UrlConnectionOperations.class);
-
-  private SliderURLConnectionFactory connectionFactory;
-
-  private boolean useSpnego = false;
-
-  /**
-   * Create an instance off the configuration. The SPNEGO policy
-   * is derived from the current UGI settings.
-   * @param conf config
-   */
-  public UrlConnectionOperations(Configuration conf) {
-    super(conf);
-    connectionFactory = SliderURLConnectionFactory.newInstance(conf);
-    if (UserGroupInformation.isSecurityEnabled()) {
-      log.debug("SPNEGO is enabled");
-      setUseSpnego(true);
-    }
-  }
-
-
-  public boolean isUseSpnego() {
-    return useSpnego;
-  }
-
-  public void setUseSpnego(boolean useSpnego) {
-    this.useSpnego = useSpnego;
-  }
-
-  /**
-   * Opens a url with cache disabled, redirect handled in 
-   * (JDK) implementation.
-   *
-   * @param url to open
-   * @return URLConnection
-   * @throws IOException
-   * @throws AuthenticationException authentication failure
-   */
-  public HttpURLConnection openConnection(URL url) throws
-      IOException,
-      AuthenticationException {
-    Preconditions.checkArgument(url.getPort() != 0, "no port");
-    return (HttpURLConnection) connectionFactory.openConnection(url, useSpnego);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/ServiceMonitor.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/ServiceMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/ServiceMonitor.java
deleted file mode 100644
index 98a76ea..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/ServiceMonitor.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.servicemonitor;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.service.ServiceContext;
-import org.apache.hadoop.yarn.service.component.Component;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
-import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
-import org.apache.hadoop.yarn.service.component.ComponentEvent;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstanceEvent;
-import org.apache.hadoop.yarn.service.component.ComponentState;
-import org.apache.hadoop.yarn.service.servicemonitor.probe.ProbeStatus;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Map;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-import static org.apache.hadoop.yarn.service.compinstance.ComponentInstanceState.RUNNING_BUT_UNREADY;
-import static org.apache.hadoop.yarn.service.component.ComponentEventType.FLEX;
-import static org.apache.hadoop.yarn.service.compinstance.ComponentInstanceEventType.BECOME_NOT_READY;
-import static org.apache.hadoop.yarn.service.compinstance.ComponentInstanceEventType.BECOME_READY;
-import static org.apache.hadoop.yarn.service.compinstance.ComponentInstanceState.READY;
-import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.CONTAINER_FAILURE_WINDOW;
-import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.DEFAULT_READINESS_CHECK_INTERVAL;
-import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.READINESS_CHECK_INTERVAL;
-
-public class ServiceMonitor extends AbstractService {
-
-  private static final Logger LOG =
-      LoggerFactory.getLogger(ServiceMonitor.class);
-
-  public ScheduledExecutorService executorService;
-  private  Map<ContainerId, ComponentInstance> liveInstances = null;
-  private ServiceContext context;
-  private Configuration conf;
-
-  public ServiceMonitor(String name, ServiceContext context) {
-    super(name);
-    liveInstances = context.scheduler.getLiveInstances();
-    this.context = context;
-  }
-
-  @Override
-  public void serviceInit(Configuration conf) throws Exception {
-    executorService = Executors.newScheduledThreadPool(1);
-    this.conf = conf;
-    super.serviceInit(conf);
-  }
-
-  @Override
-  public void serviceStart() throws Exception {
-    long readinessCheckInterval = YarnServiceConf
-        .getLong(READINESS_CHECK_INTERVAL, DEFAULT_READINESS_CHECK_INTERVAL,
-            context.application.getConfiguration(), conf);
-
-    executorService
-        .scheduleAtFixedRate(new ReadinessChecker(), readinessCheckInterval,
-            readinessCheckInterval, TimeUnit.SECONDS);
-
-    // Default 6 hours.
-    long failureResetInterval = YarnServiceConf
-        .getLong(CONTAINER_FAILURE_WINDOW, 21600,
-            context.application.getConfiguration(), conf);
-
-    executorService
-        .scheduleAtFixedRate(new ContainerFailureReset(), failureResetInterval,
-            failureResetInterval, TimeUnit.SECONDS);
-  }
-
-  @Override
-  public void serviceStop() throws Exception {
-    if (executorService != null) {
-      executorService.shutdownNow();
-    }
-  }
-
-  private class ReadinessChecker implements Runnable {
-
-    @Override
-    public void run() {
-
-      // check if the comp instance are ready
-      for (Map.Entry<ContainerId, ComponentInstance> entry : liveInstances
-          .entrySet()) {
-        ComponentInstance instance = entry.getValue();
-
-        ProbeStatus status = instance.ping();
-        if (status.isSuccess()) {
-          if (instance.getState() == RUNNING_BUT_UNREADY) {
-            // synchronously update the state.
-            instance.handle(
-                new ComponentInstanceEvent(entry.getKey(), BECOME_READY));
-          }
-        } else {
-          if (instance.getState() == READY) {
-            instance.handle(
-                new ComponentInstanceEvent(entry.getKey(), BECOME_NOT_READY));
-          }
-        }
-      }
-
-      for (Component component : context.scheduler.getAllComponents()
-          .values()) {
-        // If comp hasn't started yet and its dependencies are satisfied
-        if (component.getState() == ComponentState.INIT && component
-            .areDependenciesReady()) {
-          LOG.info("[COMPONENT {}]: Dependencies satisfied, ramping up.",
-              component.getName());
-          ComponentEvent event = new ComponentEvent(component.getName(), FLEX)
-              .setDesired(component.getComponentSpec().getNumberOfContainers());
-          component.handle(event);
-        }
-      }
-    }
-  }
-
-  private class ContainerFailureReset implements Runnable {
-    @Override
-    public void run() {
-      for (Component component : context.scheduler.getAllComponents().values()) {
-        component.resetCompFailureCount();
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/HttpProbe.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/HttpProbe.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/HttpProbe.java
deleted file mode 100644
index 10c1160..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/HttpProbe.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.servicemonitor.probe;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.Map;
-
-public class HttpProbe extends Probe {
-  protected static final Logger log = LoggerFactory.getLogger(HttpProbe.class);
-
-  private static final String HOST_TOKEN = "${THIS_HOST}";
-
-  private final String urlString;
-  private final int timeout;
-  private final int min, max;
-
-
-  public HttpProbe(String url, int timeout, int min, int max, Configuration
-      conf) {
-    super("Http probe of " + url + " [" + min + "-" + max + "]", conf);
-    this.urlString = url;
-    this.timeout = timeout;
-    this.min = min;
-    this.max = max;
-  }
-
-  public static HttpProbe create(Map<String, String> props)
-      throws IOException {
-    String urlString = getProperty(props, WEB_PROBE_URL, null);
-    new URL(urlString);
-    int timeout = getPropertyInt(props, WEB_PROBE_CONNECT_TIMEOUT,
-        WEB_PROBE_CONNECT_TIMEOUT_DEFAULT);
-    int minSuccess = getPropertyInt(props, WEB_PROBE_MIN_SUCCESS,
-        WEB_PROBE_MIN_SUCCESS_DEFAULT);
-    int maxSuccess = getPropertyInt(props, WEB_PROBE_MAX_SUCCESS,
-        WEB_PROBE_MAX_SUCCESS_DEFAULT);
-    return new HttpProbe(urlString, timeout, minSuccess, maxSuccess, null);
-  }
-
-
-  private static HttpURLConnection getConnection(URL url, int timeout) throws
-      IOException {
-    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
-    connection.setInstanceFollowRedirects(true);
-    connection.setConnectTimeout(timeout);
-    return connection;
-  }
-
-  @Override
-  public ProbeStatus ping(ComponentInstance instance) {
-    ProbeStatus status = new ProbeStatus();
-    ContainerStatus containerStatus = instance.getContainerStatus();
-    if (containerStatus == null || SliderUtils.isEmpty(containerStatus.getIPs())
-        || StringUtils.isEmpty(containerStatus.getHost())) {
-      status.fail(this, new IOException("IP is not available yet"));
-      return status;
-    }
-
-    String ip = containerStatus.getIPs().get(0);
-    HttpURLConnection connection = null;
-    try {
-      URL url = new URL(urlString.replace(HOST_TOKEN, ip));
-      connection = getConnection(url, this.timeout);
-      int rc = connection.getResponseCode();
-      if (rc < min || rc > max) {
-        String error = "Probe " + url + " error code: " + rc;
-        log.info(error);
-        status.fail(this,
-            new IOException(error));
-      } else {
-        status.succeed(this);
-      }
-    } catch (Throwable e) {
-      String error = "Probe " + urlString + " failed for IP " + ip + ": " + e;
-      log.info(error, e);
-      status.fail(this,
-          new IOException(error, e));
-    } finally {
-      if (connection != null) {
-        connection.disconnect();
-      }
-    }
-    return status;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/LogEntryBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/LogEntryBuilder.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/LogEntryBuilder.java
deleted file mode 100644
index b575d69..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/LogEntryBuilder.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.servicemonitor.probe;
-
-/**
- * Build up log entries for ease of splunk
- */
-public class LogEntryBuilder {
-
-  private final StringBuilder builder = new StringBuilder();
-
-  public LogEntryBuilder() {
-  }
-
-  public LogEntryBuilder(String text) {
-    elt(text);
-  }
-
-
-  public LogEntryBuilder(String name, Object value) {
-    entry(name, value);
-  }
-
-  public LogEntryBuilder elt(String text) {
-    addComma();
-    builder.append(text);
-    return this;
-  }
-
-  public LogEntryBuilder elt(String name, Object value) {
-    addComma();
-    entry(name, value);
-    return this;
-  }
-
-  private void addComma() {
-    if (!isEmpty()) {
-      builder.append(", ");
-    }
-  }
-
-  private void entry(String name, Object value) {
-    builder.append(name).append('=');
-    if (value != null) {
-      builder.append('"').append(value.toString()).append('"');
-    } else {
-      builder.append("null");
-    }
-  }
-
-  @Override
-  public String toString() {
-    return builder.toString();
-  }
-
-  private boolean isEmpty() {
-    return builder.length() == 0;
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorKeys.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorKeys.java
deleted file mode 100644
index f5f3d99..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorKeys.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.servicemonitor.probe;
-
-/**
- * Config keys for monitoring
- */
-public interface MonitorKeys {
-
-  /**
-   * Port probing key : port to attempt to create a TCP connection to {@value}.
-   */
-  String PORT_PROBE_PORT = "port";
-  /**
-   * Port probing key : timeout for the the connection attempt {@value}.
-   */
-  String PORT_PROBE_CONNECT_TIMEOUT = "timeout";
-  /**
-   * Port probing default : timeout for the connection attempt {@value}.
-   */
-  int PORT_PROBE_CONNECT_TIMEOUT_DEFAULT = 1000;
-
-  /**
-   * Web probing key : URL {@value}.
-   */
-  String WEB_PROBE_URL = "url";
-  /**
-   * Web probing key : min success code {@value}.
-   */
-  String WEB_PROBE_MIN_SUCCESS = "min.success";
-  /**
-   * Web probing key : max success code {@value}.
-   */
-  String WEB_PROBE_MAX_SUCCESS = "max.success";
-  /**
-   * Web probing default : min successful response code {@value}.
-   */
-  int WEB_PROBE_MIN_SUCCESS_DEFAULT = 200;
-  /**
-   * Web probing default : max successful response code {@value}.
-   */
-  int WEB_PROBE_MAX_SUCCESS_DEFAULT = 299;
-  /**
-   * Web probing key : timeout for the connection attempt {@value}
-   */
-  String WEB_PROBE_CONNECT_TIMEOUT = "timeout";
-  /**
-   * Port probing default : timeout for the connection attempt {@value}.
-   */
-  int WEB_PROBE_CONNECT_TIMEOUT_DEFAULT = 1000;
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorUtils.java
deleted file mode 100644
index 46d1fdb..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorUtils.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.servicemonitor.probe;
-
-import org.apache.hadoop.yarn.service.api.records.ReadinessCheck;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Formatter;
-import java.util.Locale;
-
-/**
- * Various utils to work with the monitor
- */
-public final class MonitorUtils {
-  protected static final Logger LOG = LoggerFactory.getLogger(MonitorUtils
-      .class);
-
-  private MonitorUtils() {
-  }
-
-  public static String toPlural(int val) {
-    return val != 1 ? "s" : "";
-  }
-
-  /**
-   * Convert milliseconds to human time -the exact format is unspecified
-   * @param milliseconds a time in milliseconds
-   * @return a time that is converted to human intervals
-   */
-  public static String millisToHumanTime(long milliseconds) {
-    StringBuilder sb = new StringBuilder();
-    // Send all output to the Appendable object sb
-    Formatter formatter = new Formatter(sb, Locale.US);
-
-    long s = Math.abs(milliseconds / 1000);
-    long m = Math.abs(milliseconds % 1000);
-    if (milliseconds > 0) {
-      formatter.format("%d.%03ds", s, m);
-    } else if (milliseconds == 0) {
-      formatter.format("0");
-    } else {
-      formatter.format("-%d.%03ds", s, m);
-    }
-    return sb.toString();
-  }
-
-  public static Probe getProbe(ReadinessCheck readinessCheck) {
-    if (readinessCheck == null) {
-      return null;
-    }
-    if (readinessCheck.getType() == null) {
-      return null;
-    }
-    try {
-      switch (readinessCheck.getType()) {
-      case HTTP:
-        return HttpProbe.create(readinessCheck.getProps());
-      case PORT:
-        return PortProbe.create(readinessCheck.getProps());
-      default:
-        return null;
-      }
-    } catch (Throwable t) {
-      throw new IllegalArgumentException("Error creating readiness check " +
-          t);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/PortProbe.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/PortProbe.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/PortProbe.java
deleted file mode 100644
index f6cf3ae..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/PortProbe.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.servicemonitor.probe;
-
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.Socket;
-import java.util.Map;
-
-/**
- * Probe for a port being open.
- */
-public class PortProbe extends Probe {
-  protected static final Logger log = LoggerFactory.getLogger(PortProbe.class);
-  private final int port;
-  private final int timeout;
-
-  public PortProbe(int port, int timeout) {
-    super("Port probe of " + port + " for " + timeout + "ms", null);
-    this.port = port;
-    this.timeout = timeout;
-  }
-
-  public static PortProbe create(Map<String, String> props)
-      throws IOException {
-    int port = getPropertyInt(props, PORT_PROBE_PORT, null);
-
-    if (port >= 65536) {
-      throw new IOException(PORT_PROBE_PORT + " " + port + " is out of " +
-          "range");
-    }
-
-    int timeout = getPropertyInt(props, PORT_PROBE_CONNECT_TIMEOUT,
-        PORT_PROBE_CONNECT_TIMEOUT_DEFAULT);
-
-    return new PortProbe(port, timeout);
-  }
-
-  /**
-   * Try to connect to the (host,port); a failure to connect within
-   * the specified timeout is a failure.
-   * @param instance role instance
-   * @return the outcome
-   */
-  @Override
-  public ProbeStatus ping(ComponentInstance instance) {
-    ProbeStatus status = new ProbeStatus();
-
-    if (instance.getContainerStatus() == null || SliderUtils
-        .isEmpty(instance.getContainerStatus().getIPs())) {
-      status.fail(this, new IOException(
-          instance.getCompInstanceName() + ": IP is not available yet"));
-      return status;
-    }
-
-    String ip = instance.getContainerStatus().getIPs().get(0);
-    InetSocketAddress sockAddr = new InetSocketAddress(ip, port);
-    Socket socket = new Socket();
-    try {
-      if (log.isDebugEnabled()) {
-        log.debug(instance.getCompInstanceName() + ": Connecting " + sockAddr
-            .toString() + ", timeout=" + MonitorUtils
-            .millisToHumanTime(timeout));
-      }
-      socket.connect(sockAddr, timeout);
-      status.succeed(this);
-    } catch (Throwable e) {
-      String error =
-          instance.getCompInstanceName() + ": Probe " + sockAddr + " failed";
-      log.debug(error, e);
-      status.fail(this, new IOException(error, e));
-    } finally {
-      IOUtils.closeSocket(socket);
-    }
-    return status;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/Probe.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/Probe.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/Probe.java
deleted file mode 100644
index b851fb7..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/Probe.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.servicemonitor.probe;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * Base class of all probes.
- */
-public abstract class Probe implements MonitorKeys {
-
-  protected final Configuration conf;
-  private String name;
-
-  /**
-   * Create a probe of a specific name
-   *
-   * @param name probe name
-   * @param conf configuration being stored.
-   */
-  public Probe(String name, Configuration conf) {
-    this.name = name;
-    this.conf = conf;
-  }
-
-
-  protected void setName(String name) {
-    this.name = name;
-  }
-
-  public String getName() {
-    return name;
-  }
-
-
-  @Override
-  public String toString() {
-    return getName();
-  }
-
-  public static String getProperty(Map<String, String> props, String name,
-      String defaultValue) throws IOException {
-    String value = props.get(name);
-    if (StringUtils.isEmpty(value)) {
-      if (defaultValue == null) {
-        throw new IOException(name + " not specified");
-      }
-      return defaultValue;
-    }
-    return value;
-  }
-
-  public static int getPropertyInt(Map<String, String> props, String name,
-      Integer defaultValue) throws IOException {
-    String value = props.get(name);
-    if (StringUtils.isEmpty(value)) {
-      if (defaultValue == null) {
-        throw new IOException(name + " not specified");
-      }
-      return defaultValue;
-    }
-    return Integer.parseInt(value);
-  }
-
-  /**
-   * perform any prelaunch initialization
-   */
-  public void init() throws IOException {
-
-  }
-
-  /**
-   * Ping the endpoint. All exceptions must be caught and included in the
-   * (failure) status.
-   *
-   * @param instance instance to ping
-   * @return the status
-   */
-  public abstract ProbeStatus ping(ComponentInstance instance);
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/ProbeStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/ProbeStatus.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/ProbeStatus.java
deleted file mode 100644
index 7cd761c..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/ProbeStatus.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.servicemonitor.probe;
-
-import java.io.Serializable;
-import java.util.Date;
-
-/**
- * Status message of a probe. This is designed to be sent over the wire, though the exception
- * Had better be unserializable at the far end if that is to work.
- */
-public final class ProbeStatus implements Serializable {
-  private static final long serialVersionUID = 165468L;
-
-  private long timestamp;
-  private String timestampText;
-  private boolean success;
-  private boolean realOutcome;
-  private String message;
-  private Throwable thrown;
-  private transient Probe originator;
-
-  public ProbeStatus() {
-  }
-
-  public ProbeStatus(long timestamp, String message, Throwable thrown) {
-    this.success = false;
-    this.message = message;
-    this.thrown = thrown;
-    setTimestamp(timestamp);
-  }
-
-  public ProbeStatus(long timestamp, String message) {
-    this.success = true;
-    setTimestamp(timestamp);
-    this.message = message;
-    this.thrown = null;
-  }
-
-  public long getTimestamp() {
-    return timestamp;
-  }
-
-  public void setTimestamp(long timestamp) {
-    this.timestamp = timestamp;
-    timestampText = new Date(timestamp).toString();
-  }
-
-  public boolean isSuccess() {
-    return success;
-  }
-
-  /**
-   * Set both the success and the real outcome bits to the same value
-   * @param success the new value
-   */
-  public void setSuccess(boolean success) {
-    this.success = success;
-    realOutcome = success;
-  }
-
-  public String getTimestampText() {
-    return timestampText;
-  }
-
-  public boolean getRealOutcome() {
-    return realOutcome;
-  }
-
-  public String getMessage() {
-    return message;
-  }
-
-  public void setMessage(String message) {
-    this.message = message;
-  }
-
-  public Throwable getThrown() {
-    return thrown;
-  }
-
-  public void setThrown(Throwable thrown) {
-    this.thrown = thrown;
-  }
-
-  /**
-   * Get the probe that generated this result. May be null
-   * @return a possibly null reference to a probe
-   */
-  public Probe getOriginator() {
-    return originator;
-  }
-
-  /**
-   * The probe has succeeded -capture the current timestamp, set
-   * success to true, and record any other data needed.
-   * @param probe probe
-   */
-  public void succeed(Probe probe) {
-    finish(probe, true, probe.getName(), null);
-  }
-
-  /**
-   * A probe has failed either because the test returned false, or an exception
-   * was thrown. The {@link #success} field is set to false, any exception 
-   * thrown is recorded.
-   * @param probe probe that failed
-   * @param thrown an exception that was thrown.
-   */
-  public void fail(Probe probe, Throwable thrown) {
-    finish(probe, false, "Failure in " + probe, thrown);
-  }
-
-  public void finish(Probe probe, boolean succeeded, String text, Throwable thrown) {
-    setTimestamp(System.currentTimeMillis());
-    setSuccess(succeeded);
-    originator = probe;
-    message = text;
-    this.thrown = thrown;
-  }
-
-  @Override
-  public String toString() {
-    LogEntryBuilder builder = new LogEntryBuilder("Probe Status");
-    builder.elt("time", timestampText)
-           .elt("outcome", (success ? "success" : "failure"));
-
-    if (success != realOutcome) {
-      builder.elt("originaloutcome", (realOutcome ? "success" : "failure"));
-    }
-    builder.elt("message", message);
-    if (thrown != null) {
-      builder.elt("exception", thrown);
-    }
-
-    return builder.toString();
-  }
-
-  /**
-   * Flip the success bit on while the real outcome bit is kept false
-   */
-  public void markAsSuccessful() {
-    success = true;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceMetricsSink.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceMetricsSink.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceMetricsSink.java
deleted file mode 100644
index cf4e836..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceMetricsSink.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *       http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.timelineservice;
-
-import org.apache.commons.configuration2.SubsetConfiguration;
-import org.apache.hadoop.metrics2.MetricsRecord;
-import org.apache.hadoop.metrics2.MetricsSink;
-import org.apache.hadoop.metrics2.MetricsTag;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Write the metrics to a ATSv2. Generally, this class is instantiated via
- * hadoop-metrics2 property files. Specifically, you would create this class by
- * adding the following to by This would actually be set as: <code>
- * [prefix].sink.[some instance name].class
- * =org.apache.hadoop.yarn.service.timelineservice.ServiceMetricsSink
- * </code>, where <tt>prefix</tt> is "atsv2": and <tt>some instance name</tt> is
- * just any unique name, so properties can be differentiated if there are
- * multiple sinks of the same type created
- */
-public class ServiceMetricsSink implements MetricsSink {
-
-  private static final Logger log =
-      LoggerFactory.getLogger(ServiceMetricsSink.class);
-
-  private ServiceTimelinePublisher serviceTimelinePublisher;
-
-  public ServiceMetricsSink() {
-
-  }
-
-  public ServiceMetricsSink(ServiceTimelinePublisher publisher) {
-    serviceTimelinePublisher = publisher;
-  }
-
-  /**
-   * Publishes service and component metrics to ATS.
-   */
-  @Override
-  public void putMetrics(MetricsRecord record) {
-    if (serviceTimelinePublisher.isStopped()) {
-      log.warn("ServiceTimelinePublisher has stopped. "
-          + "Not publishing any more metrics to ATS.");
-      return;
-    }
-
-    boolean isServiceMetrics = false;
-    boolean isComponentMetrics = false;
-    String appId = null;
-    for (MetricsTag tag : record.tags()) {
-      if (tag.name().equals("type") && tag.value().equals("service")) {
-        isServiceMetrics = true;
-      } else if (tag.name().equals("type") && tag.value().equals("component")) {
-        isComponentMetrics = true;
-        break; // if component metrics, no more information required from tag so
-               // break the loop
-      } else if (tag.name().equals("appId")) {
-        appId = tag.value();
-      }
-    }
-
-    if (isServiceMetrics && appId != null) {
-      if (log.isDebugEnabled()) {
-        log.debug("Publishing service metrics. " + record);
-      }
-      serviceTimelinePublisher.publishMetrics(record.metrics(), appId,
-          ServiceTimelineEntityType.SERVICE_ATTEMPT.toString(),
-          record.timestamp());
-    } else if (isComponentMetrics) {
-      if (log.isDebugEnabled()) {
-        log.debug("Publishing Component metrics. " + record);
-      }
-      serviceTimelinePublisher.publishMetrics(record.metrics(), record.name(),
-          ServiceTimelineEntityType.COMPONENT.toString(), record.timestamp());
-    }
-  }
-
-  @Override
-  public void init(SubsetConfiguration conf) {
-  }
-
-  @Override
-  public void flush() {
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineEntityType.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineEntityType.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineEntityType.java
deleted file mode 100644
index d5c9539..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineEntityType.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *       http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.timelineservice;
-
-/**
- * Slider entities that are published to ATS.
- */
-public enum ServiceTimelineEntityType {
-  /**
-   * Used for publishing service entity information.
-   */
-  SERVICE_ATTEMPT,
-
-  /**
-   * Used for publishing component entity information.
-   */
-  COMPONENT,
-
-  /**
-   * Used for publishing component instance entity information.
-   */
-  COMPONENT_INSTANCE
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineEvent.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineEvent.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineEvent.java
deleted file mode 100644
index 7f7f9a1..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineEvent.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *       http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.timelineservice;
-
-/**
- * Events that are used to store in ATS.
- */
-public enum ServiceTimelineEvent {
-  SERVICE_ATTEMPT_REGISTERED,
-
-  SERVICE_ATTEMPT_UNREGISTERED,
-
-  COMPONENT_INSTANCE_REGISTERED,
-
-  COMPONENT_INSTANCE_UNREGISTERED,
-
-  COMPONENT_INSTANCE_UPDATED
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineMetricsConstants.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineMetricsConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineMetricsConstants.java
deleted file mode 100644
index 78a7171..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelineMetricsConstants.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *       http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.timelineservice;
-
-/**
- * Constants which are stored as key in ATS
- */
-public final class ServiceTimelineMetricsConstants {
-
-  public static final String URI = "URI";
-
-  public static final String NAME = "NAME";
-
-  public static final String STATE = "STATE";
-
-  public static final String EXIT_STATUS_CODE = "EXIT_STATUS_CODE";
-
-  public static final String EXIT_REASON = "EXIT_REASON";
-
-  public static final String DIAGNOSTICS_INFO = "DIAGNOSTICS_INFO";
-
-  public static final String LAUNCH_TIME = "LAUNCH_TIME";
-
-  public static final String QUICK_LINKS = "QUICK_LINKS";
-
-  public static final String LAUNCH_COMMAND = "LAUNCH_COMMAND";
-
-  public static final String TOTAL_CONTAINERS = "NUMBER_OF_CONTAINERS";
-
-  public static final String RUNNING_CONTAINERS =
-      "NUMBER_OF_RUNNING_CONTAINERS";
-
-  /**
-   * Artifacts constants.
-   */
-  public static final String ARTIFACT_ID = "ARTIFACT_ID";
-
-  public static final String ARTIFACT_TYPE = "ARTIFACT_TYPE";
-
-  public static final String ARTIFACT_URI = "ARTIFACT_URI";
-
-  /**
-   * Resource constants.
-   */
-  public static final String RESOURCE_CPU = "RESOURCE_CPU";
-
-  public static final String RESOURCE_MEMORY = "RESOURCE_MEMORY";
-
-  public static final String RESOURCE_PROFILE = "RESOURCE_PROFILE";
-
-  /**
-   * Component instance constants.
-   */
-  public static final String IP = "IP";
-
-  public static final String HOSTNAME = "HOSTNAME";
-
-  public static final String BARE_HOST = "BARE_HOST";
-
-  public static final String COMPONENT_NAME = "COMPONENT_NAME";
-
-  public static final String COMPONENT_INSTANCE_NAME = "COMPONENT_INSTANCE_NAME";
-
-  /**
-   * Component constants.
-   */
-  public static final String DEPENDENCIES = "DEPENDENCIES";
-
-  public static final String DESCRIPTION = "DESCRIPTION";
-
-  public static final String RUN_PRIVILEGED_CONTAINER =
-      "RUN_PRIVILEGED_CONTAINER";
-
-  public static final String PLACEMENT_POLICY = "PLACEMENT_POLICY";
-
-}
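
For reference, a minimal sketch of how the event enum and the metric-constant
keys above were combined when building an ATSv2 entity. It assumes the
ServiceTimelineEntityType enum from the same (deleted) package and the ATSv2
record classes from hadoop-yarn-api; the serviceId and name values are
placeholders.

  import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
  import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;

  import java.util.HashMap;
  import java.util.Map;

  public class TimelineEntitySketch {
    public static TimelineEntity serviceAttemptEntity(String serviceId, String name) {
      long now = System.currentTimeMillis();

      TimelineEntity entity = new TimelineEntity();
      entity.setId(serviceId);
      entity.setType(ServiceTimelineEntityType.SERVICE_ATTEMPT.toString());
      entity.setCreatedTime(now);

      // Info keys come from ServiceTimelineMetricsConstants.
      Map<String, Object> info = new HashMap<>();
      info.put(ServiceTimelineMetricsConstants.NAME, name);
      info.put(ServiceTimelineMetricsConstants.LAUNCH_TIME, now);
      entity.addInfo(info);

      // Event ids come from the ServiceTimelineEvent enum.
      TimelineEvent event = new TimelineEvent();
      event.setId(ServiceTimelineEvent.SERVICE_ATTEMPT_REGISTERED.toString());
      event.setTimestamp(now);
      entity.addEvent(event);
      return entity;
    }
  }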

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelinePublisher.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelinePublisher.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelinePublisher.java
deleted file mode 100644
index 243baea..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/ServiceTimelinePublisher.java
+++ /dev/null
@@ -1,368 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *       http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.timelineservice;
-
-import org.apache.hadoop.metrics2.AbstractMetric;
-import org.apache.hadoop.service.CompositeService;
-import org.apache.hadoop.yarn.api.records.ContainerState;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
-import org.apache.hadoop.yarn.client.api.TimelineV2Client;
-import org.apache.hadoop.yarn.service.ServiceContext;
-import org.apache.hadoop.yarn.service.api.records.Application;
-import org.apache.hadoop.yarn.service.api.records.Component;
-import org.apache.hadoop.yarn.service.api.records.ConfigFile;
-import org.apache.hadoop.yarn.service.api.records.Configuration;
-import org.apache.hadoop.yarn.service.api.records.Container;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
-import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
-import static org.apache.hadoop.yarn.service.timelineservice.ServiceTimelineMetricsConstants.DIAGNOSTICS_INFO;
-
-/**
- * A single service that publishes all the Timeline Entities.
- */
-public class ServiceTimelinePublisher extends CompositeService {
-
-  // Number of bytes of config which can be published in one shot to ATSv2.
-  public static final int ATS_CONFIG_PUBLISH_SIZE_BYTES = 10 * 1024;
-
-  private TimelineV2Client timelineClient;
-
-  private volatile boolean stopped = false;
-
-  private static final Logger log =
-      LoggerFactory.getLogger(ServiceTimelinePublisher.class);
-
-  @Override
-  protected void serviceInit(org.apache.hadoop.conf.Configuration configuration)
-      throws Exception {
-    addService(timelineClient);
-  }
-
-
-  @Override
-  protected void serviceStop() throws Exception {
-    stopped = true;
-    super.serviceStop();
-  }
-
-  public boolean isStopped() {
-    return stopped;
-  }
-
-  public ServiceTimelinePublisher(TimelineV2Client client) {
-    super(ServiceTimelinePublisher.class.getName());
-    timelineClient = client;
-  }
-
-  public void serviceAttemptRegistered(Application application,
-      org.apache.hadoop.conf.Configuration systemConf) {
-    long currentTimeMillis = application.getLaunchTime() == null
-        ? System.currentTimeMillis() : application.getLaunchTime().getTime();
-
-    TimelineEntity entity = createServiceAttemptEntity(application.getId());
-    entity.setCreatedTime(currentTimeMillis);
-
-    // create info keys
-    Map<String, Object> entityInfos = new HashMap<String, Object>();
-    entityInfos.put(ServiceTimelineMetricsConstants.NAME, application.getName());
-    entityInfos.put(ServiceTimelineMetricsConstants.STATE,
-        application.getState().toString());
-    entityInfos.put(ServiceTimelineMetricsConstants.LAUNCH_TIME,
-        currentTimeMillis);
-    entity.addInfo(ServiceTimelineMetricsConstants.QUICK_LINKS,
-        application.getQuicklinks());
-    entity.addInfo(entityInfos);
-
-    // add an event
-    TimelineEvent startEvent = new TimelineEvent();
-    startEvent.setId(ServiceTimelineEvent.SERVICE_ATTEMPT_REGISTERED.toString());
-    startEvent.setTimestamp(currentTimeMillis);
-    entity.addEvent(startEvent);
-
-    // publish before configurations published
-    putEntity(entity);
-
-    // publish system config - YarnConfiguration
-    populateTimelineEntity(systemConf.iterator(), application.getId(),
-        ServiceTimelineEntityType.SERVICE_ATTEMPT.toString());
-    // publish user conf
-    publishUserConf(application.getConfiguration(), application.getId(),
-        ServiceTimelineEntityType.SERVICE_ATTEMPT.toString());
-
-    // publish component as separate entity.
-    publishComponents(application.getComponents());
-  }
-
-  public void serviceAttemptUpdated(Application application) {
-    TimelineEntity entity = createServiceAttemptEntity(application.getId());
-    entity.addInfo(ServiceTimelineMetricsConstants.QUICK_LINKS,
-        application.getQuicklinks());
-    putEntity(entity);
-  }
-
-  public void serviceAttemptUnregistered(ServiceContext context,
-      String diagnostics) {
-    TimelineEntity entity = createServiceAttemptEntity(
-        context.attemptId.getApplicationId().toString());
-    Map<String, Object> entityInfos = new HashMap<String, Object>();
-    entityInfos.put(ServiceTimelineMetricsConstants.STATE,
-        FinalApplicationStatus.ENDED);
-    entityInfos.put(DIAGNOSTICS_INFO, diagnostics);
-    entity.addInfo(entityInfos);
-
-    // add an event
-    TimelineEvent finishEvent = new TimelineEvent();
-    finishEvent
-        .setId(ServiceTimelineEvent.SERVICE_ATTEMPT_UNREGISTERED.toString());
-    finishEvent.setTimestamp(System.currentTimeMillis());
-    entity.addEvent(finishEvent);
-
-    putEntity(entity);
-  }
-
-  public void componentInstanceStarted(Container container,
-      ComponentInstance instance) {
-
-    TimelineEntity entity = createComponentInstanceEntity(container.getId());
-    entity.setCreatedTime(container.getLaunchTime().getTime());
-
-    // create info keys
-    Map<String, Object> entityInfos = new HashMap<String, Object>();
-    entityInfos.put(ServiceTimelineMetricsConstants.BARE_HOST,
-        container.getBareHost());
-    entityInfos.put(ServiceTimelineMetricsConstants.STATE,
-        container.getState().toString());
-    entityInfos.put(ServiceTimelineMetricsConstants.LAUNCH_TIME,
-        container.getLaunchTime().getTime());
-    entityInfos.put(ServiceTimelineMetricsConstants.COMPONENT_NAME,
-        instance.getCompName());
-    entityInfos.put(ServiceTimelineMetricsConstants.COMPONENT_INSTANCE_NAME,
-        instance.getCompInstanceName());
-    entity.addInfo(entityInfos);
-
-    // add an event
-    TimelineEvent startEvent = new TimelineEvent();
-    startEvent
-        .setId(ServiceTimelineEvent.COMPONENT_INSTANCE_REGISTERED.toString());
-    startEvent.setTimestamp(container.getLaunchTime().getTime());
-    entity.addEvent(startEvent);
-
-    putEntity(entity);
-  }
-
-  public void componentInstanceFinished(ComponentInstance instance,
-      int exitCode, ContainerState state, String diagnostics) {
-    TimelineEntity entity = createComponentInstanceEntity(
-        instance.getContainer().getId().toString());
-
-    // create info keys
-    Map<String, Object> entityInfos = new HashMap<String, Object>();
-    entityInfos.put(ServiceTimelineMetricsConstants.EXIT_STATUS_CODE,
-        exitCode);
-    entityInfos.put(DIAGNOSTICS_INFO, diagnostics);
-    entityInfos.put(ServiceTimelineMetricsConstants.STATE, state);
-    entity.addInfo(entityInfos);
-
-    // add an event
-    TimelineEvent startEvent = new TimelineEvent();
-    startEvent
-        .setId(ServiceTimelineEvent.COMPONENT_INSTANCE_UNREGISTERED.toString());
-    startEvent.setTimestamp(System.currentTimeMillis());
-    entity.addEvent(startEvent);
-
-    putEntity(entity);
-  }
-
-  public void componentInstanceUpdated(Container container) {
-    TimelineEntity entity = createComponentInstanceEntity(container.getId());
-
-    // create info keys
-    Map<String, Object> entityInfos = new HashMap<String, Object>();
-    entityInfos.put(ServiceTimelineMetricsConstants.IP, container.getIp());
-    entityInfos.put(ServiceTimelineMetricsConstants.HOSTNAME,
-        container.getHostname());
-    entityInfos.put(ServiceTimelineMetricsConstants.STATE,
-        container.getState().toString());
-    entity.addInfo(entityInfos);
-
-    TimelineEvent updateEvent = new TimelineEvent();
-    updateEvent
-        .setId(ServiceTimelineEvent.COMPONENT_INSTANCE_UPDATED.toString());
-    updateEvent.setTimestamp(System.currentTimeMillis());
-    entity.addEvent(updateEvent);
-
-    putEntity(entity);
-  }
-
-  private void publishComponents(List<Component> components) {
-    long currentTimeMillis = System.currentTimeMillis();
-    for (Component component : components) {
-      TimelineEntity entity = createComponentEntity(component.getName());
-      entity.setCreatedTime(currentTimeMillis);
-
-      // create info keys
-      Map<String, Object> entityInfos = new HashMap<String, Object>();
-      entityInfos.put(ServiceTimelineMetricsConstants.ARTIFACT_ID,
-          component.getArtifact().getId());
-      entityInfos.put(ServiceTimelineMetricsConstants.ARTIFACT_TYPE,
-          component.getArtifact().getType().toString());
-      if (component.getResource().getProfile() != null) {
-        entityInfos.put(ServiceTimelineMetricsConstants.RESOURCE_PROFILE,
-            component.getResource().getProfile());
-      }
-      entityInfos.put(ServiceTimelineMetricsConstants.RESOURCE_CPU,
-          component.getResource().getCpus());
-      entityInfos.put(ServiceTimelineMetricsConstants.RESOURCE_MEMORY,
-          component.getResource().getMemory());
-
-      if (component.getLaunchCommand() != null) {
-        entityInfos.put(ServiceTimelineMetricsConstants.LAUNCH_COMMAND,
-            component.getLaunchCommand());
-      }
-      entityInfos.put(ServiceTimelineMetricsConstants.RUN_PRIVILEGED_CONTAINER,
-          component.getRunPrivilegedContainer().toString());
-      if (component.getPlacementPolicy() != null) {
-        entityInfos.put(ServiceTimelineMetricsConstants.PLACEMENT_POLICY,
-            component.getPlacementPolicy().getLabel());
-      }
-      entity.addInfo(entityInfos);
-
-      putEntity(entity);
-
-      // publish component specific configurations
-      publishUserConf(component.getConfiguration(), component.getName(),
-          ServiceTimelineEntityType.COMPONENT.toString());
-    }
-  }
-
-  private void publishUserConf(Configuration configuration,
-      String entityId, String entityType) {
-    populateTimelineEntity(configuration.getProperties().entrySet().iterator(),
-        entityId, entityType);
-
-    populateTimelineEntity(configuration.getEnv().entrySet().iterator(),
-        entityId, entityType);
-
-    for (ConfigFile configFile : configuration.getFiles()) {
-      populateTimelineEntity(configFile.getProps().entrySet().iterator(),
-          entityId, entityType);
-    }
-  }
-
-  private void populateTimelineEntity(Iterator<Entry<String, String>> iterator,
-      String entityId, String entityType) {
-    int configSize = 0;
-    TimelineEntity entity = createTimelineEntity(entityId, entityType);
-    while (iterator.hasNext()) {
-      Entry<String, String> entry = iterator.next();
-      int size = entry.getKey().length() + entry.getValue().length();
-      configSize += size;
-      // Configs are split into multiple entities if they exceed
-      // ATS_CONFIG_PUBLISH_SIZE_BYTES (10 KB) in size.
-      if (configSize > ATS_CONFIG_PUBLISH_SIZE_BYTES) {
-        if (entity.getConfigs().size() > 0) {
-          putEntity(entity);
-          entity = createTimelineEntity(entityId, entityType);
-        }
-        configSize = size;
-      }
-      entity.addConfig(entry.getKey(), entry.getValue());
-    }
-    if (configSize > 0) {
-      putEntity(entity);
-    }
-  }
-
-  /**
-   * Called from ServiceMetricsSink at regular intervals.
-   * @param metrics metrics of the service or its components
-   * @param entityId Id of the entity
-   * @param entityType Type of the entity
-   * @param timestamp time at which the metrics were sampled
-   */
-  public void publishMetrics(Iterable<AbstractMetric> metrics, String entityId,
-      String entityType, long timestamp) {
-    TimelineEntity entity = createTimelineEntity(entityId, entityType);
-    Set<TimelineMetric> entityMetrics = new HashSet<TimelineMetric>();
-    for (AbstractMetric metric : metrics) {
-      TimelineMetric timelineMetric = new TimelineMetric();
-      timelineMetric.setId(metric.name());
-      timelineMetric.addValue(timestamp, metric.value());
-      entityMetrics.add(timelineMetric);
-    }
-    entity.setMetrics(entityMetrics);
-    putEntity(entity);
-  }
-
-  private TimelineEntity createServiceAttemptEntity(String serviceId) {
-    TimelineEntity entity = createTimelineEntity(serviceId,
-        ServiceTimelineEntityType.SERVICE_ATTEMPT.toString());
-    return entity;
-  }
-
-  private TimelineEntity createComponentInstanceEntity(String instanceId) {
-    TimelineEntity entity = createTimelineEntity(instanceId,
-        ServiceTimelineEntityType.COMPONENT_INSTANCE.toString());
-    return entity;
-  }
-
-  private TimelineEntity createComponentEntity(String componentId) {
-    TimelineEntity entity = createTimelineEntity(componentId,
-        ServiceTimelineEntityType.COMPONENT.toString());
-    return entity;
-  }
-
-  private TimelineEntity createTimelineEntity(String entityId,
-      String entityType) {
-    TimelineEntity entity = new TimelineEntity();
-    entity.setId(entityId);
-    entity.setType(entityType);
-    return entity;
-  }
-
-  private void putEntity(TimelineEntity entity) {
-    try {
-      if (log.isDebugEnabled()) {
-        log.debug("Publishing the entity " + entity + ", JSON-style content: "
-            + TimelineUtils.dumpTimelineRecordtoJSON(entity));
-      }
-      if (timelineClient != null) {
-        timelineClient.putEntitiesAsync(entity);
-      } else {
-        log.error("Seems like client has been removed before the entity "
-            + "could be published for " + entity);
-      }
-    } catch (Exception e) {
-      log.error("Error when publishing entity " + entity, e);
-    }
-  }
-}
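
The non-obvious part of ServiceTimelinePublisher above is the config splitting
in populateTimelineEntity: key/value pairs are accumulated into an entity until
their combined length exceeds ATS_CONFIG_PUBLISH_SIZE_BYTES, at which point the
partially filled entity is published and a fresh one is started. Below is a
self-contained sketch of the same chunking rule, with the publish step replaced
by collecting chunks into a list; the class and method names are illustrative
only.

  import java.util.ArrayList;
  import java.util.LinkedHashMap;
  import java.util.List;
  import java.util.Map;

  public class ConfigChunker {
    // Mirrors ATS_CONFIG_PUBLISH_SIZE_BYTES in the deleted publisher.
    static final int MAX_CHUNK_BYTES = 10 * 1024;

    /** Split config entries into chunks whose key+value lengths stay near the limit. */
    public static List<Map<String, String>> chunk(Map<String, String> configs) {
      List<Map<String, String>> chunks = new ArrayList<>();
      Map<String, String> current = new LinkedHashMap<>();
      int size = 0;
      for (Map.Entry<String, String> e : configs.entrySet()) {
        int entrySize = e.getKey().length() + e.getValue().length();
        size += entrySize;
        if (size > MAX_CHUNK_BYTES && !current.isEmpty()) {
          chunks.add(current);            // "publish" the filled chunk
          current = new LinkedHashMap<>();
          size = entrySize;               // this entry starts the next chunk
        }
        current.put(e.getKey(), e.getValue());
      }
      if (!current.isEmpty()) {
        chunks.add(current);              // flush the remainder
      }
      return chunks;
    }
  }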

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/package-info.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/package-info.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/package-info.java
deleted file mode 100644
index 72f7842..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/timelineservice/package-info.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * ATS implementation
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-package org.apache.hadoop.yarn.service.timelineservice;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ApplicationReportSerDeser.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ApplicationReportSerDeser.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ApplicationReportSerDeser.java
deleted file mode 100644
index 2607c08..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ApplicationReportSerDeser.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.utils;
-
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.map.JsonMappingException;
-
-import java.io.IOException;
-
-/**
- * Persistence of {@link SerializedApplicationReport}.
- */
-public class ApplicationReportSerDeser
-    extends JsonSerDeser<SerializedApplicationReport> {
-  public ApplicationReportSerDeser() {
-    super(SerializedApplicationReport.class);
-  }
-
-
-  private static final ApplicationReportSerDeser
-      staticinstance = new ApplicationReportSerDeser();
-
-  /**
-   * Convert an instance to a JSON string; access to the shared ser/deser
-   * object instance is synchronized.
-   * @param instance object to convert
-   * @return a JSON string description
-   * @throws JsonParseException parse problems
-   * @throws JsonMappingException O/J mapping problems
-   */
-  public static String toString(SerializedApplicationReport instance)
-      throws IOException, JsonGenerationException, JsonMappingException {
-    synchronized (staticinstance) {
-      return staticinstance.toJson(instance);
-    }
-  }
- 
-}
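
A brief usage sketch of the helper above, assuming a SerializedApplicationReport
instance (from the same utils package) has already been populated. Because
toString() synchronizes on the shared static instance, concurrent callers share
one Jackson mapper safely.

  import java.io.IOException;

  public class ReportJsonDemo {
    public static String toJson(SerializedApplicationReport report) throws IOException {
      // Serialization goes through the single shared ApplicationReportSerDeser instance.
      return ApplicationReportSerDeser.toString(report);
    }
  }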

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ClientRegistryBinder.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ClientRegistryBinder.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ClientRegistryBinder.java
deleted file mode 100644
index 86896b2..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ClientRegistryBinder.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.utils;
-
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.fs.PathNotFoundException;
-import org.apache.hadoop.registry.client.api.RegistryConstants;
-import org.apache.hadoop.registry.client.api.RegistryOperations;
-import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
-import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
-import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;
-import org.apache.hadoop.registry.client.impl.zk.RegistryInternalConstants;
-import org.apache.hadoop.registry.client.types.Endpoint;
-import org.apache.hadoop.registry.client.types.ServiceRecord;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.List;
-
-import static org.apache.hadoop.registry.client.binding.RegistryPathUtils.encodeForRegistry;
-import static org.apache.hadoop.registry.client.binding.RegistryUtils.convertUsername;
-import static org.apache.hadoop.registry.client.binding.RegistryUtils.getCurrentUsernameUnencoded;
-import static org.apache.hadoop.registry.client.binding.RegistryUtils.servicePath;
-
-/**
- * Generic code to get the URLs for clients via the registry.
- */
-public class ClientRegistryBinder {
-  private static final Logger log =
-      LoggerFactory.getLogger(ClientRegistryBinder.class);
-
-  private final RegistryOperations operations;
-
-  public ClientRegistryBinder(RegistryOperations operations) {
-    this.operations = operations;
-  }
-
-  /**
-   * Build the user path; switches to the system path if the user is "".
-   * It also cross-converts the username to ASCII via punycode.
-   * @param username username or ""
-   * @return the path to the user
-   */
-  public static String homePathForUser(String username) {
-    Preconditions.checkArgument(username != null, "null user");
-
-    // catch recursion
-    if (username.startsWith(RegistryConstants.PATH_USERS)) {
-      return username;
-    }
-
-    if (username.isEmpty()) {
-      return RegistryConstants.PATH_SYSTEM_SERVICES;
-    }
-
-    // convert username to registry name
-    String convertedName = convertUsername(username);
-
-    return RegistryPathUtils.join(RegistryConstants.PATH_USERS,
-        encodeForRegistry(convertedName));
-  }
-
-  /**
-   * Get the current username, before any encoding has been applied.
-   * @return the current user from the kerberos identity, falling back
-   * to the user and/or env variables.
-   */
-  public static String currentUsernameUnencoded() {
-    String envHadoopUsername = System.getenv(
-        RegistryInternalConstants.HADOOP_USER_NAME);
-    return getCurrentUsernameUnencoded(envHadoopUsername);
-  }
-
-  /**
-   * Qualify a user.
-   * <ol>
-   *   <li> <code>"~"</code> maps to the user's home path</li>
-   *   <li> <code>"~user"</code> maps to <code>/users/$user</code></li>
-   *   <li> <code>"/"</code> maps to <code>/services/</code></li>
-   * </ol>
-   * @param user the username
-   * @return the base path
-   */
-  public static String qualifyUser(String user) {
-    // qualify the user
-    String t = user.trim();
-    if (t.startsWith("/")) {
-      // already resolved
-      return t;
-    } else if (t.equals("~")) {
-      // self
-      return currentUsernameUnencoded();
-    } else if (t.startsWith("~")) {
-      // another user
-      // convert username to registry name
-      String convertedName = convertUsername(t.substring(1));
-
-      return RegistryPathUtils.join(RegistryConstants.PATH_USERS,
-          encodeForRegistry(convertedName));
-    } else {
-      return "/" + t;
-    }
-  }
-
-  /**
-   * Look up an external REST API.
-   * @param user user which will be qualified as per {@link #qualifyUser(String)}
-   * @param serviceClass service class
-   * @param instance instance name
-   * @param api API
-   * @return the endpoint URL of the API; never null
-   * @throws IOException if the path cannot be resolved or the API is absent
-   */
-  public String lookupExternalRestAPI(String user,
-      String serviceClass,
-      String instance,
-      String api)
-      throws IOException {
-    String qualified = qualifyUser(user);
-    String path = servicePath(qualified, serviceClass, instance);
-    String restAPI = resolveExternalRestAPI(api, path);
-    if (restAPI == null) {
-      throw new PathNotFoundException(path + " API " + api);
-    }
-    return restAPI;
-  }
-
-  /**
-   * Resolve a service record, then return an external REST API that it exports.
-   *
-   * @param api API to resolve
-   * @param path path of the service record
-   * @return null if the record exists but the API is absent or it has no
-   * REST endpoints.
-   * @throws IOException resolution problems, as covered in
-   * {@link RegistryOperations#resolve(String)}
-   */
-  protected String resolveExternalRestAPI(String api, String path) throws
-      IOException {
-    ServiceRecord record = operations.resolve(path);
-    return lookupRestAPI(record, api, true);
-  }
-
-  /**
-   * Look up an external REST API endpoint
-   * @param record service record
-   * @param api URI of api
-   * @param external flag to indicate this is an external record
-   * @return the first endpoint of the implementation, or null if there
-   * is no entry for the API or the endpoint is of the wrong type.
-   */
-  public static String lookupRestAPI(ServiceRecord record,
-      String api, boolean external) throws InvalidRecordException {
-    try {
-      String url = null;
-      Endpoint endpoint = getEndpoint(record, api, external);
-      List<String> addresses =
-          RegistryTypeUtils.retrieveAddressesUriType(endpoint);
-      if (addresses != null && !addresses.isEmpty()) {
-        url = addresses.get(0);
-      }
-      return url;
-    } catch (InvalidRecordException e) {
-      log.debug("looking for API {}", api, e);
-      return null;
-    }
-  }
-
-  /**
-   * Get an endpoint by API.
-   * @param record service record
-   * @param api API
-   * @param external flag to indicate this is an external record
-   * @return the endpoint or null
-   */
-  public static Endpoint getEndpoint(ServiceRecord record,
-      String api,
-      boolean external) {
-    return external ? record.getExternalEndpoint(api)
-                    : record.getInternalEndpoint(api);
-  }
-
-
-}
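
A hedged sketch of how a client might drive the binder above against the YARN
registry. The factory call is the standard hadoop-registry API, but the user,
service class, instance and API names below are placeholders, not values from
this patch.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.registry.client.api.RegistryOperations;
  import org.apache.hadoop.registry.client.api.RegistryOperationsFactory;

  public class RegistryLookupDemo {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      RegistryOperations registryOps = RegistryOperationsFactory.createInstance(conf);
      registryOps.start();
      try {
        ClientRegistryBinder binder = new ClientRegistryBinder(registryOps);
        // "~" is qualified to the current user's home path, per qualifyUser() above.
        String url = binder.lookupExternalRestAPI("~", "org-apache-slider",
            "sleeper-service", "classpath:org.apache.hadoop.yarn.service.rest");
        System.out.println("REST endpoint: " + url);
      } finally {
        registryOps.stop();
      }
    }
  }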

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14ac03e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/Comparators.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/Comparators.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/Comparators.java
deleted file mode 100644
index 9f0e5d4..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/Comparators.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.utils;
-
-import java.io.Serializable;
-import java.util.Comparator;
-
-/**
- * Some general comparators
- */
-public class Comparators {
-
-  public static class LongComparator implements Comparator<Long>, Serializable {
-    @Override
-    public int compare(Long o1, Long o2) {
-      return o1.compareTo(o2);
-    }
-  }
-
-  public static class InvertedLongComparator
-      implements Comparator<Long>, Serializable {
-    @Override
-    public int compare(Long o1, Long o2) {
-      return o2.compareTo(o1);
-    }
-  }
-
-  /**
-   * Little template class to reverse any comparator.
-   * @param <CompareType> the type that is being compared
-   */
-  public static class ComparatorReverser<CompareType> implements Comparator<CompareType>,
-      Serializable {
-
-    final Comparator<CompareType> instance;
-
-    public ComparatorReverser(Comparator<CompareType> instance) {
-      this.instance = instance;
-    }
-
-    @Override
-    public int compare(CompareType first, CompareType second) {
-      return instance.compare(second, first);
-    }
-  }
-}
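
A short usage sketch of the comparators above, using only the JDK: sort
ascending with LongComparator, then reuse the same comparator in reverse by
wrapping it in ComparatorReverser.

  import java.util.Arrays;
  import java.util.List;

  public class ComparatorsDemo {
    public static void main(String[] args) {
      List<Long> timestamps = Arrays.asList(1500L, 100L, 900L);

      // Ascending order.
      timestamps.sort(new Comparators.LongComparator());
      System.out.println(timestamps);   // [100, 900, 1500]

      // Descending order by reversing the same comparator.
      timestamps.sort(
          new Comparators.ComparatorReverser<>(new Comparators.LongComparator()));
      System.out.println(timestamps);   // [1500, 900, 100]
    }
  }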

