http://git-wip-us.apache.org/repos/asf/hadoop/blob/36972d61/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
deleted file mode 100644
index 71f4b5e..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics.util;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * The MetricsTimeVaryingRate class is for a rate based metric that
- * naturally varies over time (e.g. time taken to create a file).
- * The rate is averaged at each interval heart beat (the interval
- * is set in the metrics config file).
- * This class also keeps track of the min and max rates along with 
- * a method to reset the min-max.
- *
- * @deprecated Use org.apache.hadoop.metrics2 package instead.
- */
-@Deprecated
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-public class MetricsTimeVaryingRate extends MetricsBase {
-
-  private static final Log LOG =
-    LogFactory.getLog("org.apache.hadoop.metrics.util");
-
-  static class Metrics {
-    int numOperations = 0;
-    long time = 0;  // total time or average time
-
-    void set(final Metrics resetTo) {
-      numOperations = resetTo.numOperations;
-      time = resetTo.time;
-    }
-    
-    void reset() {
-      numOperations = 0;
-      time = 0;
-    }
-  }
-  
-  static class MinMax {
-    long minTime = -1;
-    long maxTime = 0;
-    
-    void set(final MinMax newVal) {
-      minTime = newVal.minTime;
-      maxTime = newVal.maxTime;
-    }
-    
-    void reset() {
-      minTime = -1;
-      maxTime = 0;
-    }
-    void update(final long time) { // update min max
-      minTime = (minTime == -1) ? time : Math.min(minTime, time);
-      minTime = Math.min(minTime, time);
-      maxTime = Math.max(maxTime, time);
-    }
-  }
-  private Metrics currentData;
-  private Metrics previousIntervalData;
-  private MinMax minMax;
-  
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   */
-  public MetricsTimeVaryingRate(final String nam, final MetricsRegistry registry, final String description) {
-    super(nam, description);
-    currentData = new Metrics();
-    previousIntervalData = new Metrics();
-    minMax = new MinMax();
-    registry.add(nam, this);
-  }
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   * A description of {@link #NO_DESCRIPTION} is used
-   */
-  public MetricsTimeVaryingRate(final String nam, MetricsRegistry registry) {
-    this(nam, registry, NO_DESCRIPTION);
-
-  }
-  
-  
-  /**
-   * Increment the metrics for numOps operations
-   * @param numOps - number of operations
-   * @param time - time for numOps operations
-   */
-  public synchronized void inc(final int numOps, final long time) {
-    currentData.numOperations += numOps;
-    currentData.time += time;
-    long timePerOps = time/numOps;
-    minMax.update(timePerOps);
-  }
-  
-  /**
-   * Increment the metrics for one operation
-   * @param time for one operation
-   */
-  public synchronized void inc(final long time) {
-    currentData.numOperations++;
-    currentData.time += time;
-    minMax.update(time);
-  }
-  
-  
-
-  private synchronized void intervalHeartBeat() {
-     previousIntervalData.numOperations = currentData.numOperations;
-     previousIntervalData.time = (currentData.numOperations == 0) ?
-                             0 : currentData.time / currentData.numOperations;
-     currentData.reset();
-  }
-  
-  /**
-   * Push the delta  metrics to the mr.
-   * The delta is since the last push/interval.
-   * 
-   * Note this does NOT push to JMX
-   * (JMX gets the info via {@link #getPreviousIntervalAverageTime()} and
-   * {@link #getPreviousIntervalNumOps()}
-   *
-   * @param mr
-   */
-  @Override
-  public synchronized void pushMetric(final MetricsRecord mr) {
-    intervalHeartBeat();
-    try {
-      mr.incrMetric(getName() + "_num_ops", getPreviousIntervalNumOps());
-      mr.setMetric(getName() + "_avg_time", getPreviousIntervalAverageTime());
-    } catch (Exception e) {
-      LOG.info("pushMetric failed for " + getName() + "\n" , e);
-    }
-  }
-  
-  /**
-   * The number of operations in the previous interval
-   * @return - ops in prev interval
-   */
-  public synchronized int getPreviousIntervalNumOps() { 
-    return previousIntervalData.numOperations;
-  }
-  
-  /**
-   * The average rate of an operation in the previous interval
-   * @return - the average rate.
-   */
-  public synchronized long getPreviousIntervalAverageTime() {
-    return previousIntervalData.time;
-  } 
-  
-  /**
-   * The min time for a single operation since the last reset
-   *  {@link #resetMinMax()}
-   * @return min time for an operation
-   */
-  public synchronized long getMinTime() {
-    return  minMax.minTime;
-  }
-  
-  /**
-   * The max time for a single operation since the last reset
-   *  {@link #resetMinMax()}
-   * @return max time for an operation
-   */
-  public synchronized long getMaxTime() {
-    return minMax.maxTime;
-  }
-  
-  /**
-   * Reset the min max values
-   */
-  public synchronized void resetMinMax() {
-    minMax.reset();
-  }
-}
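
(Aside, not part of the commit: the @deprecated tag above points at the org.apache.hadoop.metrics2 package. Below is a minimal migration sketch, assuming the metrics2 MetricsRegistry/MutableRate API; the "FileOps" and "createFile" names are illustrative only.)

import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableRate;

public class RateMigrationSketch {
  public static void main(String[] args) {
    // metrics2 registry; "FileOps" is an illustrative source name.
    MetricsRegistry registry = new MetricsRegistry("FileOps");
    // newRate tracks an operation count and an average time, much as the
    // removed class tracked numOperations and an averaged time per interval.
    MutableRate createFileRate =
        registry.newRate("createFile", "Time taken to create a file", false);
    long start = System.nanoTime();
    // ... the operation being timed would run here ...
    long elapsedMillis = (System.nanoTime() - start) / 1_000_000;
    createFileRate.add(elapsedMillis); // counterpart of inc(time) above
  }
}

On each snapshot, MutableRate publishes a num_ops/avg_time pair, much like the _num_ops and _avg_time values pushMetric emitted above.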

http://git-wip-us.apache.org/repos/asf/hadoop/blob/36972d61/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/package-info.java
deleted file mode 100644
index 46dac34..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
-@InterfaceStability.Evolving
-package org.apache.hadoop.metrics.util;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/36972d61/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
index 3ed89a8..f3ac5da 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
@@ -323,7 +323,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
   }
 
   /**
-   * Verify the access for /logs, /stacks, /conf, /logLevel and /metrics
+   * Verify the access for /logs, /stacks, /conf, and /logLevel
    * servlets, when authentication filters are set, but authorization is not
    * enabled.
    * @throws Exception 
@@ -349,7 +349,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
     myServer.start();
     String serverURL = "http://" + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
     for (String servlet : new String[] { "conf", "logs", "stacks",
-        "logLevel", "metrics" }) {
+        "logLevel" }) {
       for (String user : new String[] { "userA", "userB" }) {
         assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
             + servlet, user));
@@ -359,8 +359,8 @@ public class TestHttpServer extends HttpServerFunctionalTest {
   }
 
   /**
-   * Verify the administrator access for /logs, /stacks, /conf, /logLevel and
-   * /metrics servlets.
+   * Verify the administrator access for /logs, /stacks, /conf, and /logLevel
+   * servlets.
    * 
    * @throws Exception
    */
@@ -393,7 +393,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
     String serverURL = "http://"
         + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
     for (String servlet : new String[] { "conf", "logs", "stacks",
-        "logLevel", "metrics" }) {
+        "logLevel" }) {
       for (String user : new String[] { "userA", "userB", "userC", "userD" }) {
         assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
             + servlet, user));
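
(Aside: with /metrics gone, the test loops now cover only the four remaining servlets. A standalone sketch of the same status check using only the JDK; the localhost address is hypothetical, and the real test's getHttpStatusCode also passes a user name, which this sketch omits.)

import java.net.HttpURLConnection;
import java.net.URL;

public class ServletAccessSketch {
  static int statusOf(String address) throws Exception {
    HttpURLConnection conn =
        (HttpURLConnection) new URL(address).openConnection();
    conn.connect();
    int status = conn.getResponseCode();
    conn.disconnect();
    return status;
  }

  public static void main(String[] args) throws Exception {
    String serverURL = "http://localhost:8080/"; // hypothetical address
    for (String servlet : new String[] {"conf", "logs", "stacks", "logLevel"}) {
      System.out.println("/" + servlet + " -> " + statusOf(serverURL + servlet));
    }
  }
}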

http://git-wip-us.apache.org/repos/asf/hadoop/blob/36972d61/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java
deleted file mode 100644
index b2ea9df..0000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.metrics.MetricsServlet.TagsMetricsPair;
-import org.apache.hadoop.metrics.spi.NoEmitMetricsContext;
-import org.apache.hadoop.metrics.spi.OutputRecord;
-import org.mortbay.util.ajax.JSON;
-
-@Deprecated
-public class TestMetricsServlet extends TestCase {
-  MetricsContext nc1;
-  MetricsContext nc2;
-  // List containing nc1 and nc2.
-  List<MetricsContext> contexts;
-  OutputRecord outputRecord;
-  
-  /**
-   * Initializes, for testing, two NoEmitMetricsContext's, and adds one value 
-   * to the first of them.
-   */
-  @Override
-  public void setUp() throws IOException {
-    nc1 = new NoEmitMetricsContext();
-    nc1.init("test1", ContextFactory.getFactory());
-    nc2 = new NoEmitMetricsContext();
-    nc2.init("test2", ContextFactory.getFactory());
-    contexts = new ArrayList<MetricsContext>();
-    contexts.add(nc1);
-    contexts.add(nc2);
-
-    MetricsRecord r = nc1.createRecord("testRecord");
-    
-    r.setTag("testTag1", "testTagValue1");
-    r.setTag("testTag2", "testTagValue2");
-    r.setMetric("testMetric1", 1);
-    r.setMetric("testMetric2", 33);
-    r.update();
-
-    Map<String, Collection<OutputRecord>> m = nc1.getAllRecords();
-    assertEquals(1, m.size());
-    assertEquals(1, m.values().size());
-    Collection<OutputRecord> outputRecords = m.values().iterator().next();
-    assertEquals(1, outputRecords.size());
-    outputRecord = outputRecords.iterator().next();
-  }
-  
- 
-  
-  public void testTagsMetricsPair() throws IOException {
-    TagsMetricsPair pair = new TagsMetricsPair(outputRecord.getTagsCopy(), 
-        outputRecord.getMetricsCopy());
-    String s = JSON.toString(pair);
-    assertEquals(
-        "[{\"testTag1\":\"testTagValue1\",\"testTag2\":\"testTagValue2\"},"+
-        "{\"testMetric1\":1,\"testMetric2\":33}]", s);
-  }
-  
-  public void testGetMap() throws IOException {
-    MetricsServlet servlet = new MetricsServlet();
-    Map<String, Map<String, List<TagsMetricsPair>>> m = servlet.makeMap(contexts);
-    assertEquals("Map missing contexts", 2, m.size());
-    assertTrue(m.containsKey("test1"));
-   
-    Map<String, List<TagsMetricsPair>> m2 = m.get("test1");
-    
-    assertEquals("Missing records", 1, m2.size());
-    assertTrue(m2.containsKey("testRecord"));
-    assertEquals("Wrong number of tags-values pairs.", 1, 
m2.get("testRecord").size());
-  }
-  
-  public void testPrintMap() throws IOException {
-    StringWriter sw = new StringWriter();
-    PrintWriter out = new PrintWriter(sw);
-    MetricsServlet servlet = new MetricsServlet();
-    servlet.printMap(out, servlet.makeMap(contexts));
-    
-    String EXPECTED = "" +
-      "test1\n" +
-      "  testRecord\n" +
-      "    {testTag1=testTagValue1,testTag2=testTagValue2}:\n" +
-      "      testMetric1=1\n" +
-      "      testMetric2=33\n" +
-      "test2\n";
-    assertEquals(EXPECTED, sw.toString());
-  }
-}
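
(Aside: with MetricsServlet and its /metrics endpoint removed, the usual way to read metrics over HTTP is the /jmx servlet, which serves JSON for matching MBeans. A minimal reader sketch; the host, port, and qry pattern below are illustrative.)

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class JmxMetricsSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical daemon address; qry filters which MBeans are returned.
    URL url = new URL("http://localhost:8080/jmx?qry=Hadoop:*");
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
      for (String line; (line = in.readLine()) != null; ) {
        System.out.println(line); // JSON description of matching MBeans
      }
    }
  }
}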

http://git-wip-us.apache.org/repos/asf/hadoop/blob/36972d61/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/ganglia/TestGangliaContext.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/ganglia/TestGangliaContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/ganglia/TestGangliaContext.java
deleted file mode 100644
index bae3ec0..0000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/ganglia/TestGangliaContext.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * TestGangliaContext.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.hadoop.metrics.ganglia;
-
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import org.apache.hadoop.metrics.ContextFactory;
-import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
-
-import java.net.MulticastSocket;
-
-@Deprecated
-public class TestGangliaContext {
-  @Test
-  public void testShouldCreateDatagramSocketByDefault() throws Exception {
-    GangliaContext context = new GangliaContext();
-    context.init("gangliaContext", ContextFactory.getFactory());
-    assertFalse("Created MulticastSocket", context.datagramSocket instanceof 
MulticastSocket);
-  }
-
-  @Test
-  public void testShouldCreateDatagramSocketIfMulticastIsDisabled() throws Exception {
-    GangliaContext context = new GangliaContext();
-    ContextFactory factory = ContextFactory.getFactory();
-    factory.setAttribute("gangliaContext.multicast", "false");
-    context.init("gangliaContext", factory);
-    assertFalse("Created MulticastSocket", context.datagramSocket instanceof 
MulticastSocket);
-  }
-
-  @Test
-  public void testShouldCreateMulticastSocket() throws Exception {
-    GangliaContext context = new GangliaContext();
-    ContextFactory factory = ContextFactory.getFactory();
-    factory.setAttribute("gangliaContext.multicast", "true");
-    context.init("gangliaContext", factory);
-    assertTrue("Did not create MulticastSocket", context.datagramSocket 
instanceof MulticastSocket);
-    MulticastSocket multicastSocket = (MulticastSocket) context.datagramSocket;
-    assertEquals("Did not set default TTL", multicastSocket.getTimeToLive(), 
1);
-  }
-
-  @Test
-  public void testShouldSetMulticastSocketTtl() throws Exception {
-    GangliaContext context = new GangliaContext();
-    ContextFactory factory = ContextFactory.getFactory();
-    factory.setAttribute("gangliaContext.multicast", "true");
-    factory.setAttribute("gangliaContext.multicast.ttl", "10");
-    context.init("gangliaContext", factory);
-    MulticastSocket multicastSocket = (MulticastSocket) context.datagramSocket;
-    assertEquals("Did not set TTL", multicastSocket.getTimeToLive(), 10);
-  }
-  
-  @Test
-  public void testCloseShouldCloseTheSocketWhichIsCreatedByInit() throws Exception {
-    AbstractMetricsContext context=new GangliaContext();
-    context.init("gangliaContext", ContextFactory.getFactory());
-    GangliaContext gangliaContext =(GangliaContext) context;
-    assertFalse("Socket already 
closed",gangliaContext.datagramSocket.isClosed());
-    context.close();
-    assertTrue("Socket not closed",gangliaContext.datagramSocket.isClosed());
-  }
-}
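
(Aside: the TTL assertions above boil down to standard java.net.MulticastSocket behavior. A self-contained sketch of just that behavior, using only the JDK:)

import java.net.MulticastSocket;

public class MulticastTtlSketch {
  public static void main(String[] args) throws Exception {
    MulticastSocket socket = new MulticastSocket();
    socket.setTimeToLive(10);                   // the TTL the removed test set
    System.out.println(socket.getTimeToLive()); // prints 10
    socket.close();
  }
}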

http://git-wip-us.apache.org/repos/asf/hadoop/blob/36972d61/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/spi/TestOutputRecord.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/spi/TestOutputRecord.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/spi/TestOutputRecord.java
deleted file mode 100644
index 9747cb9..0000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/spi/TestOutputRecord.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics.spi;
-
-import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
-import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
-
-import junit.framework.TestCase;
-
-@Deprecated
-public class TestOutputRecord extends TestCase {
-  public void testCopy() {
-    TagMap tags = new TagMap();
-    tags.put("tagkey", "tagval");
-    MetricMap metrics = new MetricMap();
-    metrics.put("metrickey", 123.4);
-    OutputRecord r = new OutputRecord(tags, metrics);
-    
-    assertEquals(tags, r.getTagsCopy());    
-    assertNotSame(tags, r.getTagsCopy());
-    assertEquals(metrics, r.getMetricsCopy());
-    assertNotSame(metrics, r.getMetricsCopy());
-  } 
-}
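
(Aside: the deleted test pinned down a defensive-copy contract: the getters return maps equal to, but not the same object as, the internal state. A minimal sketch of that contract with plain JDK maps; the class and field names are illustrative:)

import java.util.Map;
import java.util.TreeMap;

public class DefensiveCopySketch {
  private final Map<String, String> tags;

  DefensiveCopySketch(Map<String, String> tags) {
    this.tags = new TreeMap<>(tags); // copy on the way in
  }

  Map<String, String> getTagsCopy() {
    return new TreeMap<>(tags); // a fresh, equal-but-not-same map each call
  }

  public static void main(String[] args) {
    Map<String, String> t = new TreeMap<>();
    t.put("tagkey", "tagval");
    DefensiveCopySketch r = new DefensiveCopySketch(t);
    System.out.println(r.getTagsCopy().equals(t)); // true
    System.out.println(r.getTagsCopy() == t);      // false
  }
}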

