Repository: hive
Updated Branches:
  refs/heads/master 23478cfeb -> 7dc701c59


HIVE-17563: CodahaleMetrics.JsonFileReporter is not updating 
hive.service.metrics.file.location (Alexander Kolbasov, reviewed by Sahil 
Takiar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7dc701c5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7dc701c5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7dc701c5

Branch: refs/heads/master
Commit: 7dc701c592d2083c2e05f06172788c18912d71ae
Parents: 23478cf
Author: Alexander Kolbasov <ak...@cloudera.com>
Authored: Fri Sep 29 16:51:01 2017 -0700
Committer: Sahil Takiar <stak...@cloudera.com>
Committed: Fri Sep 29 16:51:32 2017 -0700

----------------------------------------------------------------------
 .../metrics2/JsonFileMetricsReporter.java       | 192 +++++++++++--------
 .../metrics/metrics2/TestCodahaleMetrics.java   |  86 ++++++---
 .../hive/metastore/metrics/JsonReporter.java    | 131 ++++++++-----
 .../hive/metastore/metrics/TestMetrics.java     |  75 ++------
 4 files changed, 277 insertions(+), 207 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/7dc701c5/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java
----------------------------------------------------------------------
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java
 
b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java
index c07517a..96243cb 100644
--- 
a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java
+++ 
b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java
@@ -23,114 +23,156 @@ import com.codahale.metrics.json.MetricsModule;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedWriter;
+import java.io.FileWriter;
 import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.net.URI;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.nio.file.attribute.FileAttribute;
+import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.PosixFilePermissions;
+import java.util.Set;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 
 /**
- * A metrics reporter for CodahaleMetrics that dumps metrics periodically into 
a file in JSON format.
+ * A metrics reporter for CodahaleMetrics that dumps metrics periodically into
+ * a file in JSON format. Only files on local filesystems are supported.
  */
-
-public class JsonFileMetricsReporter implements CodahaleReporter {
+public class JsonFileMetricsReporter implements CodahaleReporter, Runnable {
+  //
+  // Implementation notes.
+  //
+  // 1. Since only local file systems are supported, there is no need to use 
Hadoop
+  //    version of Path class.
+  // 2. java.nio package provides modern implementation of file and directory 
operations
+  //    which is better than the traditional java.io, so we are using it here.
+  //    In particular, it supports atomic creation of temporary files with 
specified
+  //    permissions in the specified directory. This also avoids various 
attacks possible
+  //    when temp file name is generated first, followed by file creation.
+  //    See http://www.oracle.com/technetwork/articles/javase/nio-139333.html 
for
+  //    the description of NIO API and
+  //    http://docs.oracle.com/javase/tutorial/essential/io/legacy.html for the
+  //    description of interoperability between the legacy IO API and the NIO API.
+  // 3. To avoid race conditions with readers of the metrics file, the 
implementation
+  //    dumps metrics to a temporary file in the same directory as the actual 
metrics
+  //    file and then renames it to the destination. Since both are located on 
the same
+  //    filesystem, this rename is likely to be atomic (as long as the underlying OS
+  //    supports atomic renames).
+  //
+  // NOTE: This reporter is very similar to
+  //       org.apache.hadoop.hive.metastore.metrics.JsonReporter.
+  //       It would be good to unify the two.
+  //
+  private static final Logger LOGGER = 
LoggerFactory.getLogger(JsonFileMetricsReporter.class);
+  // Permissions for the metrics file
+  private static final FileAttribute<Set<PosixFilePermission>> FILE_ATTRS =
+      
PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rw-r--r--"));
+  // Thread name for reporter thread
+  private static final String JSON_REPORTER_THREAD_NAME = 
"json-metric-reporter";
 
   private final MetricRegistry metricRegistry;
   private final ObjectWriter jsonWriter;
-  private final ScheduledExecutorService executorService;
-  private final HiveConf conf;
+  private ScheduledExecutorService executorService;
   private final long interval;
-  private final String pathString;
+  // Location of JSON file
   private final Path path;
-
-  private static final Logger LOGGER = 
LoggerFactory.getLogger(JsonFileMetricsReporter.class);
+  // tmpDir is the parent directory of path
+  private final Path tmpDir;
 
   public JsonFileMetricsReporter(MetricRegistry registry, HiveConf conf) {
     this.metricRegistry = registry;
     this.jsonWriter =
         new ObjectMapper().registerModule(new 
MetricsModule(TimeUnit.MILLISECONDS,
             TimeUnit.MILLISECONDS, false)).writerWithDefaultPrettyPrinter();
-    executorService = Executors.newSingleThreadScheduledExecutor();
-    this.conf = conf;
 
     interval = 
conf.getTimeVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, 
TimeUnit.MILLISECONDS);
-    pathString = 
conf.getVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_LOCATION);
-    path = new Path(pathString);
+    String pathString = 
conf.getVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_LOCATION);
+    path = Paths.get(pathString).toAbsolutePath();
+    LOGGER.info("Reporting metrics to {}", path);
+    // We want tmpDir to be the same directory as the destination file so that the
+    // temp file can be atomically moved onto the destination metrics file
+    tmpDir = path.getParent();
   }
 
   @Override
   public void start() {
+    executorService = Executors.newScheduledThreadPool(1,
+        new 
ThreadFactoryBuilder().setNameFormat(JSON_REPORTER_THREAD_NAME).build());
+    executorService.scheduleWithFixedDelay(this,0, interval, 
TimeUnit.MILLISECONDS);
+  }
+
+  @Override
+  public void close() {
+    executorService.shutdown();
+  }
 
-    final Path tmpPath = new Path(pathString + ".tmp");
-    URI tmpPathURI = tmpPath.toUri();
-    final FileSystem fs;
+  @Override
+  public void run() {
+    Path tmpFile = null;
     try {
-      if (tmpPathURI.getScheme() == null && tmpPathURI.getAuthority() == null) 
{
-        //default local
-        fs = FileSystem.getLocal(conf);
-      } else {
-        fs = FileSystem.get(tmpPathURI, conf);
-      }
-    }
-    catch (IOException e) {
-        LOGGER.error("Unable to access filesystem for path " + tmpPath + ". 
Aborting reporting", e);
+      // Dump metrics to string as JSON
+      String json = null;
+      try {
+        json = jsonWriter.writeValueAsString(metricRegistry);
+      } catch (JsonProcessingException e) {
+        LOGGER.error("Unable to convert json to string ", e);
         return;
-    }
+      }
 
-    Runnable task = new Runnable() {
-      public void run() {
-        try {
-          String json = null;
-          try {
-            json = jsonWriter.writeValueAsString(metricRegistry);
-          } catch (JsonProcessingException e) {
-            LOGGER.error("Unable to convert json to string ", e);
-            return;
-          }
+      // Metrics are first dumped to a temp file which is then renamed to the 
destination
+      try {
+        tmpFile = Files.createTempFile(tmpDir, "hmetrics", "json", FILE_ATTRS);
+      } catch (IOException e) {
+        LOGGER.error("failed to create temp file for JSON metrics", e);
+        return;
+      } catch (SecurityException e) {
+        // This shouldn't ever happen
+        LOGGER.error("failed to create temp file for JSON metrics: no 
permissions", e);
+        return;
+      } catch (UnsupportedOperationException e) {
+        // This shouldn't ever happen
+        LOGGER.error("failed to create temp file for JSON metrics: operartion 
not supported", e);
+        return;
+      }
 
-          BufferedWriter bw = null;
-          try {
-            fs.delete(tmpPath, true);
-            bw = new BufferedWriter(new OutputStreamWriter(fs.create(tmpPath, 
true)));
-            bw.write(json);
-            fs.setPermission(tmpPath, FsPermission.createImmutable((short) 
0644));
-          } catch (IOException e) {
-            LOGGER.error("Unable to write to temp file " + tmpPath, e);
-            return;
-          } finally {
-            if (bw != null) {
-              bw.close();
-            }
-          }
+      // Write json to the temp file.
+      try (BufferedWriter bw = new BufferedWriter(new 
FileWriter(tmpFile.toFile()))) {
+        bw.write(json);
+      } catch (IOException e) {
+        LOGGER.error("Unable to write to temp file " + tmpFile, e);
+        return;
+      }
 
-          try {
-            fs.rename(tmpPath, path);
-            fs.setPermission(path, FsPermission.createImmutable((short) 0644));
-          } catch (IOException e) {
-            LOGGER.error("Unable to rename temp file " + tmpPath + " to " + 
pathString, e);
-            return;
-          }
-        } catch (Throwable t) {
-          // catch all errors (throwable and execptions to prevent subsequent 
tasks from being suppressed)
-          LOGGER.error("Error executing scheduled task ", t);
+      // Move temp file to the destination file
+      try {
+        Files.move(tmpFile, path, StandardCopyOption.REPLACE_EXISTING);
+      } catch (Exception e) {
+        LOGGER.error("Unable to rename temp file {} to {}", tmpFile, path, e);
+        return;
+      }
+    } catch (Throwable t) {
+      // catch all errors (throwables and exceptions) to prevent subsequent tasks from being suppressed
+      LOGGER.error("Error executing scheduled task ", t);
+    } finally {
+      // If something happened and we were not able to rename the temp file, 
attempt to remove it
+      if (tmpFile != null && tmpFile.toFile().exists()) {
+        // Attempt to delete temp file, if this fails, not much can be done 
about it.
+        try {
+          Files.delete(tmpFile);
+        } catch (Exception e) {
+          LOGGER.error("failed to delete yemporary metrics file {}", tmpFile, 
e);
         }
       }
-    };
-
-    executorService.scheduleWithFixedDelay(task,0, interval, 
TimeUnit.MILLISECONDS);
-  }
-
-  @Override
-  public void close() {
-    executorService.shutdown();
+    }
   }
 }
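[Editor's note] The pattern introduced above -- create the temp file with explicit POSIX
permissions in the destination's own directory, write the JSON, then rename it over the
destination -- is the core of the fix. The following is a minimal standalone sketch of that
NIO pattern, assuming a POSIX filesystem; the class name, method name, and paths
(AtomicJsonWrite, writeAtomically, /tmp/hive-metrics-example.json) are illustrative only
and not part of the patch.

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.StandardCopyOption;
    import java.nio.file.attribute.FileAttribute;
    import java.nio.file.attribute.PosixFilePermission;
    import java.nio.file.attribute.PosixFilePermissions;
    import java.util.Set;

    public class AtomicJsonWrite {
      // rw-r--r-- applied atomically when the temp file is created (no chmod window)
      private static final FileAttribute<Set<PosixFilePermission>> FILE_ATTRS =
          PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rw-r--r--"));

      // Write content to a temp file in the destination's directory, then move it over
      // the destination. Because both paths share a filesystem, the move is a rename,
      // so readers never observe a partially written file.
      static void writeAtomically(Path dest, String content) throws IOException {
        Path tmp = Files.createTempFile(dest.getParent(), "hmetrics", ".json", FILE_ATTRS);
        try {
          Files.write(tmp, content.getBytes(StandardCharsets.UTF_8));
          Files.move(tmp, dest, StandardCopyOption.REPLACE_EXISTING);
        } finally {
          Files.deleteIfExists(tmp); // no-op if the move above succeeded
        }
      }

      public static void main(String[] args) throws IOException {
        writeAtomically(Paths.get("/tmp/hive-metrics-example.json"), "{\"counters\":{}}");
      }
    }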

http://git-wip-us.apache.org/repos/asf/hive/blob/7dc701c5/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java
----------------------------------------------------------------------
diff --git 
a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java
 
b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java
index 67f81d6..254af7d 100644
--- 
a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java
+++ 
b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java
@@ -20,24 +20,27 @@ package org.apache.hadoop.hive.common.metrics.metrics2;
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
 import org.apache.hadoop.hive.common.metrics.MetricsTestUtils;
 import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
 import org.apache.hadoop.hive.common.metrics.common.MetricsVariable;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.Assert;
-import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
+import static java.lang.Thread.sleep;
 import static org.junit.Assert.assertTrue;
 
 /**
@@ -45,32 +48,36 @@ import static org.junit.Assert.assertTrue;
  */
 public class TestCodahaleMetrics {
 
-  private static File workDir = new File(System.getProperty("test.tmp.dir"));
   private static File jsonReportFile;
-  public static MetricRegistry metricRegistry;
+  private static MetricRegistry metricRegistry;
+  private static final long REPORT_INTERVAL_MS = 100;
 
-  @Before
-  public void before() throws Exception {
+  @BeforeClass
+  public static void setUp() throws Exception {
     HiveConf conf = new HiveConf();
 
-    jsonReportFile = new File(workDir, "json_reporting");
-    jsonReportFile.delete();
+    jsonReportFile = File.createTempFile("TestCodahaleMetrics", ".json");
 
-    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, "local");
     conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, 
CodahaleMetrics.class.getCanonicalName());
     conf.setVar(HiveConf.ConfVars.HIVE_CODAHALE_METRICS_REPORTER_CLASSES,
         
"org.apache.hadoop.hive.common.metrics.metrics2.JsonFileMetricsReporter, "
             + 
"org.apache.hadoop.hive.common.metrics.metrics2.JmxMetricsReporter");
-    conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_LOCATION, 
jsonReportFile.toString());
-    conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, "100ms");
+    conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_LOCATION, 
jsonReportFile.getAbsolutePath());
+    conf.setTimeVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, 
REPORT_INTERVAL_MS,
+        TimeUnit.MILLISECONDS);
 
     MetricsFactory.init(conf);
     metricRegistry = ((CodahaleMetrics) 
MetricsFactory.getInstance()).getMetricRegistry();
   }
 
-  @After
-  public void after() throws Exception {
-    MetricsFactory.close();
+  @AfterClass
+  public static void cleanup() {
+    try {
+      MetricsFactory.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+    jsonReportFile.delete();
   }
 
   @Test
@@ -79,10 +86,11 @@ public class TestCodahaleMetrics {
     for (int i = 0; i < runs; i++) {
       MetricsFactory.getInstance().startStoredScope("method1");
       MetricsFactory.getInstance().endStoredScope("method1");
+      Timer timer = metricRegistry.getTimers().get("method1");
+      Assert.assertEquals(i + 1, timer.getCount());
     }
 
     Timer timer = metricRegistry.getTimers().get("method1");
-    Assert.assertEquals(5, timer.getCount());
     Assert.assertTrue(timer.getMeanRate() > 0);
   }
 
@@ -92,9 +100,9 @@ public class TestCodahaleMetrics {
     int runs = 5;
     for (int i = 0; i < runs; i++) {
       MetricsFactory.getInstance().incrementCounter("count1");
+      Counter counter = metricRegistry.getCounters().get("count1");
+      Assert.assertEquals(i + 1, counter.getCount());
     }
-    Counter counter = metricRegistry.getCounters().get("count1");
-    Assert.assertEquals(5L, counter.getCount());
   }
 
   @Test
@@ -119,21 +127,26 @@ public class TestCodahaleMetrics {
     Assert.assertTrue(timer.getMeanRate() > 0);
   }
 
+  /**
+   * Test JSON reporter.
+   * <ul>
+   *   <li>increment the counter value</li>
+   *   <li>wait a bit for the new report to be written</li>
+   *   <li>read the value from JSON file</li>
+   *   <li>verify that the value matches expectation</li>
+   * </ul>
+   * This check is repeated a few times to verify that the values are updated 
over time.
+   * @throws Exception if fails to read counter value
+   */
   @Test
   public void testFileReporting() throws Exception {
     int runs = 5;
+    String  counterName = "count2";
     for (int i = 0; i < runs; i++) {
-      MetricsFactory.getInstance().incrementCounter("count2");
+      MetricsFactory.getInstance().incrementCounter(counterName);
+      sleep(REPORT_INTERVAL_MS + REPORT_INTERVAL_MS / 2);
+      Assert.assertEquals(i + 1, getCounterValue(counterName));
     }
-
-    byte[] jsonData = 
MetricsTestUtils.getFileData(jsonReportFile.getAbsolutePath(), 2000, 3);
-    ObjectMapper objectMapper = new ObjectMapper();
-
-    JsonNode rootNode = objectMapper.readTree(jsonData);
-    JsonNode countersNode = rootNode.path("counters");
-    JsonNode methodCounterNode = countersNode.path("count2");
-    JsonNode countNode = methodCounterNode.path("count");
-    Assert.assertEquals(countNode.asInt(), 5);
   }
 
   class TestMetricsVariable implements MetricsVariable {
@@ -176,6 +189,19 @@ public class TestCodahaleMetrics {
     MetricsFactory.getInstance().markMeter("meter");
     json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson();
     MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.METER, "meter", 
"2");
+  }
 
+  /**
+   * Read counter value from JSON metric report
+   * @param name counter name
+   * @return counter value
+   * @throws FileNotFoundException if file doesn't exist
+   */
+  private int getCounterValue(String name) throws FileNotFoundException {
+    JsonParser parser = new JsonParser();
+    JsonElement element = parser.parse(new 
FileReader(jsonReportFile.getAbsolutePath()));
+    JsonObject jobj = element.getAsJsonObject();
+    jobj = jobj.getAsJsonObject("counters").getAsJsonObject(name);
+    return jobj.get("count").getAsInt();
   }
 }
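[Editor's note] The rewritten test polls the report file instead of reading it once, and the
new getCounterValue helper relies on the Codahale MetricsModule layout: counters live under a
top-level "counters" object, each carrying a "count" field. A small self-contained sketch of
that navigation with Gson, using an inline JSON string in place of the report file (the
counter name and value here are illustrative):

    import com.google.gson.JsonObject;
    import com.google.gson.JsonParser;

    public class CounterJsonExample {
      public static void main(String[] args) {
        // Abridged shape of the reporter output that the test parses
        String json = "{\"counters\":{\"count2\":{\"count\":5}},\"timers\":{}}";
        JsonObject root = new JsonParser().parse(json).getAsJsonObject();
        int count = root.getAsJsonObject("counters")
                        .getAsJsonObject("count2")
                        .get("count")
                        .getAsInt();
        System.out.println("count2 = " + count); // prints 5
      }
    }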

http://git-wip-us.apache.org/repos/asf/hive/blob/7dc701c5/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/metrics/JsonReporter.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/metrics/JsonReporter.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/metrics/JsonReporter.java
index b804cda..f71bb25 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/metrics/JsonReporter.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/metrics/JsonReporter.java
@@ -30,34 +30,75 @@ import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.BufferedWriter;
+import java.io.FileWriter;
 import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.net.URI;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.nio.file.attribute.FileAttribute;
+import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.PosixFilePermissions;
+import java.util.Set;
 import java.util.SortedMap;
 import java.util.concurrent.TimeUnit;
 
+/**
+ * A metrics reporter for Metrics that dumps metrics periodically into
+ * a file in JSON format.
+ */
 public class JsonReporter extends ScheduledReporter {
+  //
+  // Implementation notes.
+  //
+  // 1. Since only local file systems are supported, there is no need to use 
Hadoop
+  //    version of Path class.
+  // 2. java.nio package provides modern implementation of file and directory 
operations
+  //    which is better than the traditional java.io, so we are using it here.
+  //    In particular, it supports atomic creation of temporary files with 
specified
+  //    permissions in the specified directory. This also avoids various 
attacks possible
+  //    when temp file name is generated first, followed by file creation.
+  //    See http://www.oracle.com/technetwork/articles/javase/nio-139333.html 
for
+  //    the description of NIO API and
+  //    http://docs.oracle.com/javase/tutorial/essential/io/legacy.html for the
+  //    description of interoperability between the legacy IO API and the NIO API.
+  // 3. To avoid race conditions with readers of the metrics file, the 
implementation
+  //    dumps metrics to a temporary file in the same directory as the actual 
metrics
+  //    file and then renames it to the destination. Since both are located on 
the same
+  //    filesystem, this rename is likely to be atomic (as long as the underlying OS
+  //    supports atomic renames).
+  //
+  // NOTE: This reporter is very similar to
+  //       org.apache.hadoop.hive.common.metrics.metrics2.JsonFileMetricsReporter.
+  //       It would be good to unify the two.
+  //
   private static final Logger LOG = 
LoggerFactory.getLogger(JsonReporter.class);
 
-  private final Configuration conf;
+  private static final FileAttribute<Set<PosixFilePermission>> FILE_ATTRS =
+          
PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rw-r--r--"));
+
   private final MetricRegistry registry;
   private ObjectWriter jsonWriter;
-  private Path path;
-  private Path tmpPath;
-  private FileSystem fs;
+  // Location of JSON file
+  private final Path path;
+  // tmpDir is the parent directory of path
+  private final Path tmpDir;
 
   private JsonReporter(MetricRegistry registry, String name, MetricFilter 
filter,
                        TimeUnit rateUnit, TimeUnit durationUnit, Configuration 
conf) {
     super(registry, name, filter, rateUnit, durationUnit);
-    this.conf = conf;
+    String pathString = MetastoreConf.getVar(conf, MetastoreConf.ConfVars 
.METRICS_JSON_FILE_LOCATION);
+    path = Paths.get(pathString).toAbsolutePath();
+    LOG.info("Reporting metrics to {}", path);
+    // We want tmpDir to be the same directory as the destination file so that the
+    // temp file can be atomically moved onto the destination metrics file
+    tmpDir = path.getParent();
     this.registry = registry;
   }
 
@@ -65,23 +106,6 @@ public class JsonReporter extends ScheduledReporter {
   public void start(long period, TimeUnit unit) {
     jsonWriter = new ObjectMapper().registerModule(new 
MetricsModule(TimeUnit.MILLISECONDS,
         TimeUnit.MILLISECONDS, false)).writerWithDefaultPrettyPrinter();
-    String pathString = MetastoreConf.getVar(conf, MetastoreConf.ConfVars 
.METRICS_JSON_FILE_LOCATION);
-    path = new Path(pathString);
-
-    tmpPath = new Path(pathString + ".tmp");
-    URI tmpPathURI = tmpPath.toUri();
-    try {
-      if (tmpPathURI.getScheme() == null && tmpPathURI.getAuthority() == null) 
{
-        //default local
-        fs = FileSystem.getLocal(conf);
-      } else {
-        fs = FileSystem.get(tmpPathURI, conf);
-      }
-    }
-    catch (IOException e) {
-      LOG.error("Unable to access filesystem for path " + tmpPath + ". 
Aborting reporting", e);
-      return;
-    }
     super.start(period, unit);
   }
 
@@ -98,32 +122,49 @@ public class JsonReporter extends ScheduledReporter {
       return;
     }
 
-    BufferedWriter bw = null;
+    // Metrics are first dumped to a temp file which is then renamed to the 
destination
+    Path tmpFile = null;
     try {
-      fs.delete(tmpPath, true);
-      bw = new BufferedWriter(new OutputStreamWriter(fs.create(tmpPath, 
true)));
-      bw.write(json);
-      fs.setPermission(tmpPath, FsPermission.createImmutable((short) 0644));
+      tmpFile = Files.createTempFile(tmpDir, "hmsmetrics", "json", FILE_ATTRS);
     } catch (IOException e) {
-      LOG.error("Unable to write to temp file " + tmpPath, e);
+      LOG.error("failed to create temp file for JSON metrics", e);
+      return;
+    } catch (SecurityException e) {
+      // This shouldn't ever happen
+      LOG.error("failed to create temp file for JSON metrics: no permissions", 
e);
       return;
+    } catch (UnsupportedOperationException e) {
+      // This shouldn't ever happen
+      LOG.error("failed to create temp file for JSON metrics: operartion not 
supported", e);
+      return;
+    }
+
+    // Use try .. finally to cleanup temp file if something goes wrong
+    try {
+      // Write json to the temp file
+      try (BufferedWriter bw = new BufferedWriter(new 
FileWriter(tmpFile.toFile()))) {
+        bw.write(json);
+      } catch (IOException e) {
+        LOG.error("Unable to write to temp file {}" + tmpFile, e);
+        return;
+      }
+
+      try {
+        Files.move(tmpFile, path, StandardCopyOption.REPLACE_EXISTING);
+      } catch (IOException e) {
+        LOG.error("Unable to rename temp file {} to {}", tmpFile, path, e);
+      }
     } finally {
-      if (bw != null) {
+      // If something happened and we were not able to rename the temp file, 
attempt to remove it
+      if (tmpFile.toFile().exists()) {
+        // Attempt to delete temp file, if this fails, not much can be done 
about it.
         try {
-          bw.close();
-        } catch (IOException e) {
-          // Not much we can do
-          LOG.error("Caught error closing json metric reporter file", e);
+          Files.delete(tmpFile);
+        } catch (Exception e) {
+          LOG.error("failed to delete yemporary metrics file {}", tmpFile, e);
         }
       }
     }
-
-    try {
-      fs.rename(tmpPath, path);
-      fs.setPermission(path, FsPermission.createImmutable((short) 0644));
-    } catch (IOException e) {
-      LOG.error("Unable to rename temp file " + tmpPath + " to " + path, e);
-    }
   }
 
   public static Builder forRegistry(MetricRegistry registry, Configuration 
conf) {
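[Editor's note] The rename performed in report() uses REPLACE_EXISTING, and the implementation
note above only hedges that the move is "likely" atomic. NIO can also be asked for an
explicitly atomic move. The sketch below is not part of the patch; it is an alternative that
requests ATOMIC_MOVE first and falls back to a plain replace when the filesystem cannot
guarantee atomicity (class and file names are illustrative).

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.AtomicMoveNotSupportedException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardCopyOption;

    public final class MoveIntoPlace {
      // Prefer an explicitly atomic rename; fall back to a plain replace when the
      // filesystem cannot guarantee atomicity. Note that whether ATOMIC_MOVE replaces
      // an already-existing target is implementation specific.
      static void moveIntoPlace(Path tmp, Path dest) throws IOException {
        try {
          Files.move(tmp, dest, StandardCopyOption.ATOMIC_MOVE);
        } catch (AtomicMoveNotSupportedException e) {
          Files.move(tmp, dest, StandardCopyOption.REPLACE_EXISTING);
        }
      }

      public static void main(String[] args) throws IOException {
        Path tmp = Files.createTempFile("hmsmetrics-example", ".json");
        Files.write(tmp, "{}".getBytes(StandardCharsets.UTF_8));
        Path dest = tmp.resolveSibling("hmsmetrics-example-dest.json");
        moveIntoPlace(tmp, dest);
        System.out.println("moved to " + dest);
      }
    }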

http://git-wip-us.apache.org/repos/asf/hive/blob/7dc701c5/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java
 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java
index 259a4db..ebffef7 100644
--- 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java
+++ 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java
@@ -18,16 +18,9 @@
 package org.apache.hadoop.hive.metastore.metrics;
 
 import com.codahale.metrics.Counter;
-import com.codahale.metrics.Gauge;
-import com.codahale.metrics.Histogram;
-import com.codahale.metrics.Meter;
-import com.codahale.metrics.MetricRegistry;
-import com.codahale.metrics.Timer;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.junit.Assert;
 import org.junit.Before;
@@ -36,65 +29,38 @@ import org.junit.Test;
 import java.io.File;
 import java.nio.file.Files;
 import java.nio.file.Paths;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
 
 public class TestMetrics {
+  private static final long REPORT_INTERVAL = 1;
+
+  @Before
+  public void shutdownMetrics() {
+    Metrics.shutdown();
+  }
 
   @Test
   public void jsonReporter() throws Exception {
-    String jsonFile = System.getProperty("java.io.tmpdir") + 
System.getProperty("file.separator") +
-        "TestMetricsOutput.json";
+    File jsonReportFile = File.createTempFile("TestMetrics", ".json");
+    String jsonFile = jsonReportFile.getAbsolutePath();
+
     Configuration conf = MetastoreConf.newMetastoreConf();
     MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_REPORTERS, 
"json");
     MetastoreConf.setVar(conf, 
MetastoreConf.ConfVars.METRICS_JSON_FILE_LOCATION, jsonFile);
-    MetastoreConf.setTimeVar(conf, 
MetastoreConf.ConfVars.METRICS_JSON_FILE_INTERVAL, 1,
+    MetastoreConf.setTimeVar(conf, 
MetastoreConf.ConfVars.METRICS_JSON_FILE_INTERVAL, REPORT_INTERVAL,
         TimeUnit.SECONDS);
 
     Metrics.initialize(conf);
-
-    final List<String> words = Arrays.asList("mary", "had", "a", "little", 
"lamb");
-    MetricRegistry registry = Metrics.getRegistry();
-    registry.register("my-gauge", new Gauge<Integer>() {
-
-      @Override
-      public Integer getValue() {
-        return words.size();
-      }
-    });
-
     Counter counter = Metrics.getOrCreateCounter("my-counter");
-    counter.inc();
-    counter.inc();
-
-    Meter meter = registry.meter("my-meter");
-    meter.mark();
-    Thread.sleep(10);
-    meter.mark();
-
-    Timer timer = Metrics.getOrCreateTimer("my-timer");
-    timer.time(new Callable<Long>() {
-      @Override
-      public Long call() throws Exception {
-        Thread.sleep(100);
-        return 1L;
-      }
-    });
-
-    // Make sure it has a chance to dump it.
-    Thread.sleep(2000);
 
-    FileSystem fs = FileSystem.get(conf);
-    Path path = new Path(jsonFile);
-    Assert.assertTrue(fs.exists(path));
-
-    String json = new String(MetricsTestUtils.getFileData(jsonFile, 200, 10));
-    MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, 
"my-counter", 2);
-    MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.METER, 
"my-meter", 2);
-    MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, 
"my-timer", 1);
-    MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, 
"my-gauge", 5);
+    for (int i = 0; i < 5; i++) {
+      counter.inc();
+      // Make sure it has a chance to dump it.
+      Thread.sleep(REPORT_INTERVAL * 1000 + REPORT_INTERVAL * 1000 / 2);
+      String json = new String(MetricsTestUtils.getFileData(jsonFile, 200, 
10));
+      MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, 
"my-counter",
+          i + 1);
+    }
   }
 
   @Test
@@ -152,11 +118,6 @@ public class TestMetrics {
     Assert.assertEquals(2, Metrics.getReporters().size());
   }
 
-  @Before
-  public void shutdownMetrics() {
-    Metrics.shutdown();
-  }
-
   // Stolen from Hive's MetricsTestUtils.  Probably should break it out into 
it's own class.
   private static class MetricsTestUtils {
 
