HDFS-14121. Log message about the old hosts file format is misleading
(Contributed by Zsolt Venczel via Daniel Templeton)

Change-Id: I7ff548f6c82e0aeb08a7a50ca7c2c827db8726bb


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/aa128598
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/aa128598
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/aa128598

Branch: refs/heads/HDFS-13891
Commit: aa1285989092bc253c45b7a83acec2e9bce2c5dc
Parents: ca379e1
Author: Zsolt Venczel <[email protected]>
Authored: Fri Dec 14 13:02:45 2018 +0100
Committer: Daniel Templeton <[email protected]>
Committed: Fri Dec 14 13:54:57 2018 +0100

----------------------------------------------------------------------
 .../hdfs/util/CombinedHostsFileReader.java      | 41 ++++++++++++++------
 1 file changed, 29 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/aa128598/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
index aa8e4c1..be1f6d0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
@@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
+import java.io.File;
 import java.io.FileInputStream;
 import java.io.InputStreamReader;
 import java.io.IOException;
@@ -61,26 +62,37 @@ public final class CombinedHostsFileReader {
   private CombinedHostsFileReader() {
   }
 
+  private static final String REFER_TO_DOC_MSG = " For the correct JSON" +
+          " format please refer to the documentation (https://hadoop.apache" +
+          ".org/docs/current/hadoop-project-dist/hadoop-hdfs/HdfsDataNodeAd" +
+          "minGuide.html#JSON-based_configuration)";
+
   /**
    * Deserialize a set of DatanodeAdminProperties from a json file.
-   * @param hostsFile the input json file to read from
+   * @param hostsFilePath the input json file to read from
    * @return the set of DatanodeAdminProperties
    * @throws IOException
    */
   public static DatanodeAdminProperties[]
-      readFile(final String hostsFile) throws IOException {
+      readFile(final String hostsFilePath) throws IOException {
     DatanodeAdminProperties[] allDNs = new DatanodeAdminProperties[0];
     ObjectMapper objectMapper = new ObjectMapper();
+    File hostFile = new File(hostsFilePath);
     boolean tryOldFormat = false;
-    try (Reader input =
-        new InputStreamReader(new FileInputStream(hostsFile), "UTF-8")) {
-      allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
-    } catch (JsonMappingException jme) {
-      // The old format doesn't have json top-level token to enclose the array.
-      // For backward compatibility, try parsing the old format.
-      tryOldFormat = true;
-      LOG.warn("{} has invalid JSON format." +
-          "Try the old format without top-level token defined.", hostsFile);
+
+    if (hostFile.length() > 0) {
+      try (Reader input =
+                   new InputStreamReader(new FileInputStream(hostFile),
+                           "UTF-8")) {
+        allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
+      } catch (JsonMappingException jme) {
+        // The old format doesn't have json top-level token to enclose
+        // the array.
+        // For backward compatibility, try parsing the old format.
+        tryOldFormat = true;
+      }
+    } else {
+      LOG.warn(hostsFilePath + " is empty." + REFER_TO_DOC_MSG);
     }
 
     if (tryOldFormat) {
@@ -89,13 +101,18 @@ public final class CombinedHostsFileReader {
       JsonFactory jsonFactory = new JsonFactory();
       List<DatanodeAdminProperties> all = new ArrayList<>();
       try (Reader input =
-          new InputStreamReader(new FileInputStream(hostsFile), "UTF-8")) {
+          new InputStreamReader(new FileInputStream(hostsFilePath),
+                  "UTF-8")) {
         Iterator<DatanodeAdminProperties> iterator =
             objectReader.readValues(jsonFactory.createParser(input));
         while (iterator.hasNext()) {
           DatanodeAdminProperties properties = iterator.next();
           all.add(properties);
         }
+        LOG.warn(hostsFilePath + " has legacy JSON format." + REFER_TO_DOC_MSG);
+      } catch (Throwable ex) {
+        LOG.warn(hostsFilePath + " has invalid JSON format." + REFER_TO_DOC_MSG,
+                ex);
       }
       allDNs = all.toArray(new DatanodeAdminProperties[all.size()]);
     }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to