[36/50] hadoop git commit: HDFS-12473. Change hosts JSON file format.

2017-09-21 Thread asuresh
HDFS-12473. Change hosts JSON file format.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/230b85d5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/230b85d5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/230b85d5

Branch: refs/heads/YARN-6592
Commit: 230b85d5865b7e08fb7aaeab45295b5b966011ef
Parents: 7e58b24
Author: Ming Ma 
Authored: Wed Sep 20 09:03:59 2017 -0700
Committer: Ming Ma 
Committed: Wed Sep 20 09:03:59 2017 -0700

----------------------------------------------------------------------
 .../hdfs/util/CombinedHostsFileReader.java      | 67 ++--
 .../hdfs/util/CombinedHostsFileWriter.java      | 23 ---
 .../CombinedHostFileManager.java                |  3 +-
 .../hdfs/util/TestCombinedHostsFileReader.java  | 44 -
 .../src/test/resources/dfs.hosts.json           | 16 +++--
 .../src/test/resources/legacy.dfs.hosts.json    |  7 ++
 6 files changed, 102 insertions(+), 58 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/230b85d5/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
index 8da5655..aa8e4c1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
@@ -19,58 +19,85 @@
 package org.apache.hadoop.hdfs.util;
 
 import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
+
 import java.io.FileInputStream;
 import java.io.InputStreamReader;
 import java.io.IOException;
 import java.io.Reader;
+import java.util.ArrayList;
 import java.util.Iterator;
-import java.util.Set;
-import java.util.HashSet;
+import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
- * Reader support for JSON based datanode configuration, an alternative
+ * Reader support for JSON-based datanode configuration, an alternative format
  * to the exclude/include files configuration.
- * The JSON file format is the array of elements where each element
+ * The JSON file format defines the array of elements where each element
  * in the array describes the properties of a datanode. The properties of
- * a datanode is defined in {@link DatanodeAdminProperties}. For example,
+ * a datanode is defined by {@link DatanodeAdminProperties}. For example,
  *
- * {"hostName": "host1"}
- * {"hostName": "host2", "port": 50, "upgradeDomain": "ud0"}
- * {"hostName": "host3", "port": 0, "adminState": "DECOMMISSIONED"}
+ * [
+ *   {"hostName": "host1"},
+ *   {"hostName": "host2", "port": 50, "upgradeDomain": "ud0"},
+ *   {"hostName": "host3", "port": 0, "adminState": "DECOMMISSIONED"}
+ * ]
  */
 @InterfaceAudience.LimitedPrivate({"HDFS"})
 @InterfaceStability.Unstable
 public final class CombinedHostsFileReader {
-  private static final ObjectReader READER =
-      new ObjectMapper().readerFor(DatanodeAdminProperties.class);
-  private static final JsonFactory JSON_FACTORY = new JsonFactory();
+
+  public static final Logger LOG =
+      LoggerFactory.getLogger(CombinedHostsFileReader.class);
 
   private CombinedHostsFileReader() {
   }
 
   /**
    * Deserialize a set of DatanodeAdminProperties from a json file.
-   * @param hostsFile the input json file to read from.
+   * @param hostsFile the input json file to read from
    * @return the set of DatanodeAdminProperties
    * @throws IOException
    */
-  public static Set<DatanodeAdminProperties>
+  public static DatanodeAdminProperties[]
       readFile(final String hostsFile) throws IOException {
-    HashSet<DatanodeAdminProperties> allDNs = new HashSet<>();
+    DatanodeAdminProperties[] allDNs = new DatanodeAdminProperties[0];
+    ObjectMapper objectMapper = new ObjectMapper();
+    boolean tryOldFormat = false;
     try (Reader input =
-         new InputStreamReader(new FileInputStream(hostsFile), "UTF-8")) {
-      Iterator<DatanodeAdminProperties> iterator =
-          READER.readValues(JSON_FACTORY.createParser(input));
-      while (iterator.hasNext()) {
-        DatanodeAdminProperties properties = iterator.next();
-        allDNs.add(properties);
+        new InputStreamReader(new 
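
The hunk is cut off by the archive before the new parsing logic, but the
added imports (JsonMappingException, ArrayList, List) and the tryOldFormat
flag outline the approach: parse the file as a single JSON array first, and
fall back to the legacy one-object-per-line stream only when that fails.
A minimal self-contained sketch of that pattern against the Jackson 2 API;
HostProperties and readAnyFormat are illustrative stand-ins, not the Hadoop
classes:

import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public final class HostsJsonReaderSketch {

  /** Illustrative stand-in for DatanodeAdminProperties. */
  public static class HostProperties {
    public String hostName;
    public int port;
    public String upgradeDomain;
    public String adminState;
  }

  private HostsJsonReaderSketch() {
  }

  /** Try the new array layout first, then the legacy line-per-object layout. */
  public static HostProperties[] readAnyFormat(String hostsFile)
      throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    File json = new File(hostsFile);
    try {
      // New format: one top-level JSON array of host objects.
      return mapper.readValue(json, HostProperties[].class);
    } catch (JsonMappingException e) {
      // Legacy format: bare objects, one per line, with no enclosing array;
      // readValues() consumes them as a stream of root-level values.
      List<HostProperties> hosts = new ArrayList<>();
      try (MappingIterator<HostProperties> it =
          mapper.readerFor(HostProperties.class).readValues(json)) {
        while (it.hasNext()) {
          hosts.add(it.next());
        }
      }
      return hosts.toArray(new HostProperties[0]);
    }
  }
}

With this shape, a file in either layout deserializes to the same array,
which is what lets the new reader stay backward compatible with existing
hosts files.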

[02/13] hadoop git commit: HDFS-12473. Change hosts JSON file format.

2017-09-21 Thread aengineer
HDFS-12473. Change hosts JSON file format.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/230b85d5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/230b85d5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/230b85d5

Branch: refs/heads/HDFS-7240
Commit: 230b85d5865b7e08fb7aaeab45295b5b966011ef
Parents: 7e58b24
Author: Ming Ma 
Authored: Wed Sep 20 09:03:59 2017 -0700
Committer: Ming Ma 
Committed: Wed Sep 20 09:03:59 2017 -0700


[35/50] [abbrv] hadoop git commit: HDFS-12473. Change hosts JSON file format.

2017-09-20 Thread jhung
HDFS-12473. Change hosts JSON file format.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/230b85d5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/230b85d5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/230b85d5

Branch: refs/heads/YARN-5734
Commit: 230b85d5865b7e08fb7aaeab45295b5b966011ef
Parents: 7e58b24
Author: Ming Ma 
Authored: Wed Sep 20 09:03:59 2017 -0700
Committer: Ming Ma 
Committed: Wed Sep 20 09:03:59 2017 -0700


hadoop git commit: HDFS-12473. Change hosts JSON file format.

2017-09-20 Thread mingma
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 a81167e2e -> c54310a63


HDFS-12473. Change hosts JSON file format.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c54310a6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c54310a6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c54310a6

Branch: refs/heads/branch-2.8
Commit: c54310a6383f075eeb6c8b61efcd045cb610c5cd
Parents: a81167e
Author: Ming Ma 
Authored: Wed Sep 20 09:21:32 2017 -0700
Committer: Ming Ma 
Committed: Wed Sep 20 09:21:32 2017 -0700

----------------------------------------------------------------------
 .../hdfs/util/CombinedHostsFileReader.java      | 75 ++--
 .../hdfs/util/CombinedHostsFileWriter.java      | 26 +++
 .../CombinedHostFileManager.java                |  3 +-
 .../hdfs/util/TestCombinedHostsFileReader.java  | 47 +++-
 .../src/test/resources/dfs.hosts.json           | 12 ++--
 .../src/test/resources/legacy.dfs.hosts.json    |  5 ++
 6 files changed, 107 insertions(+), 61 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c54310a6/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
index 33acb91..f88aaef 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
@@ -18,58 +18,87 @@
 
 package org.apache.hadoop.hdfs.util;
 
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.map.JsonMappingException;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import java.io.EOFException;
 import java.io.FileInputStream;
 import java.io.InputStreamReader;
 import java.io.IOException;
 import java.io.Reader;
-
+import java.util.ArrayList;
 import java.util.Iterator;
-import java.util.Set;
-import java.util.HashSet;
+import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
-
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
- * Reader support for JSON based datanode configuration, an alternative
+ * Reader support for JSON-based datanode configuration, an alternative format
  * to the exclude/include files configuration.
- * The JSON file format is the array of elements where each element
+ * The JSON file format defines the array of elements where each element
  * in the array describes the properties of a datanode. The properties of
- * a datanode is defined in {@link DatanodeAdminProperties}. For example,
+ * a datanode is defined by {@link DatanodeAdminProperties}. For example,
  *
- * {"hostName": "host1"}
- * {"hostName": "host2", "port": 50, "upgradeDomain": "ud0"}
- * {"hostName": "host3", "port": 0, "adminState": "DECOMMISSIONED"}
+ * [
+ *   {"hostName": "host1"},
+ *   {"hostName": "host2", "port": 50, "upgradeDomain": "ud0"},
+ *   {"hostName": "host3", "port": 0, "adminState": "DECOMMISSIONED"}
+ * ]
  */
 @InterfaceAudience.LimitedPrivate({"HDFS"})
 @InterfaceStability.Unstable
 public final class CombinedHostsFileReader {
+
+  public static final Logger LOG =
+      LoggerFactory.getLogger(CombinedHostsFileReader.class);
+
   private CombinedHostsFileReader() {
   }
 
   /**
    * Deserialize a set of DatanodeAdminProperties from a json file.
-   * @param hostsFile the input json file to read from.
+   * @param hostsFile the input json file to read from
    * @return the set of DatanodeAdminProperties
    * @throws IOException
    */
-  public static Set<DatanodeAdminProperties>
+  public static DatanodeAdminProperties[]
       readFile(final String hostsFile) throws IOException {
-    HashSet<DatanodeAdminProperties> allDNs = new HashSet<>();
-    ObjectMapper mapper = new ObjectMapper();
+    DatanodeAdminProperties[] allDNs = new DatanodeAdminProperties[0];
+    ObjectMapper objectMapper = new ObjectMapper();
+    boolean tryOldFormat = false;
     try (Reader input =
-         new InputStreamReader(new FileInputStream(hostsFile), "UTF-8")) {
-      Iterator<DatanodeAdminProperties> iterator =
-          mapper.readValues(new JsonFactory().createJsonParser(input),
-              DatanodeAdminProperties.class);
-      while (iterator.hasNext()) {
-        DatanodeAdminProperties properties = iterator.next();
-
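
Both variants of the patch add the same pair of test resources
(dfs.hosts.json and legacy.dfs.hosts.json in the diffstats), which capture
the two layouts side by side. Illustrative contents, following the javadoc
example above (the actual test files differ):

legacy.dfs.hosts.json, one bare JSON object per line:

{"hostName": "host1"}
{"hostName": "host2", "port": 50, "upgradeDomain": "ud0"}
{"hostName": "host3", "port": 0, "adminState": "DECOMMISSIONED"}

dfs.hosts.json, a single well-formed JSON array:

[
  {"hostName": "host1"},
  {"hostName": "host2", "port": 50, "upgradeDomain": "ud0"},
  {"hostName": "host3", "port": 0, "adminState": "DECOMMISSIONED"}
]

A legacy file is not itself a valid JSON document (it has multiple root
values), which is presumably why the array form replaces it; the reader
keeps accepting the old layout only for backward compatibility.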

hadoop git commit: HDFS-12473. Change hosts JSON file format.

2017-09-20 Thread mingma
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8.2 e6597fe30 -> 7580a10e3


HDFS-12473. Change hosts JSON file format.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7580a10e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7580a10e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7580a10e

Branch: refs/heads/branch-2.8.2
Commit: 7580a10e3ebf6d1c58530af623cb27136b8a3de2
Parents: e6597fe
Author: Ming Ma 
Authored: Wed Sep 20 09:09:57 2017 -0700
Committer: Ming Ma 
Committed: Wed Sep 20 09:09:57 2017 -0700


hadoop git commit: HDFS-12473. Change hosts JSON file format.

2017-09-20 Thread mingma
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 6581f2dea -> 7dd662eaf


HDFS-12473. Change hosts JSON file format.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7dd662ea
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7dd662ea
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7dd662ea

Branch: refs/heads/branch-2
Commit: 7dd662eafd5448b9c858e61877632f5cecc0e13e
Parents: 6581f2d
Author: Ming Ma 
Authored: Wed Sep 20 09:08:41 2017 -0700
Committer: Ming Ma 
Committed: Wed Sep 20 09:08:41 2017 -0700

----------------------------------------------------------------------
 .../hdfs/util/CombinedHostsFileReader.java      | 74 ++--
 .../hdfs/util/CombinedHostsFileWriter.java      | 23 +++---
 .../CombinedHostFileManager.java                |  3 +-
 .../hdfs/util/TestCombinedHostsFileReader.java  | 44 +++-
 .../src/test/resources/dfs.hosts.json           | 16 +++--
 .../src/test/resources/legacy.dfs.hosts.json    |  7 ++
 6 files changed, 106 insertions(+), 61 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7dd662ea/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
index 9b23ad0..f88aaef 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
@@ -18,59 +18,87 @@
 
 package org.apache.hadoop.hdfs.util;
 
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.map.JsonMappingException;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import java.io.EOFException;
 import java.io.FileInputStream;
 import java.io.InputStreamReader;
 import java.io.IOException;
 import java.io.Reader;
+import java.util.ArrayList;
 import java.util.Iterator;
-import java.util.Set;
-import java.util.HashSet;
+import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectReader;
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
- * Reader support for JSON based datanode configuration, an alternative
+ * Reader support for JSON-based datanode configuration, an alternative format
  * to the exclude/include files configuration.
- * The JSON file format is the array of elements where each element
+ * The JSON file format defines the array of elements where each element
  * in the array describes the properties of a datanode. The properties of
- * a datanode is defined in {@link DatanodeAdminProperties}. For example,
+ * a datanode is defined by {@link DatanodeAdminProperties}. For example,
  *
- * {"hostName": "host1"}
- * {"hostName": "host2", "port": 50, "upgradeDomain": "ud0"}
- * {"hostName": "host3", "port": 0, "adminState": "DECOMMISSIONED"}
+ * [
+ *   {"hostName": "host1"},
+ *   {"hostName": "host2", "port": 50, "upgradeDomain": "ud0"},
+ *   {"hostName": "host3", "port": 0, "adminState": "DECOMMISSIONED"}
+ * ]
  */
 @InterfaceAudience.LimitedPrivate({"HDFS"})
 @InterfaceStability.Unstable
 public final class CombinedHostsFileReader {
-  private static final ObjectReader READER =
-      new ObjectMapper().reader(DatanodeAdminProperties.class);
-  private static final JsonFactory JSON_FACTORY = new JsonFactory();
+
+  public static final Logger LOG =
+      LoggerFactory.getLogger(CombinedHostsFileReader.class);
 
   private CombinedHostsFileReader() {
   }
 
   /**
    * Deserialize a set of DatanodeAdminProperties from a json file.
-   * @param hostsFile the input json file to read from.
+   * @param hostsFile the input json file to read from
    * @return the set of DatanodeAdminProperties
    * @throws IOException
    */
-  public static Set<DatanodeAdminProperties>
+  public static DatanodeAdminProperties[]
       readFile(final String hostsFile) throws IOException {
-    HashSet<DatanodeAdminProperties> allDNs = new HashSet<>();
+    DatanodeAdminProperties[] allDNs = new DatanodeAdminProperties[0];
+    ObjectMapper objectMapper = new ObjectMapper();
+    boolean tryOldFormat = false;
     try (Reader input =
-         new InputStreamReader(new FileInputStream(hostsFile), "UTF-8")) {
-      Iterator<DatanodeAdminProperties> iterator =
-          
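
Every branch's diffstat also touches CombinedHostsFileWriter.java, since
the writer must now emit the array layout the reader expects; none of the
truncated hunks show that side. A plausible minimal sketch, using the
Jackson 2 API of trunk (writeHosts and the List<?> signature are
illustrative, not the Hadoop method):

import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.File;
import java.io.IOException;
import java.util.List;

public final class HostsJsonWriterSketch {

  private HostsJsonWriterSketch() {
  }

  /**
   * Serializing the whole collection in one call makes Jackson render it
   * as a single top-level JSON array (the new layout), where the previous
   * writer emitted each element as its own root-level object.
   */
  public static void writeHosts(String hostsFile, List<?> allDNs)
      throws IOException {
    new ObjectMapper().writeValue(new File(hostsFile), allDNs);
  }
}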

hadoop git commit: HDFS-12473. Change hosts JSON file format.

2017-09-20 Thread mingma
Repository: hadoop
Updated Branches:
  refs/heads/branch-3.0 5c158f2f5 -> 816933722


HDFS-12473. Change hosts JSON file format.

(cherry picked from commit 230b85d5865b7e08fb7aaeab45295b5b966011ef)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/81693372
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/81693372
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/81693372

Branch: refs/heads/branch-3.0
Commit: 816933722af4d96a7b848a461f4228c2099c44c8
Parents: 5c158f2
Author: Ming Ma 
Authored: Wed Sep 20 09:03:59 2017 -0700
Committer: Ming Ma 
Committed: Wed Sep 20 09:05:56 2017 -0700


hadoop git commit: HDFS-12473. Change hosts JSON file format.

2017-09-20 Thread mingma
Repository: hadoop
Updated Branches:
  refs/heads/trunk 7e58b2478 -> 230b85d58


HDFS-12473. Change hosts JSON file format.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/230b85d5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/230b85d5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/230b85d5

Branch: refs/heads/trunk
Commit: 230b85d5865b7e08fb7aaeab45295b5b966011ef
Parents: 7e58b24
Author: Ming Ma 
Authored: Wed Sep 20 09:03:59 2017 -0700
Committer: Ming Ma 
Committed: Wed Sep 20 09:03:59 2017 -0700
