Author: dhruba
Date: Wed Oct 17 10:58:08 2007
New Revision: 585609
URL: http://svn.apache.org/viewvc?rev=585609&view=rev
Log:
HADOOP-2058. Facilitate creating new datanodes dynamically in
MiniDFSCluster. (Hairong Kuang via dhruba)
Modified:
lucene/hadoop/trunk/CHANGES.txt
lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java
Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=585609&r1=585608&r2=585609&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed Oct 17 10:58:08 2007
@@ -8,6 +8,8 @@
HADOOP-2045. Change committer list on website to a table, so that
folks can list their organization, timezone, etc. (cutting)
+ HADOOP-2058. Facilitate creating new datanodes dynamically in
+ MiniDFSCluster. (Hairong Kuang via dhruba)
Branch 0.15 (unreleased changes)
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java?rev=585609&r1=585608&r2=585609&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java Wed Oct 17 10:58:08 2007
@@ -153,7 +153,7 @@
}
/**
- * Modify the config and start up the DataNodes. The info port for
+ * Modify the config and start up additional DataNodes. The info port for
* DataNodes is guaranteed to use a free port.
*
* @param conf the base configuration to use in starting the DataNodes. This
@@ -173,6 +173,11 @@
if (nameNode == null) {
throw new IllegalStateException("NameNode is not running");
}
+
+ if (racks != null && numDataNodes > racks.length ) {
+   throw new IllegalArgumentException( "The length of racks [" + racks.length
+       + "] is less than the number of datanodes [" + numDataNodes + "].");
+ }
// Set up the right ports for the datanodes
conf.setInt("dfs.datanode.info.port", 0);
@@ -187,7 +192,8 @@
null : new String[] {"-"+operation.toString()};
String [] dnArgs = (operation == StartupOption.UPGRADE) ? null : args;
- for (int i = 0; i < numDataNodes; i++) {
+ int curDatanodesNum = dataNodes.size();
+ for (int i = curDatanodesNum; i < curDatanodesNum+numDataNodes; i++) {
Configuration dnConf = new Configuration(conf);
if (manageDfsDirs) {
File dir1 = new File(data_dir, "data"+(2*i+1));
@@ -200,8 +206,8 @@
}
dnConf.set("dfs.data.dir", dir1.getPath() + "," + dir2.getPath());
}
- if (racks != null && i < racks.length) {
- dnConf.set("dfs.datanode.rack", racks[i]);
+ if (racks != null) {
+ dnConf.set("dfs.datanode.rack", racks[i-curDatanodesNum]);
}
System.out.println("Starting DataNode " + i + " with dfs.data.dir: "
+ dnConf.get("dfs.data.dir"));