Author: stack
Date: Mon Feb 11 16:51:11 2008
New Revision: 620681
URL: http://svn.apache.org/viewvc?rev=620681&view=rev
Log:
HBASE-417 Factor TableOperation and subclasses into separate files
from HMaster
Modified:
hadoop/core/trunk/conf/log4j.properties
hadoop/core/trunk/docs/hadoop-default.html
hadoop/core/trunk/docs/skin/images/rc-b-l-15-1body-2menu-3menu.png
hadoop/core/trunk/docs/skin/images/rc-b-r-15-1body-2menu-3menu.png
hadoop/core/trunk/docs/skin/images/rc-b-r-5-1header-2tab-
selected-3tab-selected.png
hadoop/core/trunk/docs/skin/images/rc-t-
l-5-1header-2searchbox-3searchbox.png
hadoop/core/trunk/docs/skin/images/rc-t-l-5-1header-2tab-
selected-3tab-selected.png
hadoop/core/trunk/docs/skin/images/rc-t-l-5-1header-2tab-
unselected-3tab-unselected.png
hadoop/core/trunk/docs/skin/images/rc-t-r-15-1body-2menu-3menu.png
hadoop/core/trunk/docs/skin/images/rc-t-
r-5-1header-2searchbox-3searchbox.png
hadoop/core/trunk/docs/skin/images/rc-t-r-5-1header-2tab-
selected-3tab-selected.png
hadoop/core/trunk/docs/skin/images/rc-t-r-5-1header-2tab-
unselected-3tab-unselected.png
Modified: hadoop/core/trunk/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/conf/
log4j.properties?rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
--- hadoop/core/trunk/conf/log4j.properties (original)
+++ hadoop/core/trunk/conf/log4j.properties Mon Feb 11 16:51:11 2008
@@ -77,6 +77,8 @@
#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+log4j.logger.org.apache.hadoop.hbase=DEBUG
+#log4j.logger.org.mortbay.http=DEBUG
#
# Event Counter Appender
Modified: hadoop/core/trunk/docs/hadoop-default.html
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/docs/hadoop-
default.html?rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
--- hadoop/core/trunk/docs/hadoop-default.html (original)
+++ hadoop/core/trunk/docs/hadoop-default.html Mon Feb 11 16:51:11
2008
@@ -62,10 +62,6 @@
determine the host, port, etc. for a filesystem.</td>
</tr>
<tr>
-<td><a name="fs.trash.root">fs.trash.root</a></td><td>$
{hadoop.tmp.dir}/Trash</td><td>The trash directory, used by
FsShell's 'rm' command.
- </td>
-</tr>
-<tr>
<td><a name="fs.trash.interval">fs.trash.interval</a></td><td>0</
td><td>Number of minutes between trash checkpoints.
If zero, the trash feature is disabled.
</td>
@@ -106,25 +102,25 @@
</td>
</tr>
<tr>
-<td><a
name="dfs.secondary.http.bindAddress">dfs.secondary.http.bindAddress</
a></td><td>0.0.0.0:50090</td><td>
- The secondary namenode http server bind address and port.
+<td><a
name="dfs.secondary.http.address">dfs.secondary.http.address</a></
td><td>0.0.0.0:50090</td><td>
+ The secondary namenode http server address and port.
If the port is 0 then the server will start on a free port.
</td>
</tr>
<tr>
-<td><a name="dfs.datanode.bindAddress">dfs.datanode.bindAddress</
a></td><td>0.0.0.0:50010</td><td>
- The address where the datanode will listen to.
+<td><a name="dfs.datanode.address">dfs.datanode.address</a></
td><td>0.0.0.0:50010</td><td>
+ The address where the datanode server will listen to.
If the port is 0 then the server will start on a free port.
</td>
</tr>
<tr>
-<td><a
name="dfs.datanode.http.bindAddress">dfs.datanode.http.bindAddress</
a></td><td>0.0.0.0:50075</td><td>
- The datanode http server bind address and port.
+<td><a name="dfs.datanode.http.address">dfs.datanode.http.address</
a></td><td>0.0.0.0:50075</td><td>
+ The datanode http server address and port.
If the port is 0 then the server will start on a free port.
</td>
</tr>
<tr>
-<td><a name="dfs.http.bindAddress">dfs.http.bindAddress</a></
td><td>0.0.0.0:50070</td><td>
+<td><a name="dfs.http.address">dfs.http.address</a></
td><td>0.0.0.0:50070</td><td>
The address and the base port where the dfs namenode web ui
will listen on.
If the port is 0 then the server will start on a free port.
</td>
@@ -163,6 +159,11 @@
directories, for redundancy. </td>
</tr>
<tr>
+<td><a name="dfs.web.ugi">dfs.web.ugi</a></
td><td>webuser,webgroup</td><td>The user account used by the web
interface.
+ Syntax: USERNAME,GROUP1,GROUP2, ...
+ </td>
+</tr>
+<tr>
<td><a name="dfs.permissions">dfs.permissions</a></td><td>true</
td><td>
If "true", enable permission checking in HDFS.
If "false", permission checking is turned off,
@@ -267,6 +268,12 @@
</td>
</tr>
<tr>
+<td><a
name="dfs.namenode.decommission.interval">dfs.namenode.decommission.in
terval</a></td><td>300</td><td>Namenode periodicity in seconds to
check if decommission is complete.</td>
+</tr>
+<tr>
+<td><a name="dfs.replication.interval">dfs.replication.interval</
a></td><td>3</td><td>The periodicity in seconds with which the
namenode computes replication work for datanodes. </td>
+</tr>
+<tr>
<td><a name="fs.s3.block.size">fs.s3.block.size</a></
td><td>67108864</td><td>Block size to use when writing files to
S3.</td>
</tr>
<tr>
@@ -291,8 +298,8 @@
</td>
</tr>
<tr>
-<td><a
name="mapred.job.tracker.http.bindAddress">mapred.job.tracker.http.bin
dAddress</a></td><td>0.0.0.0:50030</td><td>
- The job tracker http server bind address and port.
+<td><a
name="mapred.job.tracker.http.address">mapred.job.tracker.http.address
</a></td><td>0.0.0.0:50030</td><td>
+ The job tracker http server address and port the server will
listen on.
If the port is 0 then the server will start on a free port.
</td>
</tr>
@@ -303,8 +310,10 @@
</td>
</tr>
<tr>
-<td><a
name="mapred.task.tracker.report.bindAddress">mapred.task.tracker.repo
rt.bindAddress</a></td><td>127.0.0.1:0</td><td>The interface that
task processes use to communicate
- with their parent tasktracker process.</td>
+<td><a
name="mapred.task.tracker.report.address">mapred.task.tracker.report.a
ddress</a></td><td>127.0.0.1:0</td><td>The interface and port that
task tracker server listens on.
+ Since it is only connected to by the tasks, it uses the local
interface.
+ EXPERT ONLY. Should only be changed if your host does not have
the loopback
+ interface.</td>
</tr>
<tr>
<td><a name="mapred.local.dir">mapred.local.dir</a></td><td>$
{hadoop.tmp.dir}/mapred/local</td><td>The local directory where
MapReduce stores intermediate
@@ -452,8 +461,8 @@
</td>
</tr>
<tr>
-<td><a
name="mapred.task.tracker.http.bindAddress">mapred.task.tracker.http.b
indAddress</a></td><td>0.0.0.0:50060</td><td>
- The task tracker http server bind address and port.
+<td><a
name="mapred.task.tracker.http.address">mapred.task.tracker.http.addre
ss</a></td><td>0.0.0.0:50060</td><td>
+ The task tracker http server address and port.
If the port is 0 then the server will start on a free port.
</td>
</tr>
@@ -564,6 +573,22 @@
</td>
</tr>
<tr>
+<td><a name="mapred.task.profile">mapred.task.profile</a></
td><td>false</td><td>To set whether the system should collect profiler
+ information for some of the tasks in this job? The
information is stored
+ in the user log directory. The value is "true" if task
profiling
+ is enabled.</td>
+</tr>
+<tr>
+<td><a name="mapred.task.profile.maps">mapred.task.profile.maps</
a></td><td>0-2</td><td> To set the ranges of map tasks to profile.
+ mapred.task.profile has to be set to true for the value to be
accounted.
+ </td>
+</tr>
+<tr>
+<td><a
name="mapred.task.profile.reduces">mapred.task.profile.reduces</a></
td><td>0-2</td><td> To set the ranges of reduce tasks to profile.
+ mapred.task.profile has to be set to true for the value to be
accounted.
+ </td>
+</tr>
+<tr>
<td><a name="ipc.client.timeout">ipc.client.timeout</a></
td><td>60000</td><td>Defines the timeout for IPC calls in
milliseconds.</td>
</tr>
<tr>
@@ -593,6 +618,18 @@
<tr>
<td><a
name="ipc.server.listen.queue.size">ipc.server.listen.queue.size</
a></td><td>128</td><td>Indicates the length of the listen queue for
servers accepting
client connections.
+ </td>
+</tr>
+<tr>
+<td><a name="ipc.server.tcpnodelay">ipc.server.tcpnodelay</a></
td><td>false</td><td>Turn on/off Nagle's algorithm for the TCP
socket connection on
+ the server. Setting to true disables the algorithm and may
decrease latency
+ with a cost of more/smaller packets.
+ </td>
+</tr>
+<tr>
+<td><a name="ipc.client.tcpnodelay">ipc.client.tcpnodelay</a></
td><td>false</td><td>Turn on/off Nagle's algorithm for the TCP
socket connection on
+ the client. Setting to true disables the algorithm and may
decrease latency
+ with a cost of more/smaller packets.
</td>
</tr>
<tr>
Modified: hadoop/core/trunk/docs/skin/images/rc-b-
l-15-1body-2menu-3menu.png
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/docs/skin/
images/rc-b-l-15-1body-2menu-3menu.png?
rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
Binary files - no diff available.
Modified: hadoop/core/trunk/docs/skin/images/rc-b-
r-15-1body-2menu-3menu.png
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/docs/skin/
images/rc-b-r-15-1body-2menu-3menu.png?
rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
Binary files - no diff available.
Modified: hadoop/core/trunk/docs/skin/images/rc-b-r-5-1header-2tab-
selected-3tab-selected.png
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/docs/skin/
images/rc-b-r-5-1header-2tab-selected-3tab-selected.png?
rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
Binary files - no diff available.
Modified: hadoop/core/trunk/docs/skin/images/rc-t-
l-5-1header-2searchbox-3searchbox.png
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/docs/skin/
images/rc-t-l-5-1header-2searchbox-3searchbox.png?
rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
Binary files - no diff available.
Modified: hadoop/core/trunk/docs/skin/images/rc-t-l-5-1header-2tab-
selected-3tab-selected.png
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/docs/skin/
images/rc-t-l-5-1header-2tab-selected-3tab-selected.png?
rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
Binary files - no diff available.
Modified: hadoop/core/trunk/docs/skin/images/rc-t-l-5-1header-2tab-
unselected-3tab-unselected.png
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/docs/skin/
images/rc-t-l-5-1header-2tab-unselected-3tab-unselected.png?
rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
Binary files - no diff available.
Modified: hadoop/core/trunk/docs/skin/images/rc-t-
r-15-1body-2menu-3menu.png
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/docs/skin/
images/rc-t-r-15-1body-2menu-3menu.png?
rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
Binary files - no diff available.
Modified: hadoop/core/trunk/docs/skin/images/rc-t-
r-5-1header-2searchbox-3searchbox.png
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/docs/skin/
images/rc-t-r-5-1header-2searchbox-3searchbox.png?
rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
Binary files - no diff available.
Modified: hadoop/core/trunk/docs/skin/images/rc-t-r-5-1header-2tab-
selected-3tab-selected.png
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/docs/skin/
images/rc-t-r-5-1header-2tab-selected-3tab-selected.png?
rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
Binary files - no diff available.
Modified: hadoop/core/trunk/docs/skin/images/rc-t-r-5-1header-2tab-
unselected-3tab-unselected.png
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/docs/skin/
images/rc-t-r-5-1header-2tab-unselected-3tab-unselected.png?
rev=620681&r1=620680&r2=620681&view=diff
======================================================================
========
Binary files - no diff available.