Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Wed Apr 23 20:16:16 2014
@@ -1339,7 +1339,7 @@ public class FSNamesystem implements Nam
   /**
    * Returns edit directories that are shared between primary and secondary.
    * @param conf configuration
-   * @return Collection of edit directories.
+   * @return collection of edit directories from {@code conf}
    */
   public static List<URI> getSharedEditsDirs(Configuration conf) {
     // don't use getStorageDirs here, because we want an empty default
@@ -1774,9 +1774,9 @@ public class FSNamesystem implements Nam
    * before we start actual move.
    * 
    * This does not support ".inodes" relative path
-   * @param target target file path to concatenate into
-   * @param srcs files that are concatenated
-   * @throws IOException
+   * @param target target to concat into
+   * @param srcs files that will be concatenated
+   * @throws IOException on error
    */
   void concat(String target, String [] srcs) 
       throws IOException, UnresolvedLinkException {
@@ -4075,11 +4075,10 @@ public class FSNamesystem implements Nam
   }
 
   /**
-   *
-   * @param pendingFile
-   * @param storedBlock
+   * @param pendingFile open file that needs to be closed
+   * @param storedBlock last block
    * @return Path of the file that was closed.
-   * @throws IOException
+   * @throws IOException on error
    */
   @VisibleForTesting
   String closeFileCommitBlocks(INodeFile pendingFile, BlockInfo storedBlock)
@@ -4287,7 +4286,6 @@ public class FSNamesystem implements Nam
   /**
    * Perform resource checks and cache the results.
-   * @throws IOException
    */
   void checkAvailableResources() {
     Preconditions.checkState(nnResourceChecker != null,
@@ -5338,7 +5336,6 @@ public class FSNamesystem implements Nam
   /**
    * Leave safe mode.
-   * @throws IOException
    */
   void leaveSafeMode() {
     writeLock();
@@ -5755,7 +5752,7 @@ public class FSNamesystem implements Nam
   /**
    * Sets the generation stamp that delineates random and sequentially
    * allocated block IDs.
-   * @param stamp
+   * @param stamp set generation stamp limit to this value
    */
   void setGenerationStampV1Limit(long stamp) {
     Preconditions.checkState(generationStampV1Limit ==
@@ -5840,7 +5837,6 @@ public class FSNamesystem implements Nam
    * Determine whether the block ID was randomly generated (legacy) or
    * sequentially generated. The generation stamp value is used to
    * make the distinction.
-   * @param block
    * @return true if the block ID was randomly generated, false otherwise.
    */
   boolean isLegacyBlock(Block block) {
@@ -6077,7 +6073,6 @@ public class FSNamesystem implements Nam
    * Release (unregister) backup node.
    * <p>
    * Find and remove the backup stream corresponding to the node.
-   * @param registration
    * @throws IOException
    */
   void releaseBackupNode(NamenodeRegistration registration)
@@ -6213,8 +6208,8 @@ public class FSNamesystem implements Nam
   /**
    * @param renewer Renewer information
-   * @return Token<DelegationTokenIdentifier>
-   * @throws IOException
+   * @return delegation token
+   * @throws IOException on error
    */
   Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
       throws IOException {
@@ -6255,10 +6250,10 @@ public class FSNamesystem implements Nam
   /**
    * 
-   * @param token delegation token
-   * @return New expiryTime of the token
-   * @throws InvalidToken
-   * @throws IOException
+   * @param token token to renew
+   * @return new expiryTime of the token
+   * @throws InvalidToken if {@code token} is invalid
+   * @throws IOException on other errors
    */
   long renewDelegationToken(Token<DelegationTokenIdentifier> token)
       throws InvalidToken, IOException {
@@ -6289,8 +6284,8 @@ public class FSNamesystem implements Nam
   /**
    * 
-   * @param token delegation token that needs to be canceled
-   * @throws IOException
+   * @param token token to cancel
+   * @throws IOException on error
    */
   void cancelDelegationToken(Token<DelegationTokenIdentifier> token)
       throws IOException {
@@ -7201,7 +7196,7 @@ public class FSNamesystem implements Nam
   /**
    * Update internal state to indicate that a rolling upgrade is in progress.
-   * @param startTime start time of the rolling upgrade
+   * @param startTime rolling upgrade start time
   */
   void startRollingUpgradeInternal(long startTime)
       throws IOException {
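
Note: the concat() javadoc touched above describes an operation that moves the blocks of the source files onto the target and then removes the sources. As a point of reference only (not part of this change), a minimal client-side sketch of driving that operation through DistributedFileSystem could look like the following; the paths are hypothetical and a reachable cluster configured via fs.defaultFS is assumed. HDFS also imposes restrictions such as the sources living in the same directory as the target with matching replication and block size.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;

public class ConcatExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();   // assumes fs.defaultFS points at an HDFS namenode
    FileSystem fs = FileSystem.get(conf);
    if (!(fs instanceof DistributedFileSystem)) {
      throw new IllegalStateException("concat is only supported on HDFS");
    }
    DistributedFileSystem dfs = (DistributedFileSystem) fs;
    // Hypothetical paths: the blocks of the sources are appended to the
    // target and the source files are removed by the namenode.
    Path target = new Path("/data/part-0");
    Path[] srcs = { new Path("/data/part-1"), new Path("/data/part-2") };
    dfs.concat(target, srcs);
    dfs.close();
  }
}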
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileJournalManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileJournalManager.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileJournalManager.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileJournalManager.java Wed Apr 23 20:16:16 2014
@@ -168,7 +168,7 @@ public class FileJournalManager implemen
   /**
    * Find all editlog segments starting at or above the given txid.
-   * @param fromTxId the txnid which to start looking
+   * @param firstTxId the txnid which to start looking
   * @param inProgressOk whether or not to include the in-progress edit log
    *        segment
    * @return a list of remote edit logs

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java Wed Apr 23 20:16:16 2014
@@ -652,9 +652,8 @@ public abstract class INode implements I
 
   /**
-   * Breaks file path into components.
-   * @param path
-   * @return array of byte arrays each of which represents
+   * Breaks {@code path} into components.
+   * @return array of byte arrays each of which represents 
    *         a single path component.
    */
   static byte[][] getPathComponents(String path) {
@@ -673,8 +672,7 @@ public abstract class INode implements I
   }
 
   /**
-   * Splits an absolute path into an array of path components.
-   * @param path
+   * Splits an absolute {@code path} into an array of path components.
    * @throws AssertionError if the given path is invalid.
    * @return array of path components.
   */
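
Note: the INode hunks above document getPathComponents()/getPathNames(), which split a path string into an array of per-component byte arrays. A rough, self-contained illustration of the idea follows (not the actual INode/DFSUtil implementation, which handles additional edge cases); the splitting on '/' and the UTF-8 conversion are the only assumptions made here.

import java.nio.charset.StandardCharsets;

public class PathComponentsExample {

  /** Split an absolute path such as "/a/b/c" into {"", "a", "b", "c"} as UTF-8 byte arrays. */
  static byte[][] getPathComponents(String path) {
    if (path == null || !path.startsWith("/")) {
      throw new AssertionError("Absolute path required: " + path);
    }
    String[] names = path.split("/");      // leading "" stands for the root component
    if (names.length == 0) {               // path was just "/"
      return new byte[][] { new byte[0] };
    }
    byte[][] components = new byte[names.length][];
    for (int i = 0; i < names.length; i++) {
      components[i] = names[i].getBytes(StandardCharsets.UTF_8);
    }
    return components;
  }

  public static void main(String[] args) {
    for (byte[] component : getPathComponents("/user/alice/file.txt")) {
      System.out.println("'" + new String(component, StandardCharsets.UTF_8) + "'");
    }
  }
}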
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java Wed Apr 23 20:16:16 2014
@@ -402,7 +402,6 @@ public class LeaseManager {
   /**
    * Get the list of inodes corresponding to valid leases.
    * @return list of inodes
-   * @throws UnresolvedLinkException
   */
   Map<String, INodeFile> getINodesUnderConstruction() {
     Map<String, INodeFile> inodes = new TreeMap<String, INodeFile>();

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LogsPurgeable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LogsPurgeable.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LogsPurgeable.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LogsPurgeable.java Wed Apr 23 20:16:16 2014
@@ -42,7 +42,6 @@ interface LogsPurgeable {
    * 
    * @param fromTxId the first transaction id we want to read
    * @param inProgressOk whether or not in-progress streams should be returned
-   * @return a list of streams
   * @throws IOException if the underlying storage has an error or is otherwise
    *                     inaccessible
    */

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java Wed Apr 23 20:16:16 2014
@@ -425,8 +425,7 @@ public class NNStorage extends Storage i
   /**
    * Write last checkpoint time into a separate file.
-   *
-   * @param sd
+   * @param sd storage directory
    * @throws IOException
   */
   void writeTransactionIdFile(StorageDirectory sd, long txid) throws IOException {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Wed Apr 23 20:16:16 2014
@@ -355,8 +355,6 @@ public class NameNode implements NameNod
 
   /**
-   * TODO:FEDERATION
-   * @param filesystemURI
    * @return address of file system
    */
   public static InetSocketAddress getAddress(URI filesystemURI) {
@@ -799,8 +797,8 @@ public class NameNode implements NameNod
    * Interactively confirm that formatting is desired 
    * for each existing directory and format them.
    * 
-   * @param conf
-   * @param force
+   * @param conf configuration to use
+   * @param force if true, format regardless of whether dirs exist
    * @return true if formatting was aborted, false otherwise
    * @throws IOException
   */

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java Wed Apr 23 20:16:16 2014
@@ -1176,9 +1176,8 @@ class NameNodeRpcServer implements Namen
   /**
    * Verify version.
-   * 
-   * @param version
-   * @throws IOException
+   * @param version layout version
+   * @throws IOException on layout version mismatch
    */
   void verifyLayoutVersion(int version) throws IOException {
     if (version != HdfsConstants.NAMENODE_LAYOUT_VERSION)

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java Wed Apr 23 20:16:16 2014
@@ -159,7 +159,6 @@ public class NamenodeFsck {
    * @param totalDatanodes number of live datanodes
    * @param minReplication minimum replication
    * @param remoteAddress source address of the fsck request
-   * @throws IOException
   */
   NamenodeFsck(Configuration conf, NameNode namenode,
       NetworkTopology networktopology,

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java Wed Apr 23 20:16:16 2014
@@ -209,7 +209,6 @@ public class SecondaryNameNode implement
   /**
    * Initialize SecondaryNameNode.
-   * @param commandLineOpts
   */
   private void initialize(final Configuration conf,
       CommandLineOpts commandLineOpts) throws IOException {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/HAState.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/HAState.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/HAState.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/HAState.java Wed Apr 23 20:16:16 2014
@@ -130,8 +130,8 @@ abstract public class HAState {
    * Check if an operation is supported in a given state.
    * @param context HA context
    * @param op Type of the operation.
-   * @throws UnsupportedActionException if a given type of operation is not
-   *         supported in this state.
+   * @throws StandbyException if a given type of operation is not
+   *         supported in standby state
   */
   public abstract void checkOperation(final HAContext context,
       final OperationCategory op) throws StandbyException;

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiff.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiff.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiff.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiff.java Wed Apr 23 20:16:16 2014
@@ -86,7 +86,6 @@ abstract class AbstractINodeDiff<N exten
     return posteriorDiff;
   }
 
-  /** @return the posterior diff. */
   final void setPosterior(D posterior) {
     posteriorDiff = posterior;
   }
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/BlockIdCommand.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/BlockIdCommand.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/BlockIdCommand.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/BlockIdCommand.java Wed Apr 23 20:16:16 2014
@@ -32,7 +32,6 @@ public class BlockIdCommand extends Data
   /**
    * Create BlockCommand for the given action
-   * @param blocks blocks related to the action
   */
   public BlockIdCommand(int action, String poolId, long[] blockIds) {
     super(action);

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java Wed Apr 23 20:16:16 2014
@@ -119,9 +119,9 @@ public interface DatanodeProtocol {
    * and should be deleted. This function is meant to upload *all*
    * the locally-stored blocks. It's invoked upon startup and then
    * infrequently afterwards.
-   * @param registration
-   * @param poolId - the block pool ID for the blocks
-   * @param reports - report of blocks per storage
+   * @param registration datanode registration
+   * @param poolId the block pool ID for the blocks
+   * @param reports report of blocks per storage
    *     Each finalized block is represented as 3 longs. Each under-
    *     construction replica is represented as 4 longs.
     *     This is done instead of Block[] to reduce memory used by block reports.
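
Note: the blockReport() javadoc above explains that the report is sent as a flat array of longs, three per finalized block, rather than as Block[] objects, to keep block reports small. A simplified stand-alone sketch of that packing idea follows; the Replica class, the field order, and the numbers are illustrative assumptions, and the real encoding (including the header and under-construction sections) lives in BlockListAsLongs.

import java.util.Arrays;
import java.util.List;

public class BlockReportPackingExample {

  /** Minimal stand-in for a finalized replica's identity (illustrative only). */
  static final class Replica {
    final long blockId, numBytes, generationStamp;
    Replica(long blockId, long numBytes, long generationStamp) {
      this.blockId = blockId;
      this.numBytes = numBytes;
      this.generationStamp = generationStamp;
    }
  }

  /** Pack each finalized replica as 3 longs: id, length, generation stamp. */
  static long[] packFinalized(List<Replica> replicas) {
    long[] packed = new long[replicas.size() * 3];
    int i = 0;
    for (Replica r : replicas) {
      packed[i++] = r.blockId;
      packed[i++] = r.numBytes;
      packed[i++] = r.generationStamp;
    }
    return packed;
  }

  public static void main(String[] args) {
    long[] report = packFinalized(Arrays.asList(
        new Replica(1073741825L, 134217728L, 1001L),
        new Replica(1073741826L, 4096L, 1002L)));
    System.out.println("longs in report: " + report.length);   // prints 6
  }
}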
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeStorage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeStorage.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeStorage.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeStorage.java Wed Apr 23 20:16:16 2014
@@ -48,8 +48,6 @@ public class DatanodeStorage {
   /**
    * Create a storage with {@link State#NORMAL} and {@link StorageType#DEFAULT}.
-   *
-   * @param storageID
   */
   public DatanodeStorage(String storageID) {
     this(storageID, State.NORMAL, StorageType.DEFAULT);

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java Wed Apr 23 20:16:16 2014
@@ -39,7 +39,7 @@ public abstract class ServerCommand {
    * 
    * @see DatanodeProtocol
    * @see NamenodeProtocol
-   * @param action
+   * @param action protocol specific action
   */
   public ServerCommand(int action) {
     this.action = action;

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java Wed Apr 23 20:16:16 2014
@@ -130,9 +130,6 @@ public class DFSck extends Configured im
     out.println(USAGE + "\n");
     ToolRunner.printGenericCommandUsage(out);
   }
-  /**
-   * @param args
-   */
   @Override
   public int run(final String[] args) throws IOException {
     if (args.length == 0) {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java Wed Apr 23 20:16:16 2014
@@ -167,8 +167,7 @@ public class GetConf extends Configured
     }
 
-    /** Method to be overridden by sub classes for specific behavior 
-     * @param args */
+    /** Method to be overridden by sub classes for specific behavior */
     int doWorkInternal(GetConf tool, String[] args) throws Exception {
 
       String value = tool.getConf().getTrimmed(key);

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/HDFSConcat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/HDFSConcat.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/HDFSConcat.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/HDFSConcat.java Wed Apr 23 20:16:16 2014
@@ -30,9 +30,7 @@ import org.apache.hadoop.hdfs.Distribute
 @InterfaceAudience.Private
 public class HDFSConcat {
   private final static String def_uri = "hdfs://localhost:9000";
-  /**
-   * @param args
-   */
+  
   public static void main(String... args) throws IOException {
     
     if(args.length < 2) {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/JMXGet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/JMXGet.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/JMXGet.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/JMXGet.java Wed Apr 23 20:16:16 2014
@@ -217,7 +217,7 @@ public class JMXGet {
   }
 
   /**
-   * @param msg
+   * @param msg error message
   */
   private static void err(String msg) {
     System.err.println(msg);
@@ -274,13 +274,7 @@ public class JMXGet {
     return commandLine;
   }
 
-  /**
-   * main
-   *
-   * @param args
-   */
   public static void main(String[] args) {
-    int res = -1;
 
     // parse arguments

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/BinaryEditsVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/BinaryEditsVisitor.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/BinaryEditsVisitor.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/BinaryEditsVisitor.java Wed Apr 23 20:16:16 2014
@@ -37,8 +37,7 @@ public class BinaryEditsVisitor implemen
   /**
    * Create a processor that writes to a given file
-   *
-   * @param filename Name of file to write output to
+   * @param outputName Name of file to write output to
   */
   public BinaryEditsVisitor(String outputName) throws IOException {
     this.elfos = new EditLogFileOutputStream(new Configuration(),

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/DataTransferThrottler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/DataTransferThrottler.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/DataTransferThrottler.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/DataTransferThrottler.java Wed Apr 23 20:16:16 2014
@@ -63,8 +63,6 @@ public class DataTransferThrottler {
   /**
    * Sets throttle bandwidth. This takes affect latest by the end of current
    * period.
-   * 
-   * @param bytesPerSecond
   */
   public synchronized void setBandwidth(long bytesPerSecond) {
     if ( bytesPerSecond <= 0 ) {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AclPermissionParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AclPermissionParam.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AclPermissionParam.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AclPermissionParam.java Wed Apr 23 20:16:16 2014
@@ -60,10 +60,7 @@ public class AclPermissionParam extends
   }
 
   /**
-   * Parse the list of AclEntry and returns aclspec.
-   * 
-   * @param List <AclEntry>
-   * @return String
+   * @return parse {@code aclEntry} and return aclspec
   */
   private static String parseAclSpec(List<AclEntry> aclEntry) {
     return StringUtils.join(aclEntry, ",");
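
Note: parseAclSpec() above produces an aclspec by joining each entry's string form with commas. A small stand-alone sketch of how such a string comes together from a caller's perspective follows; the entries are made up, and it assumes the org.apache.hadoop.fs.permission ACL classes introduced with the HDFS ACL feature.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;

public class AclSpecExample {
  public static void main(String[] args) {
    List<AclEntry> entries = Arrays.asList(
        new AclEntry.Builder().setScope(AclEntryScope.ACCESS)
            .setType(AclEntryType.USER).setName("alice")
            .setPermission(FsAction.READ_WRITE).build(),
        new AclEntry.Builder().setScope(AclEntryScope.ACCESS)
            .setType(AclEntryType.GROUP).setName("staff")
            .setPermission(FsAction.READ).build());

    // Same idea as parseAclSpec above: join each entry's string form with ','.
    StringBuilder aclspec = new StringBuilder();
    for (AclEntry entry : entries) {
      if (aclspec.length() > 0) {
        aclspec.append(',');
      }
      aclspec.append(entry);   // AclEntry.toString() yields forms like "user:alice:rw-"
    }
    System.out.println(aclspec);   // expected: user:alice:rw-,group:staff:r--
  }
}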
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BenchmarkThroughput.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BenchmarkThroughput.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BenchmarkThroughput.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BenchmarkThroughput.java Wed Apr 23 20:16:16 2014
@@ -229,7 +229,7 @@ public class BenchmarkThroughput extends
   }
 
   /**
-   * @param args
+   * @param args arguments
   */
   public static void main(String[] args) throws Exception {
     int res = ToolRunner.run(new HdfsConfiguration(),

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java Wed Apr 23 20:16:16 2014
@@ -1386,8 +1386,8 @@ public class MiniDFSCluster {
   /**
    * Finalize cluster for the namenode at the given index 
    * @see MiniDFSCluster#finalizeCluster(Configuration)
-   * @param nnIndex
-   * @param conf
+   * @param nnIndex index of the namenode
+   * @param conf configuration
    * @throws Exception
   */
   public void finalizeCluster(int nnIndex, Configuration conf) throws Exception {
@@ -2248,7 +2248,7 @@ public class MiniDFSCluster {
    * to determine the location of the storage of a DN instance in the mini cluster
    * @param dnIndex datanode index
    * @param dirIndex directory index.
-   * @return
+   * @return storage directory path
   */
   private static String getStorageDirPath(int dnIndex, int dirIndex) {
     return "data/data" + (2 * dnIndex + 1 + dirIndex);
@@ -2274,8 +2274,8 @@ public class MiniDFSCluster {
   }
   /**
    * Get directory relative to block pool directory in the datanode
-   * @param storageDir
-   * @return current directory
+   * @param storageDir storage directory
+   * @return current directory in the given storage directory
   */
   public static String getBPDir(File storageDir, String bpid, String dirName) {
     return getBPDir(storageDir, bpid) + dirName + "/";

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManagerTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManagerTestUtil.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManagerTestUtil.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManagerTestUtil.java Wed Apr 23 20:16:16 2014
@@ -101,7 +101,6 @@ public class BlockManagerTestUtil {
   }
 
   /**
-   * @param blockManager
    * @return replication monitor thread instance from block manager.
   */
   public static Daemon getReplicationThread(final BlockManager blockManager)
@@ -111,7 +110,6 @@ public class BlockManagerTestUtil {
 
   /**
    * Stop the replication monitor thread
-   * @param blockManager
   */
   public static void stopReplicationThread(final BlockManager blockManager)
       throws IOException {
@@ -126,7 +124,6 @@ public class BlockManagerTestUtil {
   }
 
   /**
-   * @param blockManager
    * @return corruptReplicas from block manager
   */
   public static CorruptReplicasMap getCorruptReplicas(final BlockManager blockManager){
@@ -135,7 +132,6 @@ public class BlockManagerTestUtil {
   }
 
   /**
-   * @param blockManager
    * @return computed block replication and block invalidation work that can be
    *         scheduled on data-nodes.
    * @throws IOException
@@ -158,7 +154,7 @@ public class BlockManagerTestUtil {
    * regardless of invalidation/replication limit configurations.
    * 
    * NB: you may want to set
-   * {@link DFSConfigKeys.DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY} to
+   * {@link DFSConfigKeys#DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY} to
    * a high value to ensure that all work is calculated.
   */
   public static int computeAllPendingWork(BlockManager bm) {
@@ -200,7 +196,7 @@ public class BlockManagerTestUtil {
   /**
    * Change whether the block placement policy will prefer the writer's
    * local Datanode or not.
-   * @param prefer
+   * @param prefer if true, prefer local node
   */
   public static void setWritingPrefersLocalNode(
       BlockManager bm, boolean prefer) {
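
Note: the BlockManagerTestUtil hunk above also fixes a {@link} target; javadoc resolves a reference to a static field with '#', not '.'. A tiny stand-alone illustration of the difference, using a JDK constant:

public class LinkTagExample {
  /**
   * Resolves and links correctly: {@link Integer#MAX_VALUE}.
   * The dotted form "Integer.MAX_VALUE" inside a link tag is not a valid
   * reference and is reported by the javadoc tool as one it cannot find.
   */
  public static final int LIMIT = Integer.MAX_VALUE;
}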
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/BlockReportTestBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/BlockReportTestBase.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/BlockReportTestBase.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/BlockReportTestBase.java Wed Apr 23 20:16:16 2014
@@ -171,9 +171,6 @@ public abstract class BlockReportTestBas
    * Utility routine to send block reports to the NN, either in a single call
    * or reporting one storage per call.
    * 
-   * @param dnR
-   * @param poolId
-   * @param reports
    * @throws IOException
   */
   protected abstract void sendBlockReports(DatanodeRegistration dnR, String poolId,

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java Wed Apr 23 20:16:16 2014
@@ -833,8 +833,8 @@ public class SimulatedFSDataset implemen
     /**
      * An input stream of size l with repeated bytes
-     * @param l
-     * @param iRepeatedData
+     * @param l size of the stream
+     * @param iRepeatedData byte that is repeated in the stream
     */
     SimulatedInputStream(long l, byte iRepeatedData) {
       length = l;
@@ -843,17 +843,14 @@ public class SimulatedFSDataset implemen
 
     /**
      * An input stream of of the supplied data
-     * 
-     * @param iData
+     * @param iData data to construct the stream
     */
     SimulatedInputStream(byte[] iData) {
       data = iData;
       length = data.length;
-      
     }
 
     /**
-     * 
      * @return the lenght of the input stream
     */
     long getLength() {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/CreateEditsLog.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/CreateEditsLog.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/CreateEditsLog.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/CreateEditsLog.java Wed Apr 23 20:16:16 2014
@@ -131,14 +131,10 @@ public class CreateEditsLog {
     printUsageExit();
   }
   /**
-   * @param args
+   * @param args arguments
    * @throws IOException 
    */
-  public static void main(String[] args) 
-      throws IOException {
-
-
-
+  public static void main(String[] args) throws IOException {
     long startingBlockId = 1;
     int numFiles = 0;
     short replication = 1;

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java?rev=1589507&r1=1589506&r2=1589507&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java Wed Apr 23 20:16:16 2014
@@ -202,7 +202,7 @@ public class NNThroughputBenchmark imple
    * {@link #executeOp(int, int, String)}, which can have different meanings
    * depending on the operation performed.
    * 
-   * @param daemonId
+   * @param daemonId id of the daemon calling this method
    * @return the argument
   */
   abstract String getExecutionArgument(int daemonId);
@@ -322,11 +322,10 @@ public class NNThroughputBenchmark imple
   /**
    * Parse first 2 arguments, corresponding to the "-op" option.
    * 
-   * @param args
+   * @param args argument list
    * @return true if operation is all, which means that options not related
    *         to this operation should be ignored, or false otherwise, meaning
    *         that usage should be printed when an unrelated option is encountered.
-   * @throws IOException
   */
   protected boolean verifyOpArgument(List<String> args) {
     if(args.size() < 2 || ! args.get(0).startsWith("-op"))