Author: sharad
Date: Fri May 8 10:10:33 2009
New Revision: 772911
URL: http://svn.apache.org/viewvc?rev=772911&view=rev
Log:
HADOOP-5679. Resolve findbugs warnings in core/streaming/pipes/examples.
Contributed by Jothi Padmanabhan.
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/JarBuilder.java
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/LocalFileSystem.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java
hadoop/core/trunk/src/core/org/apache/hadoop/io/SequenceFile.java
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
hadoop/core/trunk/src/core/org/apache/hadoop/ipc/Client.java
hadoop/core/trunk/src/core/org/apache/hadoop/ipc/Server.java
hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java
hadoop/core/trunk/src/core/org/apache/hadoop/record/meta/StructTypeID.java
hadoop/core/trunk/src/core/org/apache/hadoop/util/ProgramDriver.java
hadoop/core/trunk/src/core/org/apache/hadoop/util/Progress.java
hadoop/core/trunk/src/core/org/apache/hadoop/util/RunJar.java
hadoop/core/trunk/src/examples/org/apache/hadoop/examples/DBCountPageView.java
hadoop/core/trunk/src/examples/org/apache/hadoop/examples/ExampleDriver.java
hadoop/core/trunk/src/test/findbugsExcludeFile.xml
Modified: hadoop/core/trunk/CHANGES.txt
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Fri May 8 10:10:33 2009
@@ -50,6 +50,9 @@
HADOOP-5738. Split "waiting_tasks" JobTracker metric into waiting maps and
waiting reduces. (Sreekanth Ramakrishnan via cdouglas)
+ HADOOP-5679. Resolve findbugs warnings in core/streaming/pipes/examples.
+ (Jothi Padmanabhan via sharad)
+
NEW FEATURES
HADOOP-4268. Change fsck to use ClientProtocol methods so that the
Modified:
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/JarBuilder.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/JarBuilder.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/JarBuilder.java
(original)
+++
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/JarBuilder.java
Fri May 8 10:10:33 2009
@@ -147,9 +147,12 @@
void addFileStream(JarOutputStream dst, String jarBaseName, File file)
throws IOException {
FileInputStream in = new FileInputStream(file);
- String name = jarBaseName + file.getName();
- addNamedStream(dst, name, in);
- in.close();
+ try {
+ String name = jarBaseName + file.getName();
+ addNamedStream(dst, name, in);
+ } finally {
+ in.close();
+ }
}
void addDirectory(JarOutputStream dst, String jarBaseName, File dir, int
depth) throws IOException {
Modified:
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
(original)
+++
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
Fri May 8 10:10:33 2009
@@ -224,11 +224,15 @@
errThread_ = new MRErrorThread();
errThread_.start();
- } catch (Exception e) {
+ } catch (IOException e) {
logStackTrace(e);
LOG.error("configuration exception", e);
throw new RuntimeException("configuration exception", e);
- }
+ } catch (InterruptedException e) {
+ logStackTrace(e);
+ LOG.error("configuration exception", e);
+ throw new RuntimeException("configuration exception", e);
+ }
}
void setStreamJobDetails(JobConf job) {
Modified:
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
(original)
+++
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
Fri May 8 10:10:33 2009
@@ -28,6 +28,8 @@
import java.util.List;
import java.util.jar.*;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
@@ -40,6 +42,9 @@
*/
public class StreamUtil {
+ private static final Log LOG =
+ LogFactory.getLog(StreamUtil.class.getName());
+
/** It may seem strange to silently switch behaviour when a String
* is not a classname; the reason is simplified Usage:<pre>
* -mapper [classname | program ]
@@ -112,7 +117,8 @@
InputStream in = jar.getInputStream(entry);
try {
File file = new File(toDir, entry.getName());
- file.getParentFile().mkdirs();
+ boolean b = file.getParentFile().mkdirs();
+ if (!b) { LOG.warn("Ignoring failure of mkdirs"); }
OutputStream out = new FileOutputStream(file);
try {
byte[] buffer = new byte[8192];
@@ -195,7 +201,6 @@
}
if (numBytes >= KB) {
u = numBytes / KB;
- numBytes -= u * KB;
buf.append(u).append(" KB ");
}
buf.append(u).append(" B"); //even if zero
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/LocalFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/LocalFileSystem.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/LocalFileSystem.java
(original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/LocalFileSystem.java Fri
May 8 10:10:33 2009
@@ -97,12 +97,16 @@
File badFile = new File(badDir, f.getName()+suffix);
LOG.warn("Moving bad file " + f + " to " + badFile);
in.close(); // close it first
- f.renameTo(badFile); // rename it
-
+ boolean b = f.renameTo(badFile); // rename it
+ if (!b) {
+ LOG.warn("Ignoring failure of renameTo");
+ }
// move checksum file too
File checkFile = ((RawLocalFileSystem)fs).pathToFile(getChecksumFile(p));
- checkFile.renameTo(new File(badDir, checkFile.getName()+suffix));
-
+ b = checkFile.renameTo(new File(badDir, checkFile.getName()+suffix));
+ if (!b) {
+ LOG.warn("Ignoring failure of renameTo");
+ }
} catch (IOException e) {
LOG.warn("Error moving bad file " + p + ": " + e);
}
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
---
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
(original)
+++
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
Fri May 8 10:10:33 2009
@@ -33,6 +33,8 @@
import java.util.Set;
import java.util.TreeSet;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3.INode.FileType;
@@ -74,6 +76,9 @@
private int bufferSize;
+ private static final Log LOG =
+ LogFactory.getLog(Jets3tFileSystemStore.class.getName());
+
public void initialize(URI uri, Configuration conf) throws IOException {
this.conf = conf;
@@ -215,7 +220,10 @@
closeQuietly(out);
out = null; // to prevent a second close
if (fileBlock != null) {
- fileBlock.delete();
+ boolean b = fileBlock.delete();
+ if (!b) {
+ LOG.warn("Ignoring failed delete");
+ }
}
throw e;
} finally {
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java
(original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java Fri
May 8 10:10:33 2009
@@ -23,6 +23,8 @@
import java.io.FileInputStream;
import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -46,6 +48,10 @@
private long blockEnd = -1;
private FileSystem.Statistics stats;
+
+ private static final Log LOG =
+ LogFactory.getLog(S3InputStream.class.getName());
+
@Deprecated
public S3InputStream(Configuration conf, FileSystemStore store,
@@ -175,7 +181,10 @@
blockStream = null;
}
if (blockFile != null) {
- blockFile.delete();
+ boolean b = blockFile.delete();
+ if (!b) {
+ LOG.warn("Ignoring failed delete");
+ }
}
super.close();
closed = true;
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java
(original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java Fri
May 8 10:10:33 2009
@@ -26,6 +26,8 @@
import java.util.List;
import java.util.Random;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3.INode.FileType;
@@ -62,6 +64,10 @@
private List<Block> blocks = new ArrayList<Block>();
private Block nextBlock;
+
+ private static final Log LOG =
+ LogFactory.getLog(S3OutputStream.class.getName());
+
public S3OutputStream(Configuration conf, FileSystemStore store,
Path path, long blockSize, Progressable progress,
@@ -175,7 +181,10 @@
//
// Delete local backup, start new one
//
- backupFile.delete();
+ boolean b = backupFile.delete();
+ if (!b) {
+ LOG.warn("Ignoring failed delete");
+ }
backupFile = newBackupFile();
backupStream = new FileOutputStream(backupFile);
bytesWrittenToBlock = 0;
@@ -209,7 +218,10 @@
}
backupStream.close();
- backupFile.delete();
+ boolean b = backupFile.delete();
+ if (!b) {
+ LOG.warn("Ignoring failed delete");
+ }
super.close();
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/io/SequenceFile.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/SequenceFile.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/io/SequenceFile.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/io/SequenceFile.java Fri May
8 10:10:33 2009
@@ -736,6 +736,17 @@
this.theMetadata.put(key, val);
}
}
+
+ public boolean equals(Object other) {
+ if (other == null) {
+ return false;
+ }
+ if (other.getClass() != this.getClass()) {
+ return false;
+ } else {
+ return equals((Metadata)other);
+ }
+ }
public boolean equals(Metadata other) {
if (other == null) return false;
@@ -2025,7 +2036,7 @@
* @return Returns the key length or -1 for end of file
* @throws IOException
*/
- public int nextRawKey(DataOutputBuffer key)
+ public synchronized int nextRawKey(DataOutputBuffer key)
throws IOException {
if (!blockCompressed) {
recordLength = readRecordLength();
@@ -2202,7 +2213,7 @@
}
/** Returns true iff the previous call to next passed a sync mark.*/
- public boolean syncSeen() { return syncSeen; }
+ public synchronized boolean syncSeen() { return syncSeen; }
/** Return the current byte position in the input file. */
public synchronized long getPosition() throws IOException {
@@ -3173,6 +3184,13 @@
if (super.shouldPreserveInput()) return;
parentContainer.cleanup();
}
+
+ public boolean equals(Object o) {
+ if (!(o instanceof LinkedSegmentsDescriptor)) {
+ return false;
+ }
+ return super.equals(o);
+ }
} //SequenceFile.Sorter.LinkedSegmentsDescriptor
/** The class that defines a container for segments to be merged. Primarily
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
---
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
(original)
+++
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
Fri May 8 10:10:33 2009
@@ -259,7 +259,7 @@
setDictionary(stream, b, off, len);
}
- public boolean needsInput() {
+ public synchronized boolean needsInput() {
// Consume remaining compressed data?
if (compressedDirectBuf.remaining() > 0) {
return false;
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/ipc/Client.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/ipc/Client.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/ipc/Client.java Fri May 8
10:10:33 2009
@@ -168,6 +168,10 @@
this.value = value;
callComplete();
}
+
+ public synchronized Writable getValue() {
+ return value;
+ }
}
/** Thread that reads responses and notifies callers. Each connection owns a
@@ -614,7 +618,7 @@
/** Collect a result. */
public synchronized void callComplete(ParallelCall call) {
- values[call.index] = call.value; // store the value
+ values[call.index] = call.getValue(); // store the value
count++; // count it
if (count == size) // if all values are in
notify(); // then notify waiting caller
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/ipc/Server.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/ipc/Server.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/ipc/Server.java Fri May 8
10:10:33 2009
@@ -315,8 +315,8 @@
while (running) {
SelectionKey key = null;
try {
- selector.select();
- Iterator<SelectionKey> iter = selector.selectedKeys().iterator();
+ getSelector().select();
+ Iterator<SelectionKey> iter =
getSelector().selectedKeys().iterator();
while (iter.hasNext()) {
key = iter.next();
iter.remove();
@@ -393,7 +393,8 @@
channel.configureBlocking(false);
channel.socket().setTcpNoDelay(tcpNoDelay);
- SelectionKey readKey = channel.register(selector,
SelectionKey.OP_READ);
+ SelectionKey readKey = channel.register(getSelector(),
+ SelectionKey.OP_READ);
c = new Connection(readKey, channel, System.currentTimeMillis());
readKey.attach(c);
synchronized (connectionList) {
@@ -450,6 +451,8 @@
}
}
}
+
+ synchronized Selector getSelector() { return selector; }
}
// Sends responses of RPC back to clients.
@@ -721,7 +724,7 @@
Subject user = null;
// Fake 'call' for failed authorization response
- private final int AUTHROIZATION_FAILED_CALLID = -1;
+ private static final int AUTHROIZATION_FAILED_CALLID = -1;
private final Call authFailedCall =
new Call(AUTHROIZATION_FAILED_CALLID, null, null);
private ByteArrayOutputStream authFailedResponse = new
ByteArrayOutputStream();
@@ -914,7 +917,9 @@
dataLengthBuffer = null;
if (!channel.isOpen())
return;
- try {socket.shutdownOutput();} catch(Exception e) {}
+ try {socket.shutdownOutput();} catch(Exception e) {
+ LOG.warn("Ignoring socket shutdown exception");
+ }
if (channel.isOpen()) {
try {channel.close();} catch(Exception e) {}
}
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java
(original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java
Fri May 8 10:10:33 2009
@@ -197,6 +197,7 @@
String propertyValue = properties.getProperty(propertyName);
setAttribute(propertyName, propertyValue);
}
+ is.close();
}
}
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/record/meta/StructTypeID.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/record/meta/StructTypeID.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/record/meta/StructTypeID.java
(original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/record/meta/StructTypeID.java
Fri May 8 10:10:33 2009
@@ -147,5 +147,10 @@
throw new IOException("Unknown type read");
}
}
-
+
+ public boolean equals(Object o) {
+ return super.equals(o);
+ }
+
+ public int hashCode() { return super.hashCode(); }
}
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/util/ProgramDriver.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/util/ProgramDriver.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/util/ProgramDriver.java
(original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/util/ProgramDriver.java Fri
May 8 10:10:33 2009
@@ -106,13 +106,14 @@
* If it is found, it calls the main method in that class with the rest
* of the command line arguments.
* @param args The argument from the user. args[0] is the command to run.
+ * @return -1 on error, 0 on success
* @throws NoSuchMethodException
* @throws SecurityException
* @throws IllegalAccessException
* @throws IllegalArgumentException
* @throws Throwable Anything thrown by the example program's main
*/
- public void driver(String[] args)
+ public int driver(String[] args)
throws Throwable
{
// Make sure they gave us a program name.
@@ -120,7 +121,7 @@
System.out.println("An example program must be given as the" +
" first argument.");
printUsage(programs);
- System.exit(-1);
+ return -1;
}
// And that it is good.
@@ -128,7 +129,7 @@
if (pgm == null) {
System.out.println("Unknown program '" + args[0] + "' chosen.");
printUsage(programs);
- System.exit(-1);
+ return -1;
}
// Remove the leading argument and call main
@@ -137,6 +138,7 @@
new_args[i-1] = args[i];
}
pgm.invoke(new_args);
+ return 0;
}
}
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/util/Progress.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/util/Progress.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/util/Progress.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/util/Progress.java Fri May 8
10:10:33 2009
@@ -47,11 +47,14 @@
public synchronized Progress addPhase() {
Progress phase = new Progress();
phases.add(phase);
- phase.parent = this;
+ phase.setParent(this);
progressPerPhase = 1.0f / (float)phases.size();
return phase;
}
+ synchronized Progress getParent() { return parent; }
+ synchronized void setParent(Progress parent) { this.parent = parent; }
+
/** Called during execution to move to the next phase at this level in the
* tree. */
public synchronized void startNextPhase() {
@@ -90,7 +93,7 @@
// and the node's parent never changes. Still, it doesn't hurt.
public synchronized float get() {
Progress node = this;
- while (node.parent != null) { // find the root
+ while (node.getParent() != null) { // find the root
node = parent;
}
return node.getInternal();
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/util/RunJar.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/util/RunJar.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/util/RunJar.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/util/RunJar.java Fri May 8
10:10:33 2009
@@ -107,15 +107,19 @@
mainClassName = mainClassName.replaceAll("/", ".");
File tmpDir = new File(new Configuration().get("hadoop.tmp.dir"));
- tmpDir.mkdirs();
- if (!tmpDir.isDirectory()) {
+ boolean b = tmpDir.mkdirs();
+ if (!b || !tmpDir.isDirectory()) {
System.err.println("Mkdirs failed to create " + tmpDir);
System.exit(-1);
}
final File workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
- workDir.delete();
- workDir.mkdirs();
- if (!workDir.isDirectory()) {
+ b = workDir.delete();
+ if (!b) {
+ System.err.println("Delete failed for " + workDir);
+ System.exit(-1);
+ }
+ b = workDir.mkdirs();
+ if (!b || !workDir.isDirectory()) {
System.err.println("Mkdirs failed to create " + workDir);
System.exit(-1);
}
Modified:
hadoop/core/trunk/src/examples/org/apache/hadoop/examples/DBCountPageView.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/examples/org/apache/hadoop/examples/DBCountPageView.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
---
hadoop/core/trunk/src/examples/org/apache/hadoop/examples/DBCountPageView.java
(original)
+++
hadoop/core/trunk/src/examples/org/apache/hadoop/examples/DBCountPageView.java
Fri May 8 10:10:33 2009
@@ -136,15 +136,15 @@
private void dropTables() {
String dropAccess = "DROP TABLE Access";
String dropPageview = "DROP TABLE Pageview";
-
+ Statement st = null;
try {
- Statement st = connection.createStatement();
+ st = connection.createStatement();
st.executeUpdate(dropAccess);
st.executeUpdate(dropPageview);
connection.commit();
st.close();
}catch (SQLException ex) {
- //ignore
+ try { if (st != null) { st.close(); } } catch (Exception e) {}
}
}
Modified:
hadoop/core/trunk/src/examples/org/apache/hadoop/examples/ExampleDriver.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/examples/org/apache/hadoop/examples/ExampleDriver.java?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
---
hadoop/core/trunk/src/examples/org/apache/hadoop/examples/ExampleDriver.java
(original)
+++
hadoop/core/trunk/src/examples/org/apache/hadoop/examples/ExampleDriver.java
Fri May 8 10:10:33 2009
@@ -63,10 +63,7 @@
pgd.addClass("teragen", TeraGen.class, "Generate data for the terasort");
pgd.addClass("terasort", TeraSort.class, "Run the terasort");
pgd.addClass("teravalidate", TeraValidate.class, "Checking results of
terasort");
- pgd.driver(argv);
-
- // Success
- exitCode = 0;
+ exitCode = pgd.driver(argv);
}
catch(Throwable e){
e.printStackTrace();
Modified: hadoop/core/trunk/src/test/findbugsExcludeFile.xml
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/findbugsExcludeFile.xml?rev=772911&r1=772910&r2=772911&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/findbugsExcludeFile.xml (original)
+++ hadoop/core/trunk/src/test/findbugsExcludeFile.xml Fri May 8 10:10:33 2009
@@ -136,4 +136,81 @@
<Field name="kvindex" />
<Bug pattern="IS2_INCONSISTENT_SYNC" />
</Match>
-</FindBugsFilter>
+
+ <!--
+ core changes
+ -->
+ <Match>
+ <Class name="~org.apache.hadoop.*" />
+ <Bug code="MS" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.fs.FileSystem" />
+ <Method name="checkPath" />
+ <Bug pattern="ES_COMPARING_STRINGS_WITH_EQ" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.fs.kfs.KFSOutputStream" />
+ <Field name="path" />
+ <Bug pattern="URF_UNREAD_FIELD" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.fs.kfs.KosmosFileSystem" />
+ <Method name="initialize" />
+ <Bug pattern="DM_EXIT" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.io.Closeable" />
+ <Bug pattern="NM_SAME_SIMPLE_NAME_AS_INTERFACE" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.security.AccessControlException" />
+ <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.record.meta.Utils" />
+ <Method name="skip" />
+ <Bug pattern="BC_UNCONFIRMED_CAST" />
+ </Match>
+
+ <!--
+ The compareTo method is actually a dummy method that just
+ throws exceptions. So, no need to override equals. Ignore
+ -->
+ <Match>
+ <Class name="org.apache.hadoop.record.meta.RecordTypeInfo" />
+ <Bug pattern="EQ_COMPARETO_USE_OBJECT_EQUALS" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.util.ProcfsBasedProcessTree" />
+ <Bug pattern="DMI_HARDCODED_ABSOLUTE_FILENAME" />
+ </Match>
+
+ <!--
+ Streaming, Examples
+ -->
+ <Match>
+ <Class name="org.apache.hadoop.streaming.StreamUtil$TaskId" />
+ <Bug pattern="URF_UNREAD_FIELD" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.examples.DBCountPageView" />
+ <Method name="verify" />
+ <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.examples.ContextFactory" />
+ <Method name="setAttributes" />
+ <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
+ </Match>
+
+ </FindBugsFilter>