Author: omalley
Date: Tue Feb 10 18:14:41 2009
New Revision: 743039
URL: http://svn.apache.org/viewvc?rev=743039&view=rev
Log:
HADOOP-5212. Back out files accidentally included.
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
hadoop/core/trunk/src/test/org/apache/hadoop/fs/FileSystemContractBaseTest.java
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java?rev=743039&r1=743038&r2=743039&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java Tue Feb 10 18:14:41 2009
@@ -168,7 +168,7 @@
@Override
public void close() throws IOException {
if (closed) {
- return;
+ throw new IOException("Stream closed");
}
if (blockStream != null) {
blockStream.close();
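
Note: this hunk restores the pre-existing behaviour in which a second call to close() throws rather than returning silently; the idempotent variant being backed out was apparently part of the HADOOP-4760 patch referenced in the tests deleted below. A minimal sketch contrasting the two behaviours (GuardedStream is a hypothetical illustration, not the Hadoop source):

import java.io.IOException;

class GuardedStream {
  private boolean closed;

  // Behaviour restored by this revision: a second close() fails fast.
  public synchronized void close() throws IOException {
    if (closed) {
      throw new IOException("Stream closed");
    }
    closed = true; // release underlying resources here
  }

  // Behaviour being backed out: idempotent close, matching the
  // java.io.Closeable javadoc ("If the stream is already closed then
  // invoking this method has no effect").
  public synchronized void closeQuietly() {
    if (closed) {
      return;
    }
    closed = true;
  }
}

The same close() change recurs in S3OutputStream, NativeS3FileSystem, and DFSClient below.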
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java?rev=743039&r1=743038&r2=743039&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java Tue Feb 10 18:14:41 2009
@@ -200,7 +200,7 @@
@Override
public synchronized void close() throws IOException {
if (closed) {
- return;
+ throw new IOException("Stream closed");
}
flush();
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=743039&r1=743038&r2=743039&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java Tue Feb 10 18:14:41 2009
@@ -85,7 +85,6 @@
this.key = key;
}
- @Override
public synchronized int read() throws IOException {
int result = in.read();
if (result != -1) {
@@ -93,7 +92,6 @@
}
return result;
}
- @Override
public synchronized int read(byte[] b, int off, int len)
throws IOException {
@@ -104,22 +102,18 @@
return result;
}
- @Override
public void close() throws IOException {
in.close();
}
- @Override
public synchronized void seek(long pos) throws IOException {
in.close();
in = store.retrieve(key, pos);
this.pos = pos;
}
- @Override
public synchronized long getPos() throws IOException {
return pos;
}
- @Override
public boolean seekToNewSource(long targetPos) throws IOException {
return false;
}
@@ -170,7 +164,7 @@
@Override
public synchronized void close() throws IOException {
if (closed) {
- return;
+ throw new IOException("Stream closed");
}
backupStream.close();
@@ -266,7 +260,6 @@
}
/** This optional operation is not yet supported. */
- @Override
public FSDataOutputStream append(Path f, int bufferSize,
Progressable progress) throws IOException {
throw new IOException("Not supported");
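
Note: the NativeS3FsInputStream context above also shows the seek-by-reopen pattern: an HTTP object stream cannot rewind, so seek() closes the current stream and re-requests the object starting at the target offset. A self-contained sketch of that pattern, with a hypothetical ObjectStore interface standing in for Hadoop's NativeFileSystemStore:

import java.io.IOException;
import java.io.InputStream;

// Hypothetical stand-in for Hadoop's NativeFileSystemStore.
interface ObjectStore {
  InputStream retrieve(String key, long byteRangeStart) throws IOException;
}

class ReopeningInputStream {
  private final ObjectStore store;
  private final String key;
  private InputStream in;
  private long pos;

  ReopeningInputStream(ObjectStore store, String key) throws IOException {
    this.store = store;
    this.key = key;
    this.in = store.retrieve(key, 0);
  }

  // Seeking drops the current connection and reopens at the new offset.
  public synchronized void seek(long newPos) throws IOException {
    in.close();
    in = store.retrieve(key, newPos);
    pos = newPos;
  }

  public synchronized long getPos() {
    return pos;
  }
}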
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java?rev=743039&r1=743038&r2=743039&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java Tue Feb 10 18:14:41 2009
@@ -17,85 +17,39 @@
*/
package org.apache.hadoop.hdfs;
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.net.InetSocketAddress;
-import java.net.Socket;
-import java.nio.BufferOverflowException;
-import java.nio.ByteBuffer;
-import java.util.AbstractMap;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
-import java.util.zip.CRC32;
-
-import javax.net.SocketFactory;
-import javax.security.auth.login.LoginException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.BlockLocation;
-import org.apache.hadoop.fs.ChecksumException;
-import org.apache.hadoop.fs.ContentSummary;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSInputChecker;
-import org.apache.hadoop.fs.FSInputStream;
-import org.apache.hadoop.fs.FSOutputSummer;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.Syncable;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.io.retry.RetryPolicies;
+import org.apache.hadoop.io.retry.RetryPolicy;
+import org.apache.hadoop.io.retry.RetryProxy;
+import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-import org.apache.hadoop.hdfs.protocol.Block;
-import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
-import org.apache.hadoop.hdfs.protocol.ClientProtocol;
-import org.apache.hadoop.hdfs.protocol.DataTransferProtocol;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-import org.apache.hadoop.hdfs.protocol.FSConstants;
-import org.apache.hadoop.hdfs.protocol.LocatedBlock;
-import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
-import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.net.NodeBase;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.hdfs.protocol.*;
import org.apache.hadoop.hdfs.server.common.HdfsConstants;
import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.MD5Hash;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.retry.RetryPolicies;
-import org.apache.hadoop.io.retry.RetryPolicy;
-import org.apache.hadoop.io.retry.RetryProxy;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UnixUserGroupInformation;
-import org.apache.hadoop.util.Daemon;
-import org.apache.hadoop.util.DataChecksum;
-import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.*;
+
+import org.apache.commons.logging.*;
+
+import java.io.*;
+import java.net.*;
+import java.util.*;
+import java.util.zip.CRC32;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.ConcurrentHashMap;
+import java.nio.BufferOverflowException;
+import java.nio.ByteBuffer;
+
+import javax.net.SocketFactory;
+import javax.security.auth.login.LoginException;
/********************************************************
* DFSClient can connect to a Hadoop Filesystem and
@@ -997,7 +951,6 @@
}
/** {@inheritDoc} */
- @Override
public String toString() {
String s = getClass().getSimpleName();
if (LOG.isTraceEnabled()) {
@@ -1575,7 +1528,7 @@
public synchronized void close() throws IOException {
checkOpen();
if (closed) {
- return;
+ throw new IOException("Stream closed");
}
if ( blockReader != null ) {
@@ -2143,7 +2096,6 @@
private volatile boolean closed = false;
- @Override
public void run() {
while (!closed && clientRunning) {
@@ -2312,7 +2264,6 @@
this.targets = targets;
}
- @Override
public void run() {
this.setName("ResponseProcessor for block " + block);
@@ -2532,8 +2483,12 @@
}
private void isClosed() throws IOException {
- if (closed && lastException != null) {
+ if (closed) {
+ if (lastException != null) {
throw lastException;
+ } else {
+ throw new IOException("Stream closed.");
+ }
}
}
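
Note: this hunk restores the stricter guard: once the stream is closed, every subsequent operation fails, surfacing a previously recorded error when one exists. A sketch with illustrative names (not the full DFSClient code):

import java.io.IOException;

class ClosedGuard {
  private volatile boolean closed;
  private volatile IOException lastException;

  void checkNotClosed() throws IOException {
    if (closed) {
      if (lastException != null) {
        throw lastException; // report the original failure first
      }
      throw new IOException("Stream closed.");
    }
  }
}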
@@ -3055,8 +3010,6 @@
*/
@Override
public void close() throws IOException {
- if(closed)
- return;
closeInternal();
leasechecker.remove(src);
@@ -3190,7 +3143,6 @@
}
/** {@inheritDoc} */
- @Override
public String toString() {
return getClass().getSimpleName() + "[clientName=" + clientName
+ ", ugi=" + ugi + "]";
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/fs/FileSystemContractBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/fs/FileSystemContractBaseTest.java?rev=743039&r1=743038&r2=743039&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/fs/FileSystemContractBaseTest.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/fs/FileSystemContractBaseTest.java Tue Feb 10 18:14:41 2009
@@ -23,6 +23,12 @@
import junit.framework.TestCase;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
/**
* <p>
* A collection of tests for the contract of the {@link FileSystem}.
@@ -426,26 +432,6 @@
fs.exists(path("/test/new/newdir/dir/subdir/file2")));
}
- public void testInputStreamClosedTwice() throws IOException {
- //HADOOP-4760 according to Closeable#close() closing already-closed
- //streams should have no effect.
- Path src = path("/test/hadoop/file");
- createFile(src);
- FSDataInputStream in = fs.open(src);
- in.close();
- in.close();
- }
-
- public void testOutputStreamClosedTwice() throws IOException {
- //HADOOP-4760 according to Closeable#close() closing already-closed
- //streams should have no effect.
- Path src = path("/test/hadoop/file");
- FSDataOutputStream out = fs.create(src);
- out.writeChar('H'); //write some data
- out.close();
- out.close();
- }
-
protected Path path(String pathString) {
return new Path(pathString).makeQualified(fs);
}
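
Note: the two deleted tests asserted the java.io.Closeable contract introduced with HADOOP-4760; removing them is consistent with restoring the throwing close() behaviour in the stream classes above. A standalone demonstration of the contract itself (DoubleCloseDemo is illustrative only):

import java.io.ByteArrayInputStream;
import java.io.IOException;

// Per the java.io.Closeable javadoc, closing an already-closed stream has
// no effect. JDK streams such as ByteArrayInputStream honour this, so the
// second close() below is a harmless no-op.
public class DoubleCloseDemo {
  public static void main(String[] args) throws IOException {
    ByteArrayInputStream in = new ByteArrayInputStream(new byte[] {'H'});
    in.close();
    in.close();
  }
}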