http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3InputStream.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3InputStream.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3InputStream.java deleted file mode 100644 index 6f39f01..0000000 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3InputStream.java +++ /dev/null @@ -1,220 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.fs.s3; - -import java.io.DataInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.EOFException; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSInputStream; -import org.apache.hadoop.fs.FileSystem; - -@InterfaceAudience.Private -@InterfaceStability.Unstable -class S3InputStream extends FSInputStream { - - private FileSystemStore store; - - private Block[] blocks; - - private boolean closed; - - private long fileLength; - - private long pos = 0; - - private File blockFile; - - private DataInputStream blockStream; - - private long blockEnd = -1; - - private FileSystem.Statistics stats; - - private static final Log LOG = - LogFactory.getLog(S3InputStream.class.getName()); - - - @Deprecated - public S3InputStream(Configuration conf, FileSystemStore store, - INode inode) { - this(conf, store, inode, null); - } - - public S3InputStream(Configuration conf, FileSystemStore store, - INode inode, FileSystem.Statistics stats) { - - this.store = store; - this.stats = stats; - this.blocks = inode.getBlocks(); - for (Block block : blocks) { - this.fileLength += block.getLength(); - } - } - - @Override - public synchronized long getPos() throws IOException { - return pos; - } - - @Override - public synchronized int available() throws IOException { - return (int) (fileLength - pos); - } - - @Override - public synchronized void seek(long targetPos) throws IOException { - String message = String.format("Cannot seek to %d", targetPos); - if (targetPos > fileLength) { - throw new EOFException(message + ": after EOF"); - } - if (targetPos < 0) { - throw new EOFException(message + ": negative"); - } - pos = targetPos; - blockEnd = -1; - } - - @Override - public synchronized boolean seekToNewSource(long targetPos) throws IOException { - return false; - } - - @Override - public synchronized int read() throws IOException { - if (closed) { - throw new 
IOException("Stream closed"); - } - int result = -1; - if (pos < fileLength) { - if (pos > blockEnd) { - blockSeekTo(pos); - } - result = blockStream.read(); - if (result >= 0) { - pos++; - } - } - if (stats != null && result >= 0) { - stats.incrementBytesRead(1); - } - return result; - } - - @Override - public synchronized int read(byte buf[], int off, int len) throws IOException { - if (closed) { - throw new IOException("Stream closed"); - } - if (pos < fileLength) { - if (pos > blockEnd) { - blockSeekTo(pos); - } - int realLen = (int) Math.min((long) len, (blockEnd - pos + 1L)); - int result = blockStream.read(buf, off, realLen); - if (result >= 0) { - pos += result; - } - if (stats != null && result > 0) { - stats.incrementBytesRead(result); - } - return result; - } - return -1; - } - - private synchronized void blockSeekTo(long target) throws IOException { - // - // Compute desired block - // - int targetBlock = -1; - long targetBlockStart = 0; - long targetBlockEnd = 0; - for (int i = 0; i < blocks.length; i++) { - long blockLength = blocks[i].getLength(); - targetBlockEnd = targetBlockStart + blockLength - 1; - - if (target >= targetBlockStart && target <= targetBlockEnd) { - targetBlock = i; - break; - } else { - targetBlockStart = targetBlockEnd + 1; - } - } - if (targetBlock < 0) { - throw new IOException( - "Impossible situation: could not find target position " + target); - } - long offsetIntoBlock = target - targetBlockStart; - - // read block blocks[targetBlock] from position offsetIntoBlock - - this.blockFile = store.retrieveBlock(blocks[targetBlock], offsetIntoBlock); - - this.pos = target; - this.blockEnd = targetBlockEnd; - this.blockStream = new DataInputStream(new FileInputStream(blockFile)); - - } - - @Override - public void close() throws IOException { - if (closed) { - return; - } - if (blockStream != null) { - blockStream.close(); - blockStream = null; - } - if (blockFile != null) { - boolean b = blockFile.delete(); - if (!b) { - LOG.warn("Ignoring failed delete"); - } - } - super.close(); - closed = true; - } - - /** - * We don't support marks. - */ - @Override - public boolean markSupported() { - return false; - } - - @Override - public void mark(int readLimit) { - // Do nothing - } - - @Override - public void reset() throws IOException { - throw new IOException("Mark not supported"); - } - -}
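[Editorial aside, not part of the commit: the deleted S3InputStream above maps a file offset to the block holding it in blockSeekTo() by accumulating block lengths until the target falls inside the current block's inclusive [start, end] range. Below is a minimal, self-contained Java sketch of that arithmetic; BlockLookupSketch and findBlock are hypothetical names used purely for illustration, not part of the removed class.

// Sketch of the block-lookup arithmetic from the deleted
// S3InputStream.blockSeekTo(): accumulate block lengths until the
// target offset falls inside the current block's [start, end] range.
public final class BlockLookupSketch {

  /** Returns the index of the block containing target, or -1 if past EOF. */
  static int findBlock(long[] blockLengths, long target) {
    long blockStart = 0;
    for (int i = 0; i < blockLengths.length; i++) {
      long blockEnd = blockStart + blockLengths[i] - 1; // inclusive end
      if (target >= blockStart && target <= blockEnd) {
        return i;
      }
      blockStart = blockEnd + 1;
    }
    return -1; // the "Impossible situation" branch in the original code
  }

  public static void main(String[] args) {
    long[] lengths = {64L << 20, 64L << 20, 10}; // two full blocks + a tail
    System.out.println(findBlock(lengths, 0));                  // 0
    System.out.println(findBlock(lengths, 64L << 20));          // 1
    System.out.println(findBlock(lengths, (128L << 20) + 9));   // 2
    System.out.println(findBlock(lengths, (128L << 20) + 10));  // -1 (EOF)
  }
}

Once the index is known, the original stream retrieved that block to a local file at offset target - targetBlockStart and read from there, which is why its seek() only recorded the new position and invalidated blockEnd, deferring the actual block fetch to the next read.]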
http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3OutputStream.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3OutputStream.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3OutputStream.java deleted file mode 100644 index 761f2ce..0000000 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3OutputStream.java +++ /dev/null @@ -1,235 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.fs.s3; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.s3.INode.FileType; -import org.apache.hadoop.util.Progressable; - -@InterfaceAudience.Private -@InterfaceStability.Unstable -class S3OutputStream extends OutputStream { - - private Configuration conf; - - private int bufferSize; - - private FileSystemStore store; - - private Path path; - - private long blockSize; - - private File backupFile; - - private OutputStream backupStream; - - private Random r = new Random(); - - private boolean closed; - - private int pos = 0; - - private long filePos = 0; - - private int bytesWrittenToBlock = 0; - - private byte[] outBuf; - - private List<Block> blocks = new ArrayList<Block>(); - - private Block nextBlock; - - private static final Log LOG = - LogFactory.getLog(S3OutputStream.class.getName()); - - - public S3OutputStream(Configuration conf, FileSystemStore store, - Path path, long blockSize, Progressable progress, - int buffersize) throws IOException { - - this.conf = conf; - this.store = store; - this.path = path; - this.blockSize = blockSize; - this.backupFile = newBackupFile(); - this.backupStream = new FileOutputStream(backupFile); - this.bufferSize = buffersize; - this.outBuf = new byte[bufferSize]; - - } - - private File newBackupFile() throws IOException { - File dir = new File(conf.get("fs.s3.buffer.dir")); - if (!dir.exists() && !dir.mkdirs()) { - throw new IOException("Cannot create S3 buffer directory: " + dir); - } - File result = File.createTempFile("output-", ".tmp", dir); - result.deleteOnExit(); - return result; - } - - public long getPos() throws IOException { - return filePos; - } - - @Override - public synchronized void write(int b) throws IOException { - if 
(closed) { - throw new IOException("Stream closed"); - } - - if ((bytesWrittenToBlock + pos == blockSize) || (pos >= bufferSize)) { - flush(); - } - outBuf[pos++] = (byte) b; - filePos++; - } - - @Override - public synchronized void write(byte b[], int off, int len) throws IOException { - if (closed) { - throw new IOException("Stream closed"); - } - while (len > 0) { - int remaining = bufferSize - pos; - int toWrite = Math.min(remaining, len); - System.arraycopy(b, off, outBuf, pos, toWrite); - pos += toWrite; - off += toWrite; - len -= toWrite; - filePos += toWrite; - - if ((bytesWrittenToBlock + pos >= blockSize) || (pos == bufferSize)) { - flush(); - } - } - } - - @Override - public synchronized void flush() throws IOException { - if (closed) { - throw new IOException("Stream closed"); - } - - if (bytesWrittenToBlock + pos >= blockSize) { - flushData((int) blockSize - bytesWrittenToBlock); - } - if (bytesWrittenToBlock == blockSize) { - endBlock(); - } - flushData(pos); - } - - private synchronized void flushData(int maxPos) throws IOException { - int workingPos = Math.min(pos, maxPos); - - if (workingPos > 0) { - // - // To the local block backup, write just the bytes - // - backupStream.write(outBuf, 0, workingPos); - - // - // Track position - // - bytesWrittenToBlock += workingPos; - System.arraycopy(outBuf, workingPos, outBuf, 0, pos - workingPos); - pos -= workingPos; - } - } - - private synchronized void endBlock() throws IOException { - // - // Done with local copy - // - backupStream.close(); - - // - // Send it to S3 - // - // TODO: Use passed in Progressable to report progress. - nextBlockOutputStream(); - store.storeBlock(nextBlock, backupFile); - internalClose(); - - // - // Delete local backup, start new one - // - boolean b = backupFile.delete(); - if (!b) { - LOG.warn("Ignoring failed delete"); - } - backupFile = newBackupFile(); - backupStream = new FileOutputStream(backupFile); - bytesWrittenToBlock = 0; - } - - private synchronized void nextBlockOutputStream() throws IOException { - long blockId = r.nextLong(); - while (store.blockExists(blockId)) { - blockId = r.nextLong(); - } - nextBlock = new Block(blockId, bytesWrittenToBlock); - blocks.add(nextBlock); - bytesWrittenToBlock = 0; - } - - private synchronized void internalClose() throws IOException { - INode inode = new INode(FileType.FILE, blocks.toArray(new Block[blocks - .size()])); - store.storeINode(path, inode); - } - - @Override - public synchronized void close() throws IOException { - if (closed) { - return; - } - - flush(); - if (filePos == 0 || bytesWrittenToBlock != 0) { - endBlock(); - } - - backupStream.close(); - boolean b = backupFile.delete(); - if (!b) { - LOG.warn("Ignoring failed delete"); - } - - super.close(); - - closed = true; - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/VersionMismatchException.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/VersionMismatchException.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/VersionMismatchException.java deleted file mode 100644 index ccc8969..0000000 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/VersionMismatchException.java +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.fs.s3; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; - -/** - * Thrown when Hadoop cannot read the version of the data stored - * in {@link S3FileSystem}. - */ -@InterfaceAudience.Public -@InterfaceStability.Stable -public class VersionMismatchException extends S3FileSystemException { - private static final long serialVersionUID = 1L; - - public VersionMismatchException(String clientVersion, String dataVersion) { - super("Version mismatch: client expects version " + clientVersion + - ", but data has version " + - (dataVersion == null ? "[unversioned]" : dataVersion)); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/package.html ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/package.html b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/package.html deleted file mode 100644 index dd601e1..0000000 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/package.html +++ /dev/null @@ -1,55 +0,0 @@ -<html> - -<!-- - Licensed to the Apache Software Foundation (ASF) under one or more - contributor license agreements. See the NOTICE file distributed with - this work for additional information regarding copyright ownership. - The ASF licenses this file to You under the Apache License, Version 2.0 - (the "License"); you may not use this file except in compliance with - the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. ---> - -<body> - -<p>A distributed, block-based implementation of {@link -org.apache.hadoop.fs.FileSystem} that uses <a href="http://aws.amazon.com/s3">Amazon S3</a> -as a backing store.</p> - -<p> -Files are stored in S3 as blocks (represented by -{@link org.apache.hadoop.fs.s3.Block}), which have an ID and a length. -Block metadata is stored in S3 as a small record (represented by -{@link org.apache.hadoop.fs.s3.INode}) using the URL-encoded -path string as a key. Inodes record the file type (regular file or directory) and the list of blocks. -This design makes it easy to seek to any given position in a file by reading the inode data to compute -which block to access, then using S3's support for -<a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.2">HTTP Range</a> headers -to start streaming from the correct position. 
-Renames are also efficient since only the inode is moved (by a DELETE followed by a PUT since -S3 does not support renames). -</p> -<p> -For a single file <i>/dir1/file1</i> which takes two blocks of storage, the file structure in S3 -would be something like this: -</p> -<pre> -/ -/dir1 -/dir1/file1 -block-6415776850131549260 -block-3026438247347758425 -</pre> -<p> -Inodes start with a leading <code>/</code>, while blocks are prefixed with <code>block-</code>. -</p> - -</body> -</html> http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java index a10d6f2..c9c0f98 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java @@ -37,8 +37,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSExceptionMessages; -import org.apache.hadoop.fs.s3.S3Credentials; -import org.apache.hadoop.fs.s3.S3Exception; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.AccessControlException; import org.jets3t.service.S3Service; http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java index d646726..bebb09e 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java @@ -53,7 +53,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.fs.s3.S3Exception; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.retry.RetryPolicies; import org.apache.hadoop.io.retry.RetryPolicy; @@ -62,12 +61,19 @@ import org.apache.hadoop.util.Progressable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_BUFFER_DIR_DEFAULT; +import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_BUFFER_DIR_KEY; +import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_MAX_RETRIES_DEFAUL; +import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_MAX_RETRIES_KEY; +import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_SLEEP_TIME_DEFAULT; +import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_SLEEP_TIME_KEY; +import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.addDeprecatedConfigKeys; + /** * A {@link FileSystem} for reading and writing files 
stored on * <a href="http://aws.amazon.com/s3">Amazon S3</a>. - * Unlike {@link org.apache.hadoop.fs.s3.S3FileSystem} this implementation - * stores files on S3 in their - * native form so they can be read by other S3 tools. + * This implementation stores files on S3 in their native form so they can be + * read by other S3 tools. * <p> * A note about directories. S3 of course has no "native" support for them. * The idiom we choose then is: for any directory created by this class, @@ -85,8 +91,6 @@ import org.slf4j.LoggerFactory; * is never returned. * </li> * </ul> - * - * @see org.apache.hadoop.fs.s3.S3FileSystem */ @InterfaceAudience.Public @InterfaceStability.Stable @@ -98,7 +102,12 @@ public class NativeS3FileSystem extends FileSystem { private static final String FOLDER_SUFFIX = "_$folder$"; static final String PATH_DELIMITER = Path.SEPARATOR; private static final int S3_MAX_LISTING_LENGTH = 1000; - + + static { + // Add the deprecated config keys + addDeprecatedConfigKeys(); + } + static class NativeS3FsInputStream extends FSInputStream { private NativeFileSystemStore store; @@ -257,8 +266,10 @@ public class NativeS3FileSystem extends FileSystem { } private File newBackupFile() throws IOException { - if (lDirAlloc == null) { - lDirAlloc = new LocalDirAllocator("fs.s3.buffer.dir"); + if (conf.get(S3_NATIVE_BUFFER_DIR_KEY, null) != null) { + lDirAlloc = new LocalDirAllocator(S3_NATIVE_BUFFER_DIR_KEY); + } else { + lDirAlloc = new LocalDirAllocator(S3_NATIVE_BUFFER_DIR_DEFAULT); } File result = lDirAlloc.createTmpFileForWrite("output-", LocalDirAllocator.SIZE_UNKNOWN, conf); result.deleteOnExit(); @@ -342,8 +353,9 @@ public class NativeS3FileSystem extends FileSystem { NativeFileSystemStore store = new Jets3tNativeFileSystemStore(); RetryPolicy basePolicy = RetryPolicies.retryUpToMaximumCountWithFixedSleep( - conf.getInt("fs.s3.maxRetries", 4), - conf.getLong("fs.s3.sleepTimeSeconds", 10), TimeUnit.SECONDS); + conf.getInt(S3_NATIVE_MAX_RETRIES_KEY, S3_NATIVE_MAX_RETRIES_DEFAUL), + conf.getLong(S3_NATIVE_SLEEP_TIME_KEY, S3_NATIVE_SLEEP_TIME_DEFAULT), + TimeUnit.SECONDS); Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap = new HashMap<Class<? extends Exception>, RetryPolicy>(); exceptionToPolicyMap.put(IOException.class, basePolicy); http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3Credentials.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3Credentials.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3Credentials.java new file mode 100644 index 0000000..713b149 --- /dev/null +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3Credentials.java @@ -0,0 +1,100 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.fs.s3native; + +import java.io.IOException; +import java.net.URI; + +import com.google.common.base.Preconditions; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; + +import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_AWS_ACCESS_KEY_ID; +import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_AWS_SECRET_ACCESS_KEY; + +/** + * <p> + * Extracts AWS credentials from the filesystem URI or configuration. + * </p> + */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +public class S3Credentials { + + private String accessKey; + private String secretAccessKey; + + /** + * @param uri bucket URI optionally containing username and password. + * @param conf configuration + * @throws IllegalArgumentException if credentials for S3 cannot be + * determined. + * @throws IOException if credential providers are misconfigured and we have + * to talk to them. + */ + public void initialize(URI uri, Configuration conf) throws IOException { + Preconditions.checkArgument(uri.getHost() != null, + "Invalid hostname in URI " + uri); + + String userInfo = uri.getUserInfo(); + if (userInfo != null) { + int index = userInfo.indexOf(':'); + if (index != -1) { + accessKey = userInfo.substring(0, index); + secretAccessKey = userInfo.substring(index + 1); + } else { + accessKey = userInfo; + } + } + + if (accessKey == null) { + accessKey = conf.getTrimmed(S3_NATIVE_AWS_ACCESS_KEY_ID); + } + if (secretAccessKey == null) { + final char[] pass = conf.getPassword(S3_NATIVE_AWS_SECRET_ACCESS_KEY); + if (pass != null) { + secretAccessKey = (new String(pass)).trim(); + } + } + + final String scheme = uri.getScheme(); + Preconditions.checkArgument(!(accessKey == null && secretAccessKey == null), + "AWS Access Key ID and Secret Access Key must be specified as the " + + "username or password (respectively) of a " + scheme + " URL, or " + + "by setting the " + S3_NATIVE_AWS_ACCESS_KEY_ID + " or " + + S3_NATIVE_AWS_SECRET_ACCESS_KEY + " properties (respectively)."); + Preconditions.checkArgument(accessKey != null, + "AWS Access Key ID must be specified as the username of a " + scheme + + " URL, or by setting the " + S3_NATIVE_AWS_ACCESS_KEY_ID + + " property."); + Preconditions.checkArgument(secretAccessKey != null, + "AWS Secret Access Key must be specified as the password of a " + scheme + + " URL, or by setting the " + S3_NATIVE_AWS_SECRET_ACCESS_KEY + + " property."); + } + + public String getAccessKey() { + return accessKey; + } + + public String getSecretAccessKey() { + return secretAccessKey; + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3Exception.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3Exception.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3Exception.java new file mode 100644 index 
0000000..9258fd7 --- /dev/null +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3Exception.java @@ -0,0 +1,39 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.fs.s3native; + +import java.io.IOException; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; + +/** + * Thrown if there is a problem communicating with Amazon S3. + */ +@InterfaceAudience.Public +@InterfaceStability.Stable +public class S3Exception extends IOException { + + private static final long serialVersionUID = 1L; + + public S3Exception(Throwable t) { + super(t); + } + +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3NativeFileSystemConfigKeys.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3NativeFileSystemConfigKeys.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3NativeFileSystemConfigKeys.java index 75884fa..7c8b345 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3NativeFileSystemConfigKeys.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/S3NativeFileSystemConfigKeys.java @@ -20,6 +20,8 @@ package org.apache.hadoop.fs.s3native; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.conf.Configuration.DeprecationDelta; import org.apache.hadoop.fs.CommonConfigurationKeys; /** @@ -43,5 +45,22 @@ public class S3NativeFileSystemConfigKeys extends CommonConfigurationKeys { public static final String S3_NATIVE_CLIENT_WRITE_PACKET_SIZE_KEY = "s3native.client-write-packet-size"; public static final int S3_NATIVE_CLIENT_WRITE_PACKET_SIZE_DEFAULT = 64*1024; + static final String S3_NATIVE_BUFFER_DIR_KEY = "fs.s3n.buffer.dir"; + static final String S3_NATIVE_BUFFER_DIR_DEFAULT = "${hadoop.tmp.dir}/s3n"; + static final String S3_NATIVE_MAX_RETRIES_KEY = "fs.s3n.maxRetries"; + static final int S3_NATIVE_MAX_RETRIES_DEFAUL = 4; + static final String S3_NATIVE_SLEEP_TIME_KEY = "fs.s3n.sleepTimeSeconds"; + static final int S3_NATIVE_SLEEP_TIME_DEFAULT = 10; + static final String S3_NATIVE_AWS_ACCESS_KEY_ID = "fs.s3n.awsAccessKeyId"; + static final String S3_NATIVE_AWS_SECRET_ACCESS_KEY = + "fs.s3n.awsSecretAccessKey"; + + static void addDeprecatedConfigKeys() { + Configuration.addDeprecations(new DeprecationDelta[]{ + new DeprecationDelta("fs.s3.buffer.dir", S3_NATIVE_BUFFER_DIR_KEY), + new DeprecationDelta("fs.s3.maxRetries", S3_NATIVE_MAX_RETRIES_KEY), + new 
DeprecationDelta("fs.s3.sleepTimeSeconds", S3_NATIVE_SLEEP_TIME_KEY) + }); + } + } - http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/package.html ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/package.html b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/package.html index 24b9b1d..4d3bde9 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/package.html +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/package.html @@ -23,9 +23,8 @@ A distributed implementation of {@link org.apache.hadoop.fs.FileSystem} for reading and writing files on <a href="http://aws.amazon.com/s3">Amazon S3</a>. -Unlike {@link org.apache.hadoop.fs.s3.S3FileSystem}, which is block-based, -this implementation stores -files on S3 in their native form for interoperability with other S3 tools. +This implementation stores files on S3 in their native form for interoperability +with other S3 tools. </p> </body> http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem b/hadoop-tools/hadoop-aws/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem index 0e3c42a..e606fd9 100644 --- a/hadoop-tools/hadoop-aws/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem +++ b/hadoop-tools/hadoop-aws/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem @@ -13,6 +13,5 @@ # See the License for the specific language governing permissions and # limitations under the License. -org.apache.hadoop.fs.s3.S3FileSystem org.apache.hadoop.fs.s3native.NativeS3FileSystem org.apache.hadoop.fs.s3a.S3AFileSystem http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md index a2bc2a9..2b64754 100644 --- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md +++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md @@ -28,8 +28,8 @@ HADOOP_OPTIONAL_TOOLS in hadoop-env.sh has 'hadoop-aws' in the list. ### Features -1. The "classic" `s3:` filesystem for storing objects in Amazon S3 Storage. **NOTE: `s3:` is being phased out. Use `s3n:` or `s3a:` instead.** + 1. The second-generation, `s3n:` filesystem, making it easy to share data between hadoop and other applications via the S3 object store. 1. The third generation, `s3a:` filesystem. Designed to be a switch in @@ -972,7 +972,6 @@ each filesystem for its testing. 1. `test.fs.s3n.name` : the URL of the bucket for S3n tests 1. `test.fs.s3a.name` : the URL of the bucket for S3a tests -2. `test.fs.s3.name` : the URL of the bucket for "S3" tests The contents of each bucket will be destroyed during the test process: do not use the bucket for any purpose other than testing. 
Furthermore, for @@ -993,21 +992,6 @@ Example: <name>test.fs.s3a.name</name> <value>s3a://test-aws-s3a/</value> </property> - - <property> - <name>test.fs.s3.name</name> - <value>s3://test-aws-s3/</value> - </property> - - <property> - <name>fs.s3.awsAccessKeyId</name> - <value>DONOTPCOMMITTHISKEYTOSCM</value> - </property> - - <property> - <name>fs.s3.awsSecretAccessKey</name> - <value>DONOTEVERSHARETHISSECRETKEY!</value> - </property> <property> <name>fs.s3n.awsAccessKeyId</name> @@ -1051,18 +1035,6 @@ The standard S3 authentication details must also be provided. This can be through copy-and-paste of the `auth-keys.xml` credentials, or it can be through direct XInclude inclusion. -### s3:// - -The filesystem name must be defined in the property `fs.contract.test.fs.s3`. - - -Example: - - <property> - <name>fs.contract.test.fs.s3</name> - <value>s3://test-aws-s3/</value> - </property> - ### s3n:// @@ -1123,12 +1095,6 @@ Example: href="/home/testuser/.ssh/auth-keys.xml"/> <property> - <name>fs.contract.test.fs.s3</name> - <value>s3://test-aws-s3/</value> - </property> - - - <property> <name>fs.contract.test.fs.s3a</name> <value>s3a://test-aws-s3a/</value> </property> http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/S3Contract.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/S3Contract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/S3Contract.java deleted file mode 100644 index 483b78c..0000000 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/S3Contract.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.fs.contract.s3; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.contract.AbstractBondedFSContract; - -/** - * The contract of S3: only enabled if the test bucket is provided. - */ -public class S3Contract extends AbstractBondedFSContract { - - public static final String CONTRACT_XML = "contract/s3.xml"; - - - public S3Contract(Configuration conf) { - super(conf); - //insert the base features - addConfResource(CONTRACT_XML); - } - - @Override - public String getScheme() { - return "s3"; - } - - @Override - public Path getTestPath() { - String testUniqueForkId = System.getProperty("test.unique.fork.id"); - return testUniqueForkId == null ? 
super.getTestPath() : - new Path("/" + testUniqueForkId, "test"); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractCreate.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractCreate.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractCreate.java deleted file mode 100644 index b8c758c..0000000 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractCreate.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.fs.contract.s3; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.contract.AbstractContractCreateTest; -import org.apache.hadoop.fs.contract.AbstractFSContract; -import org.apache.hadoop.fs.contract.ContractTestUtils; - -public class TestS3ContractCreate extends AbstractContractCreateTest { - - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new S3Contract(conf); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractDelete.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractDelete.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractDelete.java deleted file mode 100644 index 2d3cec7..0000000 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractDelete.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.fs.contract.s3; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.contract.AbstractContractDeleteTest; -import org.apache.hadoop.fs.contract.AbstractFSContract; - -public class TestS3ContractDelete extends AbstractContractDeleteTest { - - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new S3Contract(conf); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractMkdir.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractMkdir.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractMkdir.java deleted file mode 100644 index 992ce53..0000000 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractMkdir.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.fs.contract.s3; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.contract.AbstractContractMkdirTest; -import org.apache.hadoop.fs.contract.AbstractFSContract; -import org.apache.hadoop.fs.contract.ContractTestUtils; - -public class TestS3ContractMkdir extends AbstractContractMkdirTest { - - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new S3Contract(conf); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractOpen.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractOpen.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractOpen.java deleted file mode 100644 index 2a4ba31..0000000 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractOpen.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.fs.contract.s3; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.contract.AbstractContractOpenTest; -import org.apache.hadoop.fs.contract.AbstractFSContract; -import org.apache.hadoop.fs.contract.ContractTestUtils; - -public class TestS3ContractOpen extends AbstractContractOpenTest { - - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new S3Contract(conf); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractRename.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractRename.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractRename.java deleted file mode 100644 index 68bdbda..0000000 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractRename.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.fs.contract.s3; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.contract.AbstractContractRenameTest; -import org.apache.hadoop.fs.contract.AbstractFSContract; - -public class TestS3ContractRename extends AbstractContractRenameTest { - - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new S3Contract(conf); - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractRootDir.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractRootDir.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractRootDir.java deleted file mode 100644 index aeb68ba..0000000 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractRootDir.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.fs.contract.s3; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.contract.AbstractContractRootDirectoryTest; -import org.apache.hadoop.fs.contract.AbstractFSContract; - -import org.junit.Ignore; -import org.junit.Test; - -/** - * root dir operations against an S3 bucket - */ -public class TestS3ContractRootDir extends AbstractContractRootDirectoryTest { - - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new S3Contract(conf); - } - - @Override - @Test - @Ignore - public void testRmEmptyRootDirNonRecursive() { - } - - @Override - @Test - @Ignore - public void testRmRootRecursive() { - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractSeek.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractSeek.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractSeek.java deleted file mode 100644 index 5f9535f..0000000 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3/TestS3ContractSeek.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.fs.contract.s3; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.contract.AbstractContractSeekTest; -import org.apache.hadoop.fs.contract.AbstractFSContract; -import org.apache.hadoop.fs.contract.ContractTestUtils; - -import org.junit.Ignore; -import org.junit.Test; - -public class TestS3ContractSeek extends AbstractContractSeekTest { - - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new S3Contract(conf); - } - - @Override - @Test - @Ignore - public void testReadFullyZeroByteFile() { - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java deleted file mode 100644 index 2d43c8b..0000000 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java +++ /dev/null @@ -1,200 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.fs.s3; - -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.net.URI; -import java.util.HashMap; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Set; -import java.util.SortedMap; -import java.util.TreeMap; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.s3.INode.FileType; - -/** - * A stub implementation of {@link FileSystemStore} for testing - * {@link S3FileSystem} without actually connecting to S3. 
- */ -public class InMemoryFileSystemStore implements FileSystemStore { - - private Configuration conf; - private SortedMap<Path, INode> inodes = new TreeMap<Path, INode>(); - private Map<Long, byte[]> blocks = new HashMap<Long, byte[]>(); - - @Override - public void initialize(URI uri, Configuration conf) { - this.conf = conf; - inodes.put(new Path("/"), INode.DIRECTORY_INODE); - } - - @Override - public String getVersion() throws IOException { - return "0"; - } - - @Override - public void deleteINode(Path path) throws IOException { - inodes.remove(normalize(path)); - } - - @Override - public void deleteBlock(Block block) throws IOException { - blocks.remove(block.getId()); - } - - @Override - public boolean inodeExists(Path path) throws IOException { - return inodes.containsKey(normalize(path)); - } - - @Override - public boolean blockExists(long blockId) throws IOException { - return blocks.containsKey(blockId); - } - - @Override - public INode retrieveINode(Path path) throws IOException { - return inodes.get(normalize(path)); - } - - @Override - public File retrieveBlock(Block block, long byteRangeStart) throws IOException { - byte[] data = blocks.get(block.getId()); - File file = createTempFile(); - BufferedOutputStream out = null; - try { - out = new BufferedOutputStream(new FileOutputStream(file)); - out.write(data, (int) byteRangeStart, data.length - (int) byteRangeStart); - } finally { - if (out != null) { - out.close(); - } - } - return file; - } - - private File createTempFile() throws IOException { - File dir = new File(conf.get("fs.s3.buffer.dir")); - if (!dir.exists() && !dir.mkdirs()) { - throw new IOException("Cannot create S3 buffer directory: " + dir); - } - File result = File.createTempFile("test-", ".tmp", dir); - result.deleteOnExit(); - return result; - } - - @Override - public Set<Path> listSubPaths(Path path) throws IOException { - Path normalizedPath = normalize(path); - // This is inefficient but more than adequate for testing purposes. - Set<Path> subPaths = new LinkedHashSet<Path>(); - for (Path p : inodes.tailMap(normalizedPath).keySet()) { - if (normalizedPath.equals(p.getParent())) { - subPaths.add(p); - } - } - return subPaths; - } - - @Override - public Set<Path> listDeepSubPaths(Path path) throws IOException { - Path normalizedPath = normalize(path); - String pathString = normalizedPath.toUri().getPath(); - if (!pathString.endsWith("/")) { - pathString += "/"; - } - // This is inefficient but more than adequate for testing purposes. 
- Set<Path> subPaths = new LinkedHashSet<Path>(); - for (Path p : inodes.tailMap(normalizedPath).keySet()) { - if (p.toUri().getPath().startsWith(pathString)) { - subPaths.add(p); - } - } - return subPaths; - } - - @Override - public void storeINode(Path path, INode inode) throws IOException { - inodes.put(normalize(path), inode); - } - - @Override - public void storeBlock(Block block, File file) throws IOException { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - byte[] buf = new byte[8192]; - int numRead; - BufferedInputStream in = null; - try { - in = new BufferedInputStream(new FileInputStream(file)); - while ((numRead = in.read(buf)) >= 0) { - out.write(buf, 0, numRead); - } - } finally { - if (in != null) { - in.close(); - } - } - blocks.put(block.getId(), out.toByteArray()); - } - - private Path normalize(Path path) { - if (!path.isAbsolute()) { - throw new IllegalArgumentException("Path must be absolute: " + path); - } - return new Path(path.toUri().getPath()); - } - - @Override - public void purge() throws IOException { - inodes.clear(); - blocks.clear(); - } - - @Override - public void dump() throws IOException { - StringBuilder sb = new StringBuilder(getClass().getSimpleName()); - sb.append(", \n"); - for (Map.Entry<Path, INode> entry : inodes.entrySet()) { - sb.append(entry.getKey()).append("\n"); - INode inode = entry.getValue(); - sb.append("\t").append(inode.getFileType()).append("\n"); - if (inode.getFileType() == FileType.DIRECTORY) { - continue; - } - for (int j = 0; j < inode.getBlocks().length; j++) { - sb.append("\t").append(inode.getBlocks()[j]).append("\n"); - } - } - System.out.println(sb); - - System.out.println(inodes.keySet()); - System.out.println(blocks.keySet()); - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/Jets3tS3FileSystemContractTest.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/Jets3tS3FileSystemContractTest.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/Jets3tS3FileSystemContractTest.java deleted file mode 100644 index 53b3c03..0000000 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/Jets3tS3FileSystemContractTest.java +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/Jets3tS3FileSystemContractTest.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/Jets3tS3FileSystemContractTest.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/Jets3tS3FileSystemContractTest.java
deleted file mode 100644
index 53b3c03..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/Jets3tS3FileSystemContractTest.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3;
-
-import java.io.IOException;
-
-public class Jets3tS3FileSystemContractTest
-    extends S3FileSystemContractBaseTest {
-
-  @Override
-  FileSystemStore getFileSystemStore() throws IOException {
-    return new Jets3tFileSystemStore();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/S3FileSystemContractBaseTest.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/S3FileSystemContractBaseTest.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/S3FileSystemContractBaseTest.java
deleted file mode 100644
index de106f8..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/S3FileSystemContractBaseTest.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3;
-
-import java.io.IOException;
-import java.net.URI;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystemContractBaseTest;
-import org.junit.internal.AssumptionViolatedException;
-
-public abstract class S3FileSystemContractBaseTest
-    extends FileSystemContractBaseTest {
-
-  public static final String KEY_TEST_FS = "test.fs.s3.name";
-  private FileSystemStore store;
-
-  abstract FileSystemStore getFileSystemStore() throws IOException;
-
-  @Override
-  protected void setUp() throws Exception {
-    Configuration conf = new Configuration();
-    store = getFileSystemStore();
-    fs = new S3FileSystem(store);
-    String fsname = conf.get(KEY_TEST_FS);
-    if (StringUtils.isEmpty(fsname)) {
-      throw new AssumptionViolatedException(
-          "No test FS defined in :" + KEY_TEST_FS);
-    }
-    fs.initialize(URI.create(fsname), conf);
-  }
-
-  @Override
-  protected void tearDown() throws Exception {
-    store.purge();
-    super.tearDown();
-  }
-
-  public void testCanonicalName() throws Exception {
-    assertNull("s3 doesn't support security token and shouldn't have canonical name",
-        fs.getCanonicalServiceName());
-  }
-
-}
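The base class above gates the whole suite on the test.fs.s3.name setting, throwing AssumptionViolatedException so unconfigured runs are reported as skipped rather than failed. A small self-contained sketch of the same gating pattern, using a system property in place of the Hadoop Configuration lookup:

import static org.junit.Assume.assumeTrue;

import org.junit.Before;
import org.junit.Test;

/** Standalone sketch: skip integration tests when no test endpoint is set. */
public class EndpointGatedTest {
  private String fsName;

  @Before
  public void setUp() {
    // The system property stands in for the conf.get(KEY_TEST_FS) lookup.
    fsName = System.getProperty("test.fs.s3.name");
    // assumeTrue marks the test as skipped (not failed) when unset.
    assumeTrue("No test FS defined in: test.fs.s3.name",
        fsName != null && !fsName.isEmpty());
  }

  @Test
  public void testSomethingAgainstTheFs() {
    // would exercise the filesystem bound to fsName here
  }
}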
http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/S3InMemoryFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/S3InMemoryFileSystem.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/S3InMemoryFileSystem.java
deleted file mode 100644
index a4e9770..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/S3InMemoryFileSystem.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3;
-
-import org.apache.hadoop.fs.s3.S3FileSystem;
-import org.apache.hadoop.fs.s3.InMemoryFileSystemStore;
-
-/**
- * A helper implementation of {@link S3FileSystem}
- * without actually connecting to S3 for unit testing.
- */
-public class S3InMemoryFileSystem extends S3FileSystem {
-  public S3InMemoryFileSystem() {
-    super(new InMemoryFileSystemStore());
-  }
-}
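S3FileSystem takes its FileSystemStore through the constructor, which is what lets this helper substitute an in-memory store for the JetS3t-backed one. A generic sketch of that constructor-injection pattern, with hypothetical names:

import java.util.HashMap;
import java.util.Map;

/** Standalone sketch of the test-double pattern S3InMemoryFileSystem uses. */
public class InjectionSketch {
  interface Store {
    void put(String key, byte[] value);
    byte[] get(String key);
  }

  /** Production class: accepts any Store, normally a remote-backed one. */
  static class StoreBackedFileSystem {
    protected final Store store;
    StoreBackedFileSystem(Store store) { this.store = store; } // injection point
  }

  /** Test double: same behavior contract, no network. */
  static class InMemoryStore implements Store {
    private final Map<String, byte[]> map = new HashMap<>();
    @Override public void put(String key, byte[] value) { map.put(key, value); }
    @Override public byte[] get(String key) { return map.get(key); }
  }

  public static void main(String[] args) {
    StoreBackedFileSystem fs = new StoreBackedFileSystem(new InMemoryStore());
    fs.store.put("/data.txt", "hello".getBytes());
    System.out.println(new String(fs.store.get("/data.txt"))); // hello
  }
}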
http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestINode.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestINode.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestINode.java
deleted file mode 100644
index 086a43e..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestINode.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.fs.s3.INode.FileType;
-
-public class TestINode extends TestCase {
-
-  public void testSerializeFileWithSingleBlock() throws IOException {
-    Block[] blocks = { new Block(849282477840258181L, 128L) };
-    INode inode = new INode(FileType.FILE, blocks);
-
-    assertEquals("Length", 1L + 4 + 16, inode.getSerializedLength());
-    InputStream in = inode.serialize();
-
-    INode deserialized = INode.deserialize(in);
-
-    assertEquals("FileType", inode.getFileType(), deserialized.getFileType());
-    Block[] deserializedBlocks = deserialized.getBlocks();
-    assertEquals("Length", 1, deserializedBlocks.length);
-    assertEquals("Id", blocks[0].getId(), deserializedBlocks[0].getId());
-    assertEquals("Length", blocks[0].getLength(), deserializedBlocks[0]
-        .getLength());
-  }
-
-  public void testSerializeDirectory() throws IOException {
-    INode inode = INode.DIRECTORY_INODE;
-    assertEquals("Length", 1L, inode.getSerializedLength());
-    InputStream in = inode.serialize();
-    INode deserialized = INode.deserialize(in);
-    assertSame(INode.DIRECTORY_INODE, deserialized);
-  }
-
-  public void testDeserializeNull() throws IOException {
-    assertNull(INode.deserialize(null));
-  }
-
-}
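The length assertion above implies the serialized INode layout: one byte of file type, a four-byte block count, and sixteen bytes (two longs, id and length) per block, so a one-block file serializes to 21 bytes and a directory to a single byte. A standalone sketch that reproduces the arithmetic; the real INode encoding may differ in detail:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

/** Standalone sketch of the layout implied by assertEquals(1L + 4 + 16, ...). */
class InodeLayoutSketch {
  static byte[] serialize(byte fileType, long[][] blocks) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    out.writeByte(fileType);       // 1 byte: file type
    out.writeInt(blocks.length);   // 4 bytes: block count
    for (long[] block : blocks) {
      out.writeLong(block[0]);     // 8 bytes: block id
      out.writeLong(block[1]);     // 8 bytes: block length
    }
    out.flush();
    return bytes.toByteArray();
  }

  public static void main(String[] args) throws IOException {
    byte[] data = serialize((byte) 1, new long[][] {{849282477840258181L, 128L}});
    System.out.println(data.length); // 21 = 1 + 4 + 16, matching the test
  }
}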
http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestInMemoryS3FileSystemContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestInMemoryS3FileSystemContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestInMemoryS3FileSystemContract.java
deleted file mode 100644
index 5d66cf1..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestInMemoryS3FileSystemContract.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3;
-
-import java.io.IOException;
-
-public class TestInMemoryS3FileSystemContract
-    extends S3FileSystemContractBaseTest {
-
-  @Override
-  FileSystemStore getFileSystemStore() throws IOException {
-    return new InMemoryFileSystemStore();
-  }
-
-}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3Credentials.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3Credentials.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3Credentials.java
deleted file mode 100644
index bcd52c0..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3Credentials.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.fs.s3;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.ProviderUtils;
-import org.apache.hadoop.security.alias.CredentialProvider;
-import org.apache.hadoop.security.alias.CredentialProviderFactory;
-
-import java.io.File;
-import java.net.URI;
-
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-import org.junit.rules.TestName;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-public class TestS3Credentials {
-  public static final Log LOG = LogFactory.getLog(TestS3Credentials.class);
-
-  @Rule
-  public final TestName test = new TestName();
-
-  @Before
-  public void announce() {
-    LOG.info("Running test " + test.getMethodName());
-  }
-
-  private static final String EXAMPLE_ID = "AKASOMEACCESSKEY";
-  private static final String EXAMPLE_KEY =
-      "RGV0cm9pdCBSZ/WQgY2xl/YW5lZCB1cAEXAMPLE";
-
-  @Test
-  public void testInvalidHostnameWithUnderscores() throws Exception {
-    S3Credentials s3Credentials = new S3Credentials();
-    try {
-      s3Credentials.initialize(new URI("s3://a:b@c_d"), new Configuration());
-      fail("Should throw IllegalArgumentException");
-    } catch (IllegalArgumentException e) {
-      assertEquals("Invalid hostname in URI s3://a:b@c_d", e.getMessage());
-    }
-  }
-
-  @Test
-  public void testPlaintextConfigPassword() throws Exception {
-    S3Credentials s3Credentials = new S3Credentials();
-    Configuration conf = new Configuration();
-    conf.set("fs.s3.awsAccessKeyId", EXAMPLE_ID);
-    conf.set("fs.s3.awsSecretAccessKey", EXAMPLE_KEY);
-    s3Credentials.initialize(new URI("s3://foobar"), conf);
-    assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
-        s3Credentials.getAccessKey());
-    assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
-        s3Credentials.getSecretAccessKey());
-  }
-
-  @Test
-  public void testPlaintextConfigPasswordWithWhitespace() throws Exception {
-    S3Credentials s3Credentials = new S3Credentials();
-    Configuration conf = new Configuration();
-    conf.set("fs.s3.awsAccessKeyId", "\r\n " + EXAMPLE_ID + " \r\n");
-    conf.set("fs.s3.awsSecretAccessKey", "\r\n " + EXAMPLE_KEY + " \r\n");
-    s3Credentials.initialize(new URI("s3://foobar"), conf);
-    assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
-        s3Credentials.getAccessKey());
-    assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
-        s3Credentials.getSecretAccessKey());
-  }
-
-  @Rule
-  public final TemporaryFolder tempDir = new TemporaryFolder();
-
-  @Test
-  public void testCredentialProvider() throws Exception {
-    // set up conf to have a cred provider
-    final Configuration conf = new Configuration();
-    final File file = tempDir.newFile("test.jks");
-    final URI jks = ProviderUtils.nestURIForLocalJavaKeyStoreProvider(
-        file.toURI());
-    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
-        jks.toString());
-
-    // add our creds to the provider
-    final CredentialProvider provider =
-        CredentialProviderFactory.getProviders(conf).get(0);
-    provider.createCredentialEntry("fs.s3.awsSecretAccessKey",
-        EXAMPLE_KEY.toCharArray());
-    provider.flush();
-
-    // make sure S3Creds can retrieve things.
-    S3Credentials s3Credentials = new S3Credentials();
-    conf.set("fs.s3.awsAccessKeyId", EXAMPLE_ID);
-    s3Credentials.initialize(new URI("s3://foobar"), conf);
-    assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
-        s3Credentials.getAccessKey());
-    assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
-        s3Credentials.getSecretAccessKey());
-  }
-
-  @Test(expected=IllegalArgumentException.class)
-  @Ignore
-  public void noSecretShouldThrow() throws Exception {
-    S3Credentials s3Credentials = new S3Credentials();
-    Configuration conf = new Configuration();
-    conf.set("fs.s3.awsAccessKeyId", EXAMPLE_ID);
-    s3Credentials.initialize(new URI("s3://foobar"), conf);
-  }
-
-  @Test(expected=IllegalArgumentException.class)
-  @Ignore
-  public void noAccessIdShouldThrow() throws Exception {
-    S3Credentials s3Credentials = new S3Credentials();
-    Configuration conf = new Configuration();
-    conf.set("fs.s3.awsSecretAccessKey", EXAMPLE_KEY);
-    s3Credentials.initialize(new URI("s3://foobar"), conf);
-  }
-}
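testCredentialProvider exercises the keystore-backed credential path: the secret lives in a local JKS provider rather than in plaintext configuration, and is resolved through the provider before any config fallback. A condensed sketch of that flow outside JUnit; the keystore path is illustrative, and the final lookup via Configuration.getPassword is an assumption about how S3Credentials resolves the key:

import java.io.File;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.ProviderUtils;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;

/** Sketch: resolve a secret through the credential-provider API. */
public class CredentialProviderSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical keystore location; the test uses a TemporaryFolder instead.
    URI jks = ProviderUtils.nestURIForLocalJavaKeyStoreProvider(
        new File("/tmp/test.jks").toURI());
    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, jks.toString());

    // Write the secret into the provider, never into the plain config.
    CredentialProvider provider =
        CredentialProviderFactory.getProviders(conf).get(0);
    provider.createCredentialEntry("fs.s3.awsSecretAccessKey",
        "mySecret".toCharArray());
    provider.flush();

    // getPassword consults providers before falling back to the config entry,
    // which is presumably how S3Credentials picks the secret up.
    char[] secret = conf.getPassword("fs.s3.awsSecretAccessKey");
    System.out.println(new String(secret));
  }
}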
http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3FileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3FileSystem.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3FileSystem.java
deleted file mode 100644
index 4947f08..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3FileSystem.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3;
-
-import java.io.IOException;
-import java.net.URI;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.conf.Configuration;
-
-public class TestS3FileSystem extends TestCase {
-
-  public static final URI EXPECTED = URI.create("s3://c");
-
-  public void testInitialization() throws IOException {
-    initializationTest("s3://a:b@c");
-    initializationTest("s3://a:b@c/");
-    initializationTest("s3://a:b@c/path");
-    initializationTest("s3://a@c");
-    initializationTest("s3://a@c/");
-    initializationTest("s3://a@c/path");
-    initializationTest("s3://c");
-    initializationTest("s3://c/");
-    initializationTest("s3://c/path");
-  }
-
-  private void initializationTest(String initializationUri)
-      throws IOException {
-
-    S3FileSystem fs = new S3FileSystem(new InMemoryFileSystemStore());
-    fs.initialize(URI.create(initializationUri), new Configuration());
-    assertEquals(EXPECTED, fs.getUri());
-  }
-
-}
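The expectations above pin down one invariant: initialization strips user info and path, reducing any s3://user:pass@bucket/path URI to s3://bucket. A standalone sketch of that canonicalization:

import java.net.URI;

/** Standalone sketch: reduce s3://user:pass@bucket/path to scheme://host,
 *  the invariant TestS3FileSystem checks. */
public class CanonicalUriSketch {
  static URI canonicalize(URI uri) {
    // Drop user info, path, query, and fragment; keep scheme and host.
    return URI.create(uri.getScheme() + "://" + uri.getHost());
  }

  public static void main(String[] args) {
    System.out.println(canonicalize(URI.create("s3://a:b@c/path"))); // s3://c
    System.out.println(canonicalize(URI.create("s3://c/")));         // s3://c
  }
}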
http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3InMemoryFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3InMemoryFileSystem.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3InMemoryFileSystem.java
deleted file mode 100644
index fbdcd68..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3InMemoryFileSystem.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URI;
-import junit.framework.TestCase;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.Path;
-
-public class TestS3InMemoryFileSystem extends TestCase {
-
-  private static final String TEST_PATH = "s3://test/data.txt";
-
-  private static final String TEST_DATA = "Sample data for testing.";
-
-  private S3InMemoryFileSystem fs;
-
-  @Override
-  public void setUp() throws IOException {
-    fs = new S3InMemoryFileSystem();
-    fs.initialize(URI.create("s3://test/"), new Configuration());
-  }
-
-  public void testBasicReadWriteIO() throws IOException {
-    FSDataOutputStream writeStream = fs.create(new Path(TEST_PATH));
-    writeStream.write(TEST_DATA.getBytes());
-    writeStream.flush();
-    writeStream.close();
-
-    FSDataInputStream readStream = fs.open(new Path(TEST_PATH));
-    BufferedReader br = new BufferedReader(new InputStreamReader(readStream));
-    String line = "";
-    StringBuffer stringBuffer = new StringBuffer();
-    while ((line = br.readLine()) != null) {
-      stringBuffer.append(line);
-    }
-    br.close();
-
-    assert(TEST_DATA.equals(stringBuffer.toString()));
-  }
-
-  @Override
-  public void tearDown() throws IOException {
-    fs.close();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/96fa0f84/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java
index ac572aa..c082493 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.fs.s3native;
 
 import static org.apache.hadoop.fs.s3native.NativeS3FileSystem.PATH_DELIMITER;
+import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_BUFFER_DIR_KEY;
+import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.addDeprecatedConfigKeys;
 
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
@@ -48,7 +50,12 @@ import org.apache.hadoop.util.Time;
  * </p>
  */
 public class InMemoryNativeFileSystemStore implements NativeFileSystemStore {
-
+
+  static {
+    // Add the deprecated config keys
+    addDeprecatedConfigKeys();
+  }
+
   private Configuration conf;
 
   private SortedMap<String, FileMetadata> metadataMap =
@@ -114,7 +121,7 @@ public class InMemoryNativeFileSystemStore implements NativeFileSystemStore {
   }
 
   private File createTempFile() throws IOException {
-    File dir = new File(conf.get("fs.s3.buffer.dir"));
+    File dir = new File(conf.get(S3_NATIVE_BUFFER_DIR_KEY));
     if (!dir.exists() && !dir.mkdirs()) {
       throw new IOException("Cannot create S3 buffer directory: " + dir);
     }
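The static initializer above registers deprecated-key mappings so configurations that still set fs.s3.buffer.dir resolve through the new constant. A sketch of how such a mapping is plausibly wired with Hadoop's Configuration.addDeprecations; the new key's value (fs.s3n.buffer.dir) is an assumption, not confirmed by this diff:

import org.apache.hadoop.conf.Configuration;

/** Sketch: map an old config key to a new one so legacy settings keep working. */
public class DeprecatedKeySketch {
  static {
    Configuration.addDeprecations(new Configuration.DeprecationDelta[] {
        new Configuration.DeprecationDelta("fs.s3.buffer.dir",
            "fs.s3n.buffer.dir") // assumed value of S3_NATIVE_BUFFER_DIR_KEY
    });
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.set("fs.s3.buffer.dir", "/tmp/s3");
    // The read goes through the deprecation mapping to the new key.
    System.out.println(conf.get("fs.s3n.buffer.dir")); // /tmp/s3
  }
}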
