Repository: hadoop Updated Branches: refs/heads/trunk 9195a6e30 -> 268ab4e02
http://git-wip-us.apache.org/repos/asf/hadoop/blob/268ab4e0/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACredentialsInURL.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACredentialsInURL.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACredentialsInURL.java index 95d44cc..4ee0fcb 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACredentialsInURL.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACredentialsInURL.java @@ -19,9 +19,10 @@ package org.apache.hadoop.fs.s3a; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.test.LambdaTestUtils; + import org.junit.After; import org.junit.Assert; import org.junit.Assume; @@ -37,6 +38,7 @@ import java.net.URI; import java.net.URLEncoder; import java.nio.file.AccessDeniedException; +import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER; import static org.apache.hadoop.fs.s3a.S3ATestConstants.TEST_FS_S3A_NAME; import static org.apache.hadoop.fs.s3a.S3ATestUtils.assumeS3GuardState; @@ -120,11 +122,12 @@ public class ITestS3ACredentialsInURL extends Assert { /** * Set up some invalid credentials, verify login is rejected. 
- * @throws Throwable */ @Test public void testInvalidCredentialsFail() throws Throwable { Configuration conf = new Configuration(); + // use the default credential provider chain + conf.unset(AWS_CREDENTIALS_PROVIDER); String fsname = conf.getTrimmed(TEST_FS_S3A_NAME, ""); Assume.assumeNotNull(fsname); assumeS3GuardState(false, conf); @@ -132,14 +135,11 @@ public class ITestS3ACredentialsInURL extends Assert { URI testURI = createUriWithEmbeddedSecrets(original, "user", "//"); conf.set(TEST_FS_S3A_NAME, testURI.toString()); - try { - fs = S3ATestUtils.createTestFileSystem(conf); - FileStatus status = fs.getFileStatus(new Path("/")); - fail("Expected an AccessDeniedException, got " + status); - } catch (AccessDeniedException e) { - // expected - } - + LambdaTestUtils.intercept(AccessDeniedException.class, + () -> { + fs = S3ATestUtils.createTestFileSystem(conf); + return fs.getFileStatus(new Path("/")); + }); } private URI createUriWithEmbeddedSecrets(URI original, http://git-wip-us.apache.org/repos/asf/hadoop/blob/268ab4e0/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java index ddf2529..7a21876 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java @@ -186,4 +186,11 @@ public class ITestS3AMiscOperations extends AbstractS3ATestBase { fs.getFileChecksum(f, HELLO.length * 2)); } + @Test + public void testS3AToStringUnitialized() throws Throwable { + try(S3AFileSystem fs = new S3AFileSystem()) { + fs.toString(); + } + } + } 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/268ab4e0/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java index f4e7c68..d6533bf 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java @@ -828,9 +828,24 @@ public final class S3ATestUtils { * Skip a test if the FS isn't marked as supporting magic commits. * @param fs filesystem */ - public void assumeMagicCommitEnabled(S3AFileSystem fs) { + public static void assumeMagicCommitEnabled(S3AFileSystem fs) { assume("Magic commit option disabled on " + fs, fs.hasCapability(CommitConstants.STORE_CAPABILITY_MAGIC_COMMITTER)); } + /** + * Probe for the configuration containing a specific credential provider. + * If the list is empty, there will be no match, even if the named provider + * is on the default list. + * + * @param conf configuration + * @param providerClassname provider class + * @return true if the configuration contains that classname. 
+ */ + public static boolean authenticationContains(Configuration conf, + String providerClassname) { + return conf.getTrimmedStringCollection(AWS_CREDENTIALS_PROVIDER) + .contains(providerClassname); + } + } http://git-wip-us.apache.org/repos/asf/hadoop/blob/268ab4e0/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java index 82a8b84..a5be5de 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java @@ -106,6 +106,8 @@ public class TestS3AAWSCredentialsProvider { public void testDefaultChain() throws Exception { URI uri1 = new URI("s3a://bucket1"), uri2 = new URI("s3a://bucket2"); Configuration conf = new Configuration(); + // use the default credential provider chain + conf.unset(AWS_CREDENTIALS_PROVIDER); AWSCredentialProviderList list1 = S3AUtils.createAWSCredentialProviderSet( uri1, conf); AWSCredentialProviderList list2 = S3AUtils.createAWSCredentialProviderSet( http://git-wip-us.apache.org/repos/asf/hadoop/blob/268ab4e0/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/DynamoDBLocalClientFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/DynamoDBLocalClientFactory.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/DynamoDBLocalClientFactory.java index 0291acd..9894ac4 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/DynamoDBLocalClientFactory.java +++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/DynamoDBLocalClientFactory.java @@ -35,6 +35,7 @@ import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.s3a.DefaultS3ClientFactory; import org.apache.hadoop.net.ServerSocketUtil; +import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER; import static org.apache.hadoop.fs.s3a.S3AUtils.createAWSCredentialProviderSet; import static org.apache.hadoop.fs.s3a.s3guard.DynamoDBClientFactory.DefaultDynamoDBClientFactory.getRegion; @@ -80,6 +81,8 @@ public class DynamoDBLocalClientFactory extends Configured startSingletonServer(); final Configuration conf = getConf(); + // use the default credential provider chain + conf.unset(AWS_CREDENTIALS_PROVIDER); final AWSCredentialsProvider credentials = createAWSCredentialProviderSet(null, conf); final ClientConfiguration awsConf = --------------------------------------------------------------------- To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org For additional commands, e-mail: common-commits-help@hadoop.apache.org