http://git-wip-us.apache.org/repos/asf/hadoop/blob/b54b0c1b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFinalize.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFinalize.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFinalize.java
new file mode 100644
index 0000000..e4acbae
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFinalize.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs;
+
+import java.lang.ref.WeakReference;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+
+/**
+ * Test finalize() method when "fs.abfs.impl.disable.cache" is enabled.
+ */
+public class ITestAzureBlobFileSystemFinalize extends AbstractAbfsScaleTest{
+  static final String DISABLE_CACHE_KEY = "fs.abfs.impl.disable.cache";
+
+  public ITestAzureBlobFileSystemFinalize() throws Exception {
+    super();
+  }
+
+  @Test
+  public void testFinalize() throws Exception {
+    // Disable the cache for filesystem to make sure there is no reference.
+    Configuration configuration = this.getConfiguration();
+    configuration.setBoolean(this.DISABLE_CACHE_KEY, true);
+
+    AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.get(configuration);
+
+    WeakReference<Object> ref = new WeakReference<Object>(fs);
+    fs = null;
+
+    int i = 0;
+    int maxTries = 1000;
+    while (ref.get() != null && i < maxTries) {
+      System.gc();
+      System.runFinalization();
+      i++;
+    }
+
+    Assert.assertTrue("testFinalizer didn't get cleaned up within maxTries", ref.get() == null);
+  }
+}
\ No newline at end of file
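The new finalize test relies on one mechanism: with "fs.abfs.impl.disable.cache" set, dropping the last strong reference to the filesystem should let it be collected, which a WeakReference can observe. A minimal, self-contained sketch of that pattern (not ABFS-specific; the byte-array stand-in and the short sleep between GC passes are illustrative assumptions, since System.gc() is only a hint):

import java.lang.ref.WeakReference;

/** Standalone illustration of the WeakReference-based collection check used above. */
public class WeakReferenceGcSketch {
  public static void main(String[] args) throws InterruptedException {
    Object resource = new byte[8 * 1024 * 1024]; // stand-in for the filesystem instance
    WeakReference<Object> ref = new WeakReference<>(resource);
    resource = null; // drop the only strong reference

    int maxTries = 1000;
    for (int i = 0; ref.get() != null && i < maxTries; i++) {
      System.gc();
      System.runFinalization();
      Thread.sleep(1); // brief pause; collection is not guaranteed on any single pass
    }

    if (ref.get() != null) {
      throw new AssertionError("object was not collected within " + maxTries + " attempts");
    }
    System.out.println("weak reference cleared as expected");
  }
}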
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b54b0c1b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFlush.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFlush.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFlush.java
index d90f018..2f40b64 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFlush.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFlush.java
@@ -20,12 +20,20 @@ package org.apache.hadoop.fs.azurebfs;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.EnumSet;
 import java.util.Random;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
+import java.io.IOException;
+import com.microsoft.azure.storage.blob.BlockEntry;
+import com.microsoft.azure.storage.blob.BlockListingFilter;
+import com.microsoft.azure.storage.blob.CloudBlockBlob;
+import org.apache.hadoop.fs.azure.AzureBlobStorageTestAccount;
+import org.hamcrest.core.IsEqual;
+import org.hamcrest.core.IsNot;
 import org.junit.Test;
 
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -46,6 +54,8 @@ public class ITestAzureBlobFileSystemFlush extends AbstractAbfsScaleTest {
   private static final int THREAD_SLEEP_TIME = 6000;
 
   private static final Path TEST_FILE_PATH = new Path("/testfile");
+  private static final int TEST_FILE_LENGTH = 1024 * 1024 * 8;
+  private static final int WAITING_TIME = 4000;
 
   public ITestAzureBlobFileSystemFlush() {
     super();
@@ -55,7 +65,7 @@ public void testAbfsOutputStreamAsyncFlushWithRetainUncommittedData() throws Exception {
     final AzureBlobFileSystem fs = getFileSystem();
     final byte[] b;
-    try(final FSDataOutputStream stream = fs.create(TEST_FILE_PATH)) {
+    try (FSDataOutputStream stream = fs.create(TEST_FILE_PATH)) {
       b = new byte[TEST_BUFFER_SIZE];
       new Random().nextBytes(b);
@@ -70,7 +80,7 @@
     }
 
     final byte[] r = new byte[TEST_BUFFER_SIZE];
-    try(FSDataInputStream inputStream = fs.open(TEST_FILE_PATH, 4 * ONE_MB)) {
+    try (FSDataInputStream inputStream = fs.open(TEST_FILE_PATH, 4 * ONE_MB)) {
       while (inputStream.available() != 0) {
         int result = inputStream.read(r);
@@ -84,7 +94,7 @@ public void testAbfsOutputStreamSyncFlush() throws Exception {
     final AzureBlobFileSystem fs = getFileSystem();
     final byte[] b;
-    try(final FSDataOutputStream stream = fs.create(TEST_FILE_PATH)) {
+    try (FSDataOutputStream stream = fs.create(TEST_FILE_PATH)) {
      b = new byte[TEST_BUFFER_SIZE];
      new Random().nextBytes(b);
      stream.write(b);
@@ -97,7 +107,7 @@
     }
 
     final byte[] r = new byte[TEST_BUFFER_SIZE];
-    try(FSDataInputStream inputStream = fs.open(TEST_FILE_PATH, 4 * ONE_MB)) {
+    try (FSDataInputStream inputStream = fs.open(TEST_FILE_PATH, 4 * ONE_MB)) {
       int result = inputStream.read(r);
       assertNotEquals(-1, result);
@@ -111,7 +121,7 @@
     final AzureBlobFileSystem fs = getFileSystem();
     final FileSystem.Statistics abfsStatistics;
     ExecutorService es;
-    try(final FSDataOutputStream stream = fs.create(TEST_FILE_PATH)) {
+    try (FSDataOutputStream stream = fs.create(TEST_FILE_PATH)) {
       abfsStatistics = fs.getFsStatistics();
       abfsStatistics.reset();
@@ -160,7 +170,7 @@ public void testWriteHeavyBytesToFileAsyncFlush() throws Exception {
     final AzureBlobFileSystem fs = getFileSystem();
     ExecutorService es = Executors.newFixedThreadPool(10);
-    try(final FSDataOutputStream stream = fs.create(TEST_FILE_PATH)) {
+    try (FSDataOutputStream stream = fs.create(TEST_FILE_PATH)) {
       final byte[] b = new byte[TEST_BUFFER_SIZE];
       new Random().nextBytes(b);
@@ -196,4 +206,118 @@
     FileStatus fileStatus = fs.getFileStatus(TEST_FILE_PATH);
     assertEquals((long) TEST_BUFFER_SIZE * FLUSH_TIMES, fileStatus.getLen());
   }
+
+  @Test
+  public void testFlushWithFlushEnabled() throws Exception {
+    AzureBlobStorageTestAccount testAccount = createWasbTestAccount();
+    String wasbUrl = testAccount.getFileSystem().getName();
+    String abfsUrl = wasbUrlToAbfsUrl(wasbUrl);
+    final AzureBlobFileSystem fs = this.getFileSystem(abfsUrl);
+    byte[] buffer = getRandomBytesArray();
+    CloudBlockBlob blob = testAccount.getBlobReference(TEST_FILE_PATH.toString().substring(1));
+    try (FSDataOutputStream stream = getStreamAfterWrite(fs, TEST_FILE_PATH, buffer, true)) {
+      // Wait for write request to be executed
+      Thread.sleep(WAITING_TIME);
+      stream.flush();
+      ArrayList<BlockEntry> blockList = blob.downloadBlockList(
+          BlockListingFilter.COMMITTED, null, null, null);
+      // verify block has been committed
+      assertEquals(1, blockList.size());
+    }
+  }
+
+  @Test
+  public void testFlushWithFlushDisabled() throws Exception {
+    AzureBlobStorageTestAccount testAccount = createWasbTestAccount();
+    String wasbUrl = testAccount.getFileSystem().getName();
+    String abfsUrl = wasbUrlToAbfsUrl(wasbUrl);
+    final AzureBlobFileSystem fs = this.getFileSystem(abfsUrl);
+    byte[] buffer = getRandomBytesArray();
+    CloudBlockBlob blob = testAccount.getBlobReference(TEST_FILE_PATH.toString().substring(1));
+    try (FSDataOutputStream stream = getStreamAfterWrite(fs, TEST_FILE_PATH, buffer, false)) {
+      // Wait for write request to be executed
+      Thread.sleep(WAITING_TIME);
+      stream.flush();
+      ArrayList<BlockEntry> blockList = blob.downloadBlockList(
+          BlockListingFilter.COMMITTED, null, null, null);
+      // verify block has not been committed
+      assertEquals(0, blockList.size());
+    }
+  }
+
+  @Test
+  public void testHflushWithFlushEnabled() throws Exception {
+    final AzureBlobFileSystem fs = this.getFileSystem();
+    byte[] buffer = getRandomBytesArray();
+    try (FSDataOutputStream stream = getStreamAfterWrite(fs, TEST_FILE_PATH, buffer, true)) {
+      stream.hflush();
+      validate(fs, TEST_FILE_PATH, buffer, true);
+    }
+  }
+
+  @Test
+  public void testHflushWithFlushDisabled() throws Exception {
+    final AzureBlobFileSystem fs = this.getFileSystem();
+    byte[] buffer = getRandomBytesArray();
+    try (FSDataOutputStream stream = getStreamAfterWrite(fs, TEST_FILE_PATH, buffer, false)) {
+      stream.hflush();
+      validate(fs, TEST_FILE_PATH, buffer, false);
+    }
+  }
+
+  @Test
+  public void testHsyncWithFlushEnabled() throws Exception {
+    final AzureBlobFileSystem fs = this.getFileSystem();
+    byte[] buffer = getRandomBytesArray();
+    try (FSDataOutputStream stream = getStreamAfterWrite(fs, TEST_FILE_PATH, buffer, true)) {
+      stream.hsync();
+      validate(fs, TEST_FILE_PATH, buffer, true);
+    }
+  }
+
+  @Test
+  public void testHsyncWithFlushDisabled() throws Exception {
+    final AzureBlobFileSystem fs = this.getFileSystem();
+    byte[] buffer = getRandomBytesArray();
+    try (FSDataOutputStream stream = getStreamAfterWrite(fs, TEST_FILE_PATH, buffer, false)) {
+      stream.hsync();
+      validate(fs, TEST_FILE_PATH, buffer, false);
+    }
+  }
+
+  private byte[] getRandomBytesArray() {
+    final byte[] b = new byte[TEST_FILE_LENGTH];
+    new Random().nextBytes(b);
+    return b;
+  }
+
+  private FSDataOutputStream getStreamAfterWrite(AzureBlobFileSystem fs, Path path, byte[] buffer, boolean enableFlush) throws IOException {
+    fs.getAbfsStore().getAbfsConfiguration().setEnableFlush(enableFlush);
+    FSDataOutputStream stream = fs.create(path);
+    stream.write(buffer);
+    return stream;
+  }
+
+  private AzureBlobStorageTestAccount createWasbTestAccount() throws Exception {
+    return AzureBlobStorageTestAccount.create("", EnumSet.of(AzureBlobStorageTestAccount.CreateOptions.CreateContainer),
+        this.getConfiguration());
+  }
+
+  private void validate(FileSystem fs, Path path, byte[] writeBuffer, boolean isEqual) throws IOException {
+    String filePath = path.toUri().toString();
+    try (FSDataInputStream inputStream = fs.open(path)) {
+      byte[] readBuffer = new byte[TEST_FILE_LENGTH];
+      int numBytesRead = inputStream.read(readBuffer, 0, readBuffer.length);
+      if (isEqual) {
+        assertArrayEquals(
+            String.format("Bytes read do not match bytes written to %1$s", filePath), writeBuffer, readBuffer);
+      } else {
+        assertThat(
+            String.format("Bytes read unexpectedly match bytes written to %1$s",
+                filePath),
+            readBuffer,
+            IsNot.not(IsEqual.equalTo(writeBuffer)));
+      }
+    }
+  }
 }
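The two block-list tests above wait a fixed WAITING_TIME before inspecting the committed block list. A polling helper is one way to make that check less timing-sensitive; this sketch uses only the SDK call already exercised in the tests, and the timeout and poll interval are illustrative choices, not part of the patch:

import java.util.ArrayList;

import com.microsoft.azure.storage.blob.BlockEntry;
import com.microsoft.azure.storage.blob.BlockListingFilter;
import com.microsoft.azure.storage.blob.CloudBlockBlob;

/** Illustrative helper: poll the committed block list instead of sleeping for a fixed time. */
final class CommittedBlockPoller {
  private CommittedBlockPoller() {
  }

  static ArrayList<BlockEntry> awaitCommittedBlocks(CloudBlockBlob blob,
      int expectedCount, long timeoutMillis) throws Exception {
    long deadline = System.currentTimeMillis() + timeoutMillis;
    ArrayList<BlockEntry> blockList =
        blob.downloadBlockList(BlockListingFilter.COMMITTED, null, null, null);
    while (blockList.size() < expectedCount && System.currentTimeMillis() < deadline) {
      Thread.sleep(250); // poll interval chosen arbitrarily for the sketch
      blockList = blob.downloadBlockList(BlockListingFilter.COMMITTED, null, null, null);
    }
    return blockList;
  }
}

A test could then assert once on the returned list, e.g. assertEquals(1, CommittedBlockPoller.awaitCommittedBlocks(blob, 1, WAITING_TIME).size()).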
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b54b0c1b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemInitAndCreate.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemInitAndCreate.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemInitAndCreate.java
index 5a6e46d..874a8a3 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemInitAndCreate.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemInitAndCreate.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.fs.azurebfs;
 
 import java.io.FileNotFoundException;
 
-import org.junit.Test;
-
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
+import org.junit.Test;
+
 import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
 
 /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b54b0c1b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemRename.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemRename.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemRename.java
index 1a0edaf..07426c4 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemRename.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemRename.java
@@ -30,7 +30,6 @@
 import org.junit.Test;
 
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 
-import static org.apache.hadoop.fs.contract.ContractTestUtils.assertIsDirectory;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.assertMkdirs;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.assertPathDoesNotExist;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.assertRenameOutcome;
@@ -86,7 +85,7 @@ public class ITestAzureBlobFileSystemRename extends
     assertRenameOutcome(fs, test1, new Path("testDir/test10"), true);
-    assertPathDoesNotExist(fs, "rename source dir", test1 );
+    assertPathDoesNotExist(fs, "rename source dir", test1);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b54b0c1b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestFileSystemProperties.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestFileSystemProperties.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestFileSystemProperties.java
index 1c71125..7a7e327 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestFileSystemProperties.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestFileSystemProperties.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.fs.azurebfs;
 
 import java.util.Hashtable;
 
-import org.junit.Ignore;
 import org.junit.Test;
 
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -28,8 +27,6 @@
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
-import static org.junit.Assert.assertEquals;
-
 /**
  * Test FileSystemProperties.
  */
@@ -62,7 +59,6 @@ public class ITestFileSystemProperties extends AbstractAbfsIntegrationTest {
   }
 
   @Test
-  @Ignore("JDK7 doesn't support PATCH, so PUT is used. Fix is applied in latest test tenant")
   public void testBase64FileSystemProperties() throws Exception {
     final AzureBlobFileSystem fs = getFileSystem();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b54b0c1b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
new file mode 100644
index 0000000..fb667dd
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
@@ -0,0 +1,149 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs;
+
+import java.lang.reflect.Field;
+
+import org.apache.commons.codec.Charsets;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
+import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.IntegerConfigurationValidatorAnnotation;
+import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.BooleanConfigurationValidatorAnnotation;
+import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.StringConfigurationValidatorAnnotation;
+import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.LongConfigurationValidatorAnnotation;
+import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.Base64StringConfigurationValidatorAnnotation;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.ConfigurationPropertyNotFoundException;
+
+import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_READ_BUFFER_SIZE;
+import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_WRITE_BUFFER_SIZE;
+import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_MAX_RETRY_ATTEMPTS;
+import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_BACKOFF_INTERVAL;
+import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_MAX_BACKOFF_INTERVAL;
+import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_MIN_BACKOFF_INTERVAL;
+import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.MAX_AZURE_BLOCK_SIZE;
+import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.AZURE_BLOCK_LOCATION_HOST_DEFAULT;
+
+import org.apache.commons.codec.binary.Base64;
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Test;
+
+/**
+ * Test ConfigurationServiceFieldsValidation.
+ */
+public class TestAbfsConfigurationFieldsValidation {
+  private AbfsConfiguration abfsConfiguration;
+
+  private static final String INT_KEY= "intKey";
+  private static final String LONG_KEY= "longKey";
+  private static final String STRING_KEY= "stringKey";
+  private static final String BASE64_KEY= "base64Key";
+  private static final String BOOLEAN_KEY= "booleanKey";
+  private static final int DEFAULT_INT = 4194304;
+  private static final int DEFAULT_LONG = 4194304;
+
+  private static final int TEST_INT = 1234565;
+  private static final int TEST_LONG = 4194304;
+
+  private final String encodedString;
+  private final String encodedAccountKey;
+
+  @IntegerConfigurationValidatorAnnotation(ConfigurationKey = INT_KEY,
+      MinValue = Integer.MIN_VALUE,
+      MaxValue = Integer.MAX_VALUE,
+      DefaultValue = DEFAULT_INT)
+  private int intField;
+
+  @LongConfigurationValidatorAnnotation(ConfigurationKey = LONG_KEY,
+      MinValue = Long.MIN_VALUE,
+      MaxValue = Long.MAX_VALUE,
+      DefaultValue = DEFAULT_LONG)
+  private int longField;
+
+  @StringConfigurationValidatorAnnotation(ConfigurationKey = STRING_KEY,
+      DefaultValue = "default")
+  private String stringField;
+
+  @Base64StringConfigurationValidatorAnnotation(ConfigurationKey = BASE64_KEY,
+      DefaultValue = "base64")
+  private String base64Field;
+
+  @BooleanConfigurationValidatorAnnotation(ConfigurationKey = BOOLEAN_KEY,
+      DefaultValue = false)
+  private boolean boolField;
+
+  public TestAbfsConfigurationFieldsValidation() throws Exception {
+    super();
+    Base64 base64 = new Base64();
+    this.encodedString = new String(base64.encode("base64Value".getBytes(Charsets.UTF_8)), Charsets.UTF_8);
+    this.encodedAccountKey = new String(base64.encode("someAccountKey".getBytes(Charsets.UTF_8)), Charsets.UTF_8);
+    Configuration configuration = new Configuration();
+    configuration.addResource("azure-bfs-test.xml");
+    configuration.set(INT_KEY, "1234565");
+    configuration.set(LONG_KEY, "4194304");
+    configuration.set(STRING_KEY, "stringValue");
+    configuration.set(BASE64_KEY, encodedString);
+    configuration.set(BOOLEAN_KEY, "true");
+    configuration.set(ConfigurationKeys.FS_AZURE_ACCOUNT_KEY_PROPERTY_NAME + "testaccount1.blob.core.windows.net", this.encodedAccountKey);
+    abfsConfiguration = new AbfsConfiguration(configuration);
+  }
+
+  @Test
+  public void testValidateFunctionsInConfigServiceImpl() throws Exception {
+    Field[] fields = this.getClass().getDeclaredFields();
+    for (Field field : fields) {
+      field.setAccessible(true);
+      if (field.isAnnotationPresent(IntegerConfigurationValidatorAnnotation.class)) {
+        assertEquals(TEST_INT, abfsConfiguration.validateInt(field));
+      } else if (field.isAnnotationPresent(LongConfigurationValidatorAnnotation.class)) {
+        assertEquals(DEFAULT_LONG, abfsConfiguration.validateLong(field));
+      } else if (field.isAnnotationPresent(StringConfigurationValidatorAnnotation.class)) {
+        assertEquals("stringValue", abfsConfiguration.validateString(field));
+      } else if (field.isAnnotationPresent(Base64StringConfigurationValidatorAnnotation.class)) {
+        assertEquals(this.encodedString, abfsConfiguration.validateBase64String(field));
+      } else if (field.isAnnotationPresent(BooleanConfigurationValidatorAnnotation.class)) {
+        assertEquals(true, abfsConfiguration.validateBoolean(field));
+      }
+    }
+  }
+
+  @Test
+  public void testConfigServiceImplAnnotatedFieldsInitialized() throws Exception {
+    // test that all the ConfigurationServiceImpl annotated fields have been initialized in the constructor
+    assertEquals(DEFAULT_WRITE_BUFFER_SIZE, abfsConfiguration.getWriteBufferSize());
+    assertEquals(DEFAULT_READ_BUFFER_SIZE, abfsConfiguration.getReadBufferSize());
+    assertEquals(DEFAULT_MIN_BACKOFF_INTERVAL, abfsConfiguration.getMinBackoffIntervalMilliseconds());
+    assertEquals(DEFAULT_MAX_BACKOFF_INTERVAL, abfsConfiguration.getMaxBackoffIntervalMilliseconds());
+    assertEquals(DEFAULT_BACKOFF_INTERVAL, abfsConfiguration.getBackoffIntervalMilliseconds());
+    assertEquals(DEFAULT_MAX_RETRY_ATTEMPTS, abfsConfiguration.getMaxIoRetries());
+    assertEquals(MAX_AZURE_BLOCK_SIZE, abfsConfiguration.getAzureBlockSize());
+    assertEquals(AZURE_BLOCK_LOCATION_HOST_DEFAULT, abfsConfiguration.getAzureBlockLocationHost());
+  }
+
+  @Test
+  public void testGetAccountKey() throws Exception {
+    String accountKey = abfsConfiguration.getStorageAccountKey("testaccount1.blob.core.windows.net");
+    assertEquals(this.encodedAccountKey, accountKey);
+  }
+
+  @Test (expected = ConfigurationPropertyNotFoundException.class)
+  public void testGetAccountKeyWithNonExistingAccountName() throws Exception {
+    abfsConfiguration.getStorageAccountKey("bogusAccountName");
+  }
+}
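TestAbfsConfigurationFieldsValidation drives the AbfsConfiguration validators by reflecting over its own annotated fields. The underlying pattern - declare the expected key, bounds and default as annotation elements, then read them back through reflection - can be shown without any ABFS classes; the annotation and element names below are invented purely for illustration:

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;

/** Self-contained illustration of annotation-driven field validation. */
public class AnnotatedValidationSketch {

  /** Hypothetical annotation; not part of hadoop-azure. */
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.FIELD)
  @interface IntSetting {
    String key();
    int min();
    int max();
    int defaultValue();
  }

  @IntSetting(key = "intKey", min = 0, max = Integer.MAX_VALUE, defaultValue = 4194304)
  private int intField;

  public static void main(String[] args) throws Exception {
    for (Field field : AnnotatedValidationSketch.class.getDeclaredFields()) {
      IntSetting setting = field.getAnnotation(IntSetting.class);
      if (setting == null) {
        continue;
      }
      // A real validator would look the key up in a Configuration and range-check the value;
      // here we simply fall back to the declared default and check it against the bounds.
      int value = setting.defaultValue();
      if (value < setting.min() || value > setting.max()) {
        throw new IllegalArgumentException(setting.key() + " is out of range");
      }
      System.out.println(setting.key() + " -> " + value);
    }
  }
}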
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b54b0c1b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/AbfsFileSystemContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/AbfsFileSystemContract.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/AbfsFileSystemContract.java
index d365e6e..c0c5f91 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/AbfsFileSystemContract.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/AbfsFileSystemContract.java
@@ -43,9 +43,8 @@ public class AbfsFileSystemContract extends AbstractBondedFSContract {
 
   @Override
   public String getScheme() {
-    return isSecure ?
-        FileSystemUriSchemes.ABFS_SECURE_SCHEME
-        : FileSystemUriSchemes.ABFS_SCHEME;
+    return isSecure ? FileSystemUriSchemes.ABFS_SECURE_SCHEME
+        : FileSystemUriSchemes.ABFS_SCHEME;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b54b0c1b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsReadWriteAndSeek.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsReadWriteAndSeek.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsReadWriteAndSeek.java
deleted file mode 100644
index dd06fe3..0000000
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsReadWriteAndSeek.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.azurebfs.services;
-
-import java.util.Arrays;
-import java.util.Random;
-
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.azurebfs.AbstractAbfsScaleTest;
-import org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem;
-
-import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_READ_BUFFER_SIZE;
-import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.MAX_BUFFER_SIZE;
-import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.MIN_BUFFER_SIZE;
-
-/**
- * Test read, write and seek.
- * Uses package-private methods in AbfsConfiguration, which is why it is in
- * this package.
- */
-@RunWith(Parameterized.class)
-public class ITestAbfsReadWriteAndSeek extends AbstractAbfsScaleTest {
-  private static final Path TEST_PATH = new Path("/testfile");
-
-  @Parameterized.Parameters(name = "Size={0}")
-  public static Iterable<Object[]> sizes() {
-    return Arrays.asList(new Object[][]{{MIN_BUFFER_SIZE},
-        {DEFAULT_READ_BUFFER_SIZE},
-        {MAX_BUFFER_SIZE}});
-  }
-
-  private final int size;
-
-  public ITestAbfsReadWriteAndSeek(final int size) {
-    this.size = size;
-  }
-
-  @Test
-  public void testReadAndWriteWithDifferentBufferSizesAndSeek() throws Exception {
-    testReadWriteAndSeek(size);
-  }
-
-  private void testReadWriteAndSeek(int bufferSize) throws Exception {
-    final AzureBlobFileSystem fs = getFileSystem();
-    final AbfsConfiguration abfsConfiguration = new AbfsConfiguration(getConfiguration());
-
-    abfsConfiguration.setWriteBufferSize(bufferSize);
-    abfsConfiguration.setReadBufferSize(bufferSize);
-
-
-    final byte[] b = new byte[2 * bufferSize];
-    new Random().nextBytes(b);
-    try(final FSDataOutputStream stream = fs.create(TEST_PATH)) {
-      stream.write(b);
-    }
-
-    final byte[] readBuffer = new byte[2 * bufferSize];
-    int result;
-    try(final FSDataInputStream inputStream = fs.open(TEST_PATH)) {
-      inputStream.seek(bufferSize);
-      result = inputStream.read(readBuffer, bufferSize, bufferSize);
-      assertNotEquals(-1, result);
-      inputStream.seek(0);
-      result = inputStream.read(readBuffer, 0, bufferSize);
-    }
-    assertNotEquals("data read in final read()", -1, result);
-    assertArrayEquals(readBuffer, b);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b54b0c1b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestAbfsClient.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestAbfsClient.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestAbfsClient.java
new file mode 100644
index 0000000..0b335a5
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestAbfsClient.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.services;
+
+import java.net.URL;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.fs.azurebfs.AbfsConfiguration;
+import org.junit.Assert;
+import org.junit.Test;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
+
+/**
+ * Test useragent of abfs client.
+ *
+ */
+public final class TestAbfsClient {
+
+  @Test
+  public void verifyUnknownUserAgent() throws Exception {
+    String expectedUserAgentPattern = "Azure Blob FS\\/1.0 \\(JavaJRE ([^\\)]+)\\)";
+    final Configuration configuration = new Configuration();
+    configuration.unset(ConfigurationKeys.FS_AZURE_USER_AGENT_PREFIX_KEY);
+    AbfsConfiguration abfsConfiguration = new AbfsConfiguration(configuration);
+    AbfsClient abfsClient = new AbfsClient(new URL("http://azure.com"), null, abfsConfiguration, null);
+    String userAgent = abfsClient.initializeUserAgent(abfsConfiguration);
+    Pattern pattern = Pattern.compile(expectedUserAgentPattern);
+    Assert.assertTrue(pattern.matcher(userAgent).matches());
+  }
+
+  @Test
+  public void verifyUserAgent() throws Exception {
+    String expectedUserAgentPattern = "Azure Blob FS\\/1.0 \\(JavaJRE ([^\\)]+)\\) Partner Service";
+    final Configuration configuration = new Configuration();
+    configuration.set(ConfigurationKeys.FS_AZURE_USER_AGENT_PREFIX_KEY, "Partner Service");
+    AbfsConfiguration abfsConfiguration = new AbfsConfiguration(configuration);
+    AbfsClient abfsClient = new AbfsClient(new URL("http://azure.com"), null, abfsConfiguration, null);
+    String userAgent = abfsClient.initializeUserAgent(abfsConfiguration);
+    Pattern pattern = Pattern.compile(expectedUserAgentPattern);
+    Assert.assertTrue(pattern.matcher(userAgent).matches());
+  }
+}
\ No newline at end of file
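Both user-agent tests come down to whole-string regex matches. As a quick standalone check, the same patterns behave as the assertions expect against hand-written sample strings (the JRE version shown is made up for the example):

import java.util.regex.Pattern;

/** Quick check of the user-agent patterns used in TestAbfsClient against sample strings. */
public class UserAgentPatternDemo {
  public static void main(String[] args) {
    Pattern base = Pattern.compile("Azure Blob FS\\/1.0 \\(JavaJRE ([^\\)]+)\\)");
    Pattern prefixed = Pattern.compile("Azure Blob FS\\/1.0 \\(JavaJRE ([^\\)]+)\\) Partner Service");

    // Hypothetical user-agent strings shaped like the ones the client would build.
    String plain = "Azure Blob FS/1.0 (JavaJRE 1.8.0_172)";
    String withSuffix = "Azure Blob FS/1.0 (JavaJRE 1.8.0_172) Partner Service";

    System.out.println(base.matcher(plain).matches());          // true
    System.out.println(base.matcher(withSuffix).matches());     // false: matches() requires the whole string
    System.out.println(prefixed.matcher(withSuffix).matches()); // true
  }
}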
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b54b0c1b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestAbfsConfigurationFieldsValidation.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestAbfsConfigurationFieldsValidation.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestAbfsConfigurationFieldsValidation.java
deleted file mode 100644
index ebaafa4..0000000
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestAbfsConfigurationFieldsValidation.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.azurebfs.services;
-
-import java.lang.reflect.Field;
-
-import org.apache.commons.codec.Charsets;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
-import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.IntegerConfigurationValidatorAnnotation;
-import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.BooleanConfigurationValidatorAnnotation;
-import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.StringConfigurationValidatorAnnotation;
-import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.LongConfigurationValidatorAnnotation;
-import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.Base64StringConfigurationValidatorAnnotation;
-import org.apache.hadoop.fs.azurebfs.contracts.exceptions.ConfigurationPropertyNotFoundException;
-
-import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_READ_BUFFER_SIZE;
-import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_WRITE_BUFFER_SIZE;
-import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_MAX_RETRY_ATTEMPTS;
-import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_BACKOFF_INTERVAL;
-import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_MAX_BACKOFF_INTERVAL;
-import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_MIN_BACKOFF_INTERVAL;
-import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.MAX_AZURE_BLOCK_SIZE;
-import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.AZURE_BLOCK_LOCATION_HOST_DEFAULT;
-
-import org.apache.commons.codec.binary.Base64;
-import static org.junit.Assert.assertEquals;
-
-import org.junit.Test;
-
-/**
- * Test ConfigurationServiceFieldsValidation.
- */
-public class TestAbfsConfigurationFieldsValidation {
-  private final AbfsConfiguration abfsConfiguration;
-
-  private static final String INT_KEY= "intKey";
-  private static final String LONG_KEY= "longKey";
-  private static final String STRING_KEY= "stringKey";
-  private static final String BASE64_KEY= "base64Key";
-  private static final String BOOLEAN_KEY= "booleanKey";
-  private static final int DEFAULT_INT = 4194304;
-  private static final int DEFAULT_LONG = 4194304;
-
-  private static final int TEST_INT = 1234565;
-  private static final int TEST_LONG = 4194304;
-
-  private final String encodedString;
-  private final String encodedAccountKey;
-
-  @IntegerConfigurationValidatorAnnotation(ConfigurationKey = INT_KEY,
-      MinValue = Integer.MIN_VALUE,
-      MaxValue = Integer.MAX_VALUE,
-      DefaultValue = DEFAULT_INT)
-  private int intField;
-
-  @LongConfigurationValidatorAnnotation(ConfigurationKey = LONG_KEY,
-      MinValue = Long.MIN_VALUE,
-      MaxValue = Long.MAX_VALUE,
-      DefaultValue = DEFAULT_LONG)
-  private int longField;
-
-  @StringConfigurationValidatorAnnotation(ConfigurationKey = STRING_KEY,
-      DefaultValue = "default")
-  private String stringField;
-
-  @Base64StringConfigurationValidatorAnnotation(ConfigurationKey = BASE64_KEY,
-      DefaultValue = "base64")
-  private String base64Field;
-
-  @BooleanConfigurationValidatorAnnotation(ConfigurationKey = BOOLEAN_KEY,
-      DefaultValue = false)
-  private boolean boolField;
-
-  public TestAbfsConfigurationFieldsValidation() throws Exception {
-    Base64 base64 = new Base64();
-    this.encodedString = new String(base64.encode("base64Value".getBytes(Charsets.UTF_8)), Charsets.UTF_8);
-    this.encodedAccountKey = new String(base64.encode("someAccountKey".getBytes(Charsets.UTF_8)), Charsets.UTF_8);
-    Configuration configuration = new Configuration(false);
-    configuration.set(INT_KEY, "1234565");
-    configuration.set(LONG_KEY, "4194304");
-    configuration.set(STRING_KEY, "stringValue");
-    configuration.set(BASE64_KEY, encodedString);
-    configuration.set(BOOLEAN_KEY, "true");
-    configuration.set(ConfigurationKeys.FS_AZURE_ACCOUNT_KEY_PROPERTY_NAME + "testaccount1.blob.core.windows.net", this.encodedAccountKey);
-    abfsConfiguration = new AbfsConfiguration(configuration);
-  }
-
-  @Test
-  public void testValidateFunctionsInConfigServiceImpl() throws Exception {
-    Field[] fields = this.getClass().getDeclaredFields();
-    for (Field field : fields) {
-      field.setAccessible(true);
-      if (field.isAnnotationPresent(IntegerConfigurationValidatorAnnotation.class)) {
-        assertEquals(TEST_INT, abfsConfiguration.validateInt(field));
-      } else if (field.isAnnotationPresent(LongConfigurationValidatorAnnotation.class)) {
-        assertEquals(DEFAULT_LONG, abfsConfiguration.validateLong(field));
-      } else if (field.isAnnotationPresent(StringConfigurationValidatorAnnotation.class)) {
-        assertEquals("stringValue", abfsConfiguration.validateString(field));
-      } else if (field.isAnnotationPresent(Base64StringConfigurationValidatorAnnotation.class)) {
-        assertEquals(this.encodedString, abfsConfiguration.validateBase64String(field));
-      } else if (field.isAnnotationPresent(BooleanConfigurationValidatorAnnotation.class)) {
-        assertEquals(true, abfsConfiguration.validateBoolean(field));
-      }
-    }
-  }
-
-  @Test
-  public void testConfigServiceImplAnnotatedFieldsInitialized() throws Exception {
-    // test that all the ConfigurationServiceImpl annotated fields have been initialized in the constructor
-    assertEquals(DEFAULT_WRITE_BUFFER_SIZE, abfsConfiguration.getWriteBufferSize());
-    assertEquals(DEFAULT_READ_BUFFER_SIZE, abfsConfiguration.getReadBufferSize());
-    assertEquals(DEFAULT_MIN_BACKOFF_INTERVAL, abfsConfiguration.getMinBackoffIntervalMilliseconds());
-    assertEquals(DEFAULT_MAX_BACKOFF_INTERVAL, abfsConfiguration.getMaxBackoffIntervalMilliseconds());
-    assertEquals(DEFAULT_BACKOFF_INTERVAL, abfsConfiguration.getBackoffIntervalMilliseconds());
-    assertEquals(DEFAULT_MAX_RETRY_ATTEMPTS, abfsConfiguration.getMaxIoRetries());
-    assertEquals(MAX_AZURE_BLOCK_SIZE, abfsConfiguration.getAzureBlockSize());
-    assertEquals(AZURE_BLOCK_LOCATION_HOST_DEFAULT, abfsConfiguration.getAzureBlockLocationHost());
-  }
-
-  @Test
-  public void testGetAccountKey() throws Exception {
-    String accountKey = abfsConfiguration.getStorageAccountKey("testaccount1.blob.core.windows.net");
-    assertEquals(this.encodedAccountKey, accountKey);
-  }
-
-  @Test (expected = ConfigurationPropertyNotFoundException.class)
-  public void testGetAccountKeyWithNonExistingAccountName() throws Exception {
-    abfsConfiguration.getStorageAccountKey("bogusAccountName");
-  }
-}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b54b0c1b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestShellDecryptionKeyProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestShellDecryptionKeyProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestShellDecryptionKeyProvider.java
new file mode 100644
index 0000000..d17e767
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestShellDecryptionKeyProvider.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.services;
+
+import java.io.File;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.KeyProviderException;
+import org.apache.hadoop.util.Shell;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Test ShellDecryptionKeyProvider.
+ *
+ */
+public class TestShellDecryptionKeyProvider {
+  public static final Log LOG = LogFactory
+      .getLog(TestShellDecryptionKeyProvider.class);
+  private static final File TEST_ROOT_DIR = new File(System.getProperty(
+      "test.build.data", "/tmp"), "TestShellDecryptionKeyProvider");
+
+  @Test
+  public void testScriptPathNotSpecified() throws Exception {
+    if (!Shell.WINDOWS) {
+      return;
+    }
+    ShellDecryptionKeyProvider provider = new ShellDecryptionKeyProvider();
+    Configuration conf = new Configuration();
+    String account = "testacct";
+    String key = "key";
+
+    conf.set(ConfigurationKeys.FS_AZURE_ACCOUNT_KEY_PROPERTY_NAME + account, key);
+    try {
+      provider.getStorageAccountKey(account, conf);
+      Assert
+          .fail("fs.azure.shellkeyprovider.script is not specified, we should throw");
+    } catch (KeyProviderException e) {
+      LOG.info("Received an expected exception: " + e.getMessage());
+    }
+  }
+
+  @Test
+  public void testValidScript() throws Exception {
+    if (!Shell.WINDOWS) {
+      return;
+    }
+    String expectedResult = "decretedKey";
+
+    // Create a simple script which echoes the given key plus the given
+    // expected result (so that we validate both script input and output)
+    File scriptFile = new File(TEST_ROOT_DIR, "testScript.cmd");
+    FileUtils.writeStringToFile(scriptFile, "@echo %1 " + expectedResult);
+
+    ShellDecryptionKeyProvider provider = new ShellDecryptionKeyProvider();
+    Configuration conf = new Configuration();
+    String account = "testacct";
+    String key = "key1";
+    conf.set(ConfigurationKeys.FS_AZURE_ACCOUNT_KEY_PROPERTY_NAME + account, key);
+    conf.set(ConfigurationKeys.AZURE_KEY_ACCOUNT_SHELLKEYPROVIDER_SCRIPT,
+        "cmd /c " + scriptFile.getAbsolutePath());
+
+    String result = provider.getStorageAccountKey(account, conf);
+    assertEquals(key + " " + expectedResult, result);
+  }
+}