http://git-wip-us.apache.org/repos/asf/hbase/blob/1eac103e/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java
new file mode 100644
index 0000000..7501849
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java
@@ -0,0 +1,192 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io.asyncfs;
+
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATA_ENCRYPTION_ALGORITHM_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY;
+
+import io.netty.channel.EventLoop;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.nio.NioEventLoopGroup;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+import java.util.concurrent.ExecutionException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
+import org.apache.hadoop.hbase.security.token.TestGenerateDelegationToken;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameter;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(Parameterized.class)
+@Category({ MiscTests.class, MediumTests.class })
+public class TestSaslFanOutOneBlockAsyncDFSOutput {
+
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  private static DistributedFileSystem FS;
+
+  private static EventLoopGroup EVENT_LOOP_GROUP;
+
+  private static int READ_TIMEOUT_MS = 200000;
+
+  private static final File KEYTAB_FILE = new File(
+      TEST_UTIL.getDataTestDir("keytab").toUri().getPath());
+
+  private static MiniKdc KDC;
+
+  private static String HOST = "localhost";
+
+  private static String USERNAME;
+
+  private static String PRINCIPAL;
+
+  private static String HTTP_PRINCIPAL;
+  @Rule
+  public TestName name = new TestName();
+
+  @Parameter(0)
+  public String protection;
+
+  @Parameter(1)
+  public String encryptionAlgorithm;
+
+  @Parameters(name = "{index}: protection={0}, encryption={1}")
+  public static Iterable<Object[]> data() {
+    List<Object[]> params = new ArrayList<>();
+    for (String protection : Arrays.asList("authentication", "integrity", "privacy")) {
+      for (String encryptionAlgorithm : Arrays.asList("", "3des", "rc4")) {
+        params.add(new Object[] { protection, encryptionAlgorithm });
+      }
+    }
+    return params;
+  }
+
+  private static void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
+    // change XXX_USER_NAME_KEY to XXX_KERBEROS_PRINCIPAL_KEY after we drop support for hadoop-2.4.1
+    conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, PRINCIPAL + "@" + KDC.getRealm());
+    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
+    conf.set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY, PRINCIPAL + "@" + KDC.getRealm());
+    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
+    conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+      HTTP_PRINCIPAL + "@" + KDC.getRealm());
+    conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
+    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
+    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
+    conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
+
+    File keystoresDir = new File(TEST_UTIL.getDataTestDir("keystore").toUri().getPath());
+    keystoresDir.mkdirs();
+    String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestGenerateDelegationToken.class);
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
+
+    conf.setBoolean("ignore.secure.ports.for.testing", true);
+  }
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    Logger.getLogger("org.apache.hadoop.hdfs.StateChange").setLevel(Level.DEBUG);
+    Logger.getLogger("BlockStateChange").setLevel(Level.DEBUG);
+    EVENT_LOOP_GROUP = new NioEventLoopGroup();
+    TEST_UTIL.getConfiguration().setInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT_MS);
+    Properties conf = MiniKdc.createConf();
+    conf.put(MiniKdc.DEBUG, true);
+    KDC = new MiniKdc(conf, new File(TEST_UTIL.getDataTestDir("kdc").toUri().getPath()));
+    KDC.start();
+    USERNAME = UserGroupInformation.getLoginUser().getShortUserName();
+    PRINCIPAL = USERNAME + "/" + HOST;
+    HTTP_PRINCIPAL = "HTTP/" + HOST;
+    KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL);
+    setHdfsSecuredConfiguration(TEST_UTIL.getConfiguration());
+    HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath());
+    HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm());
+    HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration());
+    UserGroupInformation.setConfiguration(TEST_UTIL.getConfiguration());
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws IOException, InterruptedException {
+    if (EVENT_LOOP_GROUP != null) {
+      EVENT_LOOP_GROUP.shutdownGracefully().sync();
+    }
+    if (KDC != null) {
+      KDC.stop();
+    }
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    TEST_UTIL.getConfiguration().set("dfs.data.transfer.protection", protection);
+    if (StringUtils.isBlank(encryptionAlgorithm)) {
+      TEST_UTIL.getConfiguration().setBoolean(DFS_ENCRYPT_DATA_TRANSFER_KEY, false);
+      TEST_UTIL.getConfiguration().unset(DFS_DATA_ENCRYPTION_ALGORITHM_KEY);
+    } else {
+      TEST_UTIL.getConfiguration().setBoolean(DFS_ENCRYPT_DATA_TRANSFER_KEY, true);
+      TEST_UTIL.getConfiguration().set(DFS_DATA_ENCRYPTION_ALGORITHM_KEY, encryptionAlgorithm);
+    }
+    TEST_UTIL.startMiniDFSCluster(3);
+    FS = TEST_UTIL.getDFSCluster().getFileSystem();
+  }
+
+  @After
+  public void tearDown() throws IOException {
+    TEST_UTIL.shutdownMiniDFSCluster();
+  }
+
+  private Path getTestFile() {
+    return new Path("/" + name.getMethodName().replaceAll("[^0-9a-zA-Z]", "_"));
+  }
+
+  @Test
+  public void test() throws IOException, InterruptedException, ExecutionException {
+    Path f = getTestFile();
+    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
+    final FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f,
+      true, false, (short) 1, FS.getDefaultBlockSize(), eventLoop);
+    TestFanOutOneBlockAsyncDFSOutput.writeAndVerify(eventLoop, FS, f, out);
+  }
+}

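The new test above runs each case against a fresh secured mini DFS cluster, across the 3x3 matrix built by data(): three dfs.data.transfer.protection levels times three encryption settings (none, 3des, rc4). A minimal standalone sketch of how setUp() maps each combination onto HDFS configuration keys follows; the literal key strings behind DFS_ENCRYPT_DATA_TRANSFER_KEY and DFS_DATA_ENCRYPTION_ALGORITHM_KEY are my reading of the DFSConfigKeys constants, not something shown in the patch:

    import java.util.Arrays;

    public class ParamMatrixSketch {
      public static void main(String[] args) {
        for (String protection : Arrays.asList("authentication", "integrity", "privacy")) {
          for (String algo : Arrays.asList("", "3des", "rc4")) {
            boolean encrypt = !algo.isEmpty(); // setUp() tests StringUtils.isBlank(algo)
            System.out.println("dfs.data.transfer.protection=" + protection
                + " dfs.encrypt.data.transfer=" + encrypt
                + (encrypt ? " dfs.encryption.algorithm=" + algo : ""));
          }
        }
      }
    }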
http://git-wip-us.apache.org/repos/asf/hbase/blob/1eac103e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java
index 7d6c6d9..b64d458 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java
@@ -24,9 +24,9 @@ import java.util.concurrent.ExecutionException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.wal.AsyncFSWALProvider;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
+import org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputFlushHandler;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
-import org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputFlushHandler;
 import org.apache.hadoop.hbase.wal.WALProvider;
 import org.apache.hadoop.hbase.wal.WALProvider.AsyncWriter;
 import org.junit.AfterClass;

http://git-wip-us.apache.org/repos/asf/hbase/blob/1eac103e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputFlushHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputFlushHandler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputFlushHandler.java
deleted file mode 100644
index cbd0761..0000000
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputFlushHandler.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.util;
-
-import java.nio.channels.CompletionHandler;
-import java.util.concurrent.ExecutionException;
-
-public final class FanOutOneBlockAsyncDFSOutputFlushHandler
-    implements CompletionHandler<Long, Void> {
-
-  private long size;
-
-  private Throwable error;
-
-  private boolean finished;
-
-  @Override
-  public synchronized void completed(Long result, Void attachment) {
-    size = result.longValue();
-    finished = true;
-    notifyAll();
-  }
-
-  @Override
-  public synchronized void failed(Throwable exc, Void attachment) {
-    error = exc;
-    finished = true;
-    notifyAll();
-  }
-
-  public synchronized long get() throws InterruptedException, ExecutionException {
-    while (!finished) {
-      wait();
-    }
-    if (error != null) {
-      throw new ExecutionException(error);
-    }
-    return size;
-  }
-
-  public void reset() {
-    size = 0L;
-    error = null;
-    finished = false;
-  }
-}
\ No newline at end of file

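The handler deleted above (moved in this commit to org.apache.hadoop.hbase.io.asyncfs, as the TestAsyncProtobufLog import change shows) adapts the callback-style flush of FanOutOneBlockAsyncDFSOutput into a blocking call via wait()/notifyAll(). A minimal sketch of the usage pattern, mirroring the writeAndVerify helper in the deleted test below; "out" and "eventLoop" are assumed to be set up as in those tests:

    final byte[] b = new byte[10];
    ThreadLocalRandom.current().nextBytes(b);
    final FanOutOneBlockAsyncDFSOutputFlushHandler handler =
        new FanOutOneBlockAsyncDFSOutputFlushHandler();
    eventLoop.execute(new Runnable() {
      @Override
      public void run() {
        out.write(b, 0, b.length);       // buffer the payload on the event loop thread
        out.flush(null, handler, false); // async flush; completed()/failed() fires later
      }
    });
    long acked = handler.get();          // blocks in wait() until notifyAll()
    // handler.reset() readies the same instance for the next flush (see testRecover below)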
http://git-wip-us.apache.org/repos/asf/hbase/blob/1eac103e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFanOutOneBlockAsyncDFSOutput.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFanOutOneBlockAsyncDFSOutput.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFanOutOneBlockAsyncDFSOutput.java
deleted file mode 100644
index a10712e..0000000
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFanOutOneBlockAsyncDFSOutput.java
+++ /dev/null
@@ -1,246 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.util;
-
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.lang.reflect.Field;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ThreadLocalRandom;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.util.Daemon;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-
-import io.netty.channel.EventLoop;
-import io.netty.channel.EventLoopGroup;
-import io.netty.channel.nio.NioEventLoopGroup;
-
-@Category({ MiscTests.class, MediumTests.class })
-public class TestFanOutOneBlockAsyncDFSOutput {
-
-  private static final Log LOG = LogFactory.getLog(TestFanOutOneBlockAsyncDFSOutput.class);
-
-  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-
-  private static DistributedFileSystem FS;
-
-  private static EventLoopGroup EVENT_LOOP_GROUP;
-
-  private static int READ_TIMEOUT_MS = 2000;
-
-  @Rule
-  public TestName name = new TestName();
-
-  @BeforeClass
-  public static void setUp() throws Exception {
-    TEST_UTIL.getConfiguration().setInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT_MS);
-    TEST_UTIL.startMiniDFSCluster(3);
-    FS = TEST_UTIL.getDFSCluster().getFileSystem();
-    EVENT_LOOP_GROUP = new NioEventLoopGroup();
-  }
-
-  @AfterClass
-  public static void tearDown() throws IOException, InterruptedException {
-    if (EVENT_LOOP_GROUP != null) {
-      EVENT_LOOP_GROUP.shutdownGracefully().sync();
-    }
-    TEST_UTIL.shutdownMiniDFSCluster();
-  }
-
-  private void ensureAllDatanodeAlive() throws InterruptedException {
-    // FanOutOneBlockAsyncDFSOutputHelper.createOutput is fail-fast, so we need to make sure that we
-    // can create a FanOutOneBlockAsyncDFSOutput after a datanode restarting, otherwise some tests
-    // will fail.
-    for (;;) {
-      try {
-        FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS,
-          new Path("/ensureDatanodeAlive"), true, true, (short) 3, FS.getDefaultBlockSize(),
-          EVENT_LOOP_GROUP.next());
-        out.close();
-        break;
-      } catch (IOException e) {
-        Thread.sleep(100);
-      }
-    }
-  }
-
-  static void writeAndVerify(EventLoop eventLoop, DistributedFileSystem dfs, Path f,
-      final FanOutOneBlockAsyncDFSOutput out)
-          throws IOException, InterruptedException, ExecutionException {
-    final byte[] b = new byte[10];
-    ThreadLocalRandom.current().nextBytes(b);
-    final FanOutOneBlockAsyncDFSOutputFlushHandler handler = new FanOutOneBlockAsyncDFSOutputFlushHandler();
-    eventLoop.execute(new Runnable() {
-
-      @Override
-      public void run() {
-        out.write(b, 0, b.length);
-        out.flush(null, handler, false);
-      }
-    });
-    assertEquals(b.length, handler.get());
-    out.close();
-    assertEquals(b.length, dfs.getFileStatus(f).getLen());
-    byte[] actual = new byte[b.length];
-    try (FSDataInputStream in = dfs.open(f)) {
-      in.readFully(actual);
-    }
-    assertArrayEquals(b, actual);
-  }
-
-  @Test
-  public void test() throws IOException, InterruptedException, ExecutionException {
-    Path f = new Path("/" + name.getMethodName());
-    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
-    final FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f,
-      true, false, (short) 3, FS.getDefaultBlockSize(), eventLoop);
-    writeAndVerify(eventLoop, FS, f, out);
-  }
-
-  @Test
-  public void testRecover() throws IOException, InterruptedException, ExecutionException {
-    Path f = new Path("/" + name.getMethodName());
-    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
-    final FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f,
-      true, false, (short) 3, FS.getDefaultBlockSize(), eventLoop);
-    final byte[] b = new byte[10];
-    ThreadLocalRandom.current().nextBytes(b);
-    final FanOutOneBlockAsyncDFSOutputFlushHandler handler = new FanOutOneBlockAsyncDFSOutputFlushHandler();
-    eventLoop.execute(new Runnable() {
-
-      @Override
-      public void run() {
-        out.write(b, 0, b.length);
-        out.flush(null, handler, false);
-      }
-    });
-    handler.get();
-    // restart one datanode which causes one connection broken
-    TEST_UTIL.getDFSCluster().restartDataNode(0);
-    try {
-      handler.reset();
-      eventLoop.execute(new Runnable() {
-
-        @Override
-        public void run() {
-          out.write(b, 0, b.length);
-          out.flush(null, handler, false);
-        }
-      });
-      try {
-        handler.get();
-        fail("flush should fail");
-      } catch (ExecutionException e) {
-        // we restarted one datanode so the flush should fail
-        LOG.info("expected exception caught", e);
-      }
-      out.recoverAndClose(null);
-      assertEquals(b.length, FS.getFileStatus(f).getLen());
-      byte[] actual = new byte[b.length];
-      try (FSDataInputStream in = FS.open(f)) {
-        in.readFully(actual);
-      }
-      assertArrayEquals(b, actual);
-    } finally {
-      ensureAllDatanodeAlive();
-    }
-  }
-
-  @Test
-  public void testHeartbeat() throws IOException, InterruptedException, ExecutionException {
-    Path f = new Path("/" + name.getMethodName());
-    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
-    final FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f,
-      true, false, (short) 3, FS.getDefaultBlockSize(), eventLoop);
-    Thread.sleep(READ_TIMEOUT_MS * 2);
-    // the connection to datanode should still alive.
-    writeAndVerify(eventLoop, FS, f, out);
-  }
-
-  /**
-   * This is important for fencing when recover from RS crash.
-   */
-  @Test
-  public void testCreateParentFailed() throws IOException {
-    Path f = new Path("/" + name.getMethodName() + "/test");
-    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
-    try {
-      FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true, false, (short) 3,
-        FS.getDefaultBlockSize(), eventLoop);
-      fail("should fail with parent does not exist");
-    } catch (RemoteException e) {
-      LOG.info("expected exception caught", e);
-      assertTrue(e.unwrapRemoteException() instanceof FileNotFoundException);
-    }
-  }
-
-  @Test
-  public void testConnectToDatanodeFailed()
-      throws IOException, ClassNotFoundException, NoSuchMethodException, IllegalAccessException,
-      InvocationTargetException, InterruptedException, NoSuchFieldException {
-    Field xceiverServerDaemonField = DataNode.class.getDeclaredField("dataXceiverServer");
-    xceiverServerDaemonField.setAccessible(true);
-    Class<?> xceiverServerClass = Class
-        .forName("org.apache.hadoop.hdfs.server.datanode.DataXceiverServer");
-    Method numPeersMethod = xceiverServerClass.getDeclaredMethod("getNumPeers");
-    numPeersMethod.setAccessible(true);
-    // make one datanode broken
-    TEST_UTIL.getDFSCluster().getDataNodes().get(0).shutdownDatanode(true);
-    try {
-      Path f = new Path("/test");
-      EventLoop eventLoop = EVENT_LOOP_GROUP.next();
-      try {
-        FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true, false, (short) 3,
-          FS.getDefaultBlockSize(), eventLoop);
-        fail("should fail with connection error");
-      } catch (IOException e) {
-        LOG.info("expected exception caught", e);
-      }
-      for (DataNode dn : TEST_UTIL.getDFSCluster().getDataNodes()) {
-        Daemon daemon = (Daemon) xceiverServerDaemonField.get(dn);
-        assertEquals(0, numPeersMethod.invoke(daemon.getRunnable()));
-      }
-    } finally {
-      TEST_UTIL.getDFSCluster().restartDataNode(0);
-      ensureAllDatanodeAlive();
-    }
-  }
-}

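For readers unfamiliar with the reflection in testConnectToDatanodeFailed above: DataXceiverServer is package-private in HDFS, so the test reaches the datanode's active peer count through reflection. A minimal standalone sketch of the same technique, under the assumptions the test itself makes (a "dataXceiverServer" Daemon field on DataNode and a getNumPeers method on the server; both names are version-dependent):

    import java.lang.reflect.Field;
    import java.lang.reflect.Method;

    import org.apache.hadoop.hdfs.server.datanode.DataNode;
    import org.apache.hadoop.util.Daemon;

    final class XceiverPeerCount {

      // Returns the number of active peers on the DataNode's DataXceiverServer.
      // The class is package-private, hence reflection instead of a direct call.
      static int numPeers(DataNode dn) throws Exception {
        Field f = DataNode.class.getDeclaredField("dataXceiverServer");
        f.setAccessible(true);
        Daemon daemon = (Daemon) f.get(dn);
        Method m = daemon.getRunnable().getClass().getDeclaredMethod("getNumPeers");
        m.setAccessible(true);
        return (Integer) m.invoke(daemon.getRunnable());
      }
    }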
http://git-wip-us.apache.org/repos/asf/hbase/blob/1eac103e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSaslFanOutOneBlockAsyncDFSOutput.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSaslFanOutOneBlockAsyncDFSOutput.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSaslFanOutOneBlockAsyncDFSOutput.java
deleted file mode 100644
index 2f5e2ff..0000000
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSaslFanOutOneBlockAsyncDFSOutput.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.util;
-
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATA_ENCRYPTION_ALGORITHM_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Properties;
-import java.util.concurrent.ExecutionException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
-import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
-import org.apache.hadoop.hbase.security.token.TestGenerateDelegationToken;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.minikdc.MiniKdc;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
-
-import io.netty.channel.EventLoop;
-import io.netty.channel.EventLoopGroup;
-import io.netty.channel.nio.NioEventLoopGroup;
-
-@RunWith(Parameterized.class)
-@Category({ MiscTests.class, MediumTests.class })
-public class TestSaslFanOutOneBlockAsyncDFSOutput {
-
-  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-
-  private static DistributedFileSystem FS;
-
-  private static EventLoopGroup EVENT_LOOP_GROUP;
-
-  private static int READ_TIMEOUT_MS = 200000;
-
-  private static final File KEYTAB_FILE = new File(
-      TEST_UTIL.getDataTestDir("keytab").toUri().getPath());
-
-  private static MiniKdc KDC;
-
-  private static String HOST = "localhost";
-
-  private static String USERNAME;
-
-  private static String PRINCIPAL;
-
-  private static String HTTP_PRINCIPAL;
-  @Rule
-  public TestName name = new TestName();
-
-  @Parameter(0)
-  public String protection;
-
-  @Parameter(1)
-  public String encryptionAlgorithm;
-
-  @Parameters(name = "{index}: protection={0}, encryption={1}")
-  public static Iterable<Object[]> data() {
-    List<Object[]> params = new ArrayList<>();
-    for (String protection : Arrays.asList("authentication", "integrity", "privacy")) {
-      for (String encryptionAlgorithm : Arrays.asList("", "3des", "rc4")) {
-        params.add(new Object[] { protection, encryptionAlgorithm });
-      }
-    }
-    return params;
-  }
-
-  private static void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
-    // change XXX_USER_NAME_KEY to XXX_KERBEROS_PRINCIPAL_KEY after we drop support for hadoop-2.4.1
-    conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, PRINCIPAL + "@" + KDC.getRealm());
-    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
-    conf.set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY, PRINCIPAL + "@" + KDC.getRealm());
-    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
-    conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
-      HTTP_PRINCIPAL + "@" + KDC.getRealm());
-    conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
-    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
-    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
-    conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
-
-    File keystoresDir = new File(TEST_UTIL.getDataTestDir("keystore").toUri().getPath());
-    keystoresDir.mkdirs();
-    String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestGenerateDelegationToken.class);
-    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
-
-    conf.setBoolean("ignore.secure.ports.for.testing", true);
-  }
-
-  @BeforeClass
-  public static void setUpBeforeClass() throws Exception {
-    Logger.getLogger("org.apache.hadoop.hdfs.StateChange").setLevel(Level.DEBUG);
-    Logger.getLogger("BlockStateChange").setLevel(Level.DEBUG);
-    EVENT_LOOP_GROUP = new NioEventLoopGroup();
-    TEST_UTIL.getConfiguration().setInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT_MS);
-    Properties conf = MiniKdc.createConf();
-    conf.put(MiniKdc.DEBUG, true);
-    KDC = new MiniKdc(conf, new File(TEST_UTIL.getDataTestDir("kdc").toUri().getPath()));
-    KDC.start();
-    USERNAME = UserGroupInformation.getLoginUser().getShortUserName();
-    PRINCIPAL = USERNAME + "/" + HOST;
-    HTTP_PRINCIPAL = "HTTP/" + HOST;
-    KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL);
-    setHdfsSecuredConfiguration(TEST_UTIL.getConfiguration());
-    HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath());
-    HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm());
-    HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration());
-    UserGroupInformation.setConfiguration(TEST_UTIL.getConfiguration());
-  }
-
-  @AfterClass
-  public static void tearDownAfterClass() throws IOException, InterruptedException {
-    if (EVENT_LOOP_GROUP != null) {
-      EVENT_LOOP_GROUP.shutdownGracefully().sync();
-    }
-    if (KDC != null) {
-      KDC.stop();
-    }
-  }
-
-  @Before
-  public void setUp() throws Exception {
-    TEST_UTIL.getConfiguration().set("dfs.data.transfer.protection", protection);
-    if (StringUtils.isBlank(encryptionAlgorithm)) {
-      TEST_UTIL.getConfiguration().setBoolean(DFS_ENCRYPT_DATA_TRANSFER_KEY, false);
-      TEST_UTIL.getConfiguration().unset(DFS_DATA_ENCRYPTION_ALGORITHM_KEY);
-    } else {
-      TEST_UTIL.getConfiguration().setBoolean(DFS_ENCRYPT_DATA_TRANSFER_KEY, true);
-      TEST_UTIL.getConfiguration().set(DFS_DATA_ENCRYPTION_ALGORITHM_KEY, encryptionAlgorithm);
-    }
-    TEST_UTIL.startMiniDFSCluster(3);
-    FS = TEST_UTIL.getDFSCluster().getFileSystem();
-  }
-
-  @After
-  public void tearDown() throws IOException {
-    TEST_UTIL.shutdownMiniDFSCluster();
-  }
-
-  private Path getTestFile() {
-    return new Path("/" + name.getMethodName().replaceAll("[^0-9a-zA-Z]", "_"));
-  }
-
-  @Test
-  public void test() throws IOException, InterruptedException, ExecutionException {
-    Path f = getTestFile();
-    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
-    final FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f,
-      true, false, (short) 1, FS.getDefaultBlockSize(), eventLoop);
-    TestFanOutOneBlockAsyncDFSOutput.writeAndVerify(eventLoop, FS, f, out);
-  }
-}
