http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/AdlMockWebServer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/AdlMockWebServer.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/AdlMockWebServer.java
new file mode 100644
index 0000000..55c8f81
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/AdlMockWebServer.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.adl;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.adl.common.CustomMockTokenProvider;
+import org.apache.hadoop.fs.adl.oauth2.AzureADTokenProvider;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .AZURE_AD_TOKEN_PROVIDER_CLASS_KEY;
+
+import com.squareup.okhttp.mockwebserver.MockWebServer;
+
+import org.junit.After;
+import org.junit.Before;
+
+/**
+ * Mock server to simulate ADLS backend calls. This infrastructure is
+ * extensible, so derived tests can override the expected server response.
+ * Common functionality to generate token information before a request is sent
+ * to the ADLS backend is also managed within the AdlMockWebServer
+ * implementation using
+ * {@link org.apache.hadoop.fs.adl.common.CustomMockTokenProvider}.
+ */
+public class AdlMockWebServer {
+  // Create a MockWebServer. These are lean enough that you can create a new
+  // instance for every unit test.
+  private MockWebServer server = null;
+  private TestableAdlFileSystem fs = null;
+  private int port = 0;
+  private Configuration conf = new Configuration();
+
+  public MockWebServer getMockServer() {
+    return server;
+  }
+
+  public TestableAdlFileSystem getMockAdlFileSystem() {
+    return fs;
+  }
+
+  public int getPort() {
+    return port;
+  }
+
+  public Configuration getConf() {
+    return conf;
+  }
+
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+  }
+
+  @Before
+  public void preTestSetup() throws IOException, URISyntaxException {
+    server = new MockWebServer();
+
+    // Start the server.
+    server.start();
+
+    // Ask the server for its URL. You'll need this to make HTTP requests.
+    URL baseUrl = server.getUrl("");
+    port = baseUrl.getPort();
+
+    // Exercise your application code, which should make those HTTP requests.
+    // Responses are returned in the same order that they are enqueued.
+    fs = new TestableAdlFileSystem();
+
+    conf.setClass(AZURE_AD_TOKEN_PROVIDER_CLASS_KEY,
+        CustomMockTokenProvider.class, AzureADTokenProvider.class);
+
+    URI uri = new URI("adl://localhost:" + port);
+    fs.initialize(uri, conf);
+  }
+
+  @After
+  public void postTestSetup() throws IOException {
+    fs.close();
+    server.shutdown();
+  }
+}
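
A note for readers: derived tests drive this harness by enqueueing canned
responses on the mock server and can also inspect the request the ADL client
actually issued. A minimal sketch, assuming only the harness above (the test
class name and request path are illustrative; MockWebServer.takeRequest()
returns the next recorded request):

  import com.squareup.okhttp.mockwebserver.MockResponse;
  import com.squareup.okhttp.mockwebserver.RecordedRequest;
  import org.apache.hadoop.fs.Path;
  import org.junit.Assert;
  import org.junit.Test;

  public class TestMockServerUsage extends AdlMockWebServer {
    @Test
    public void verifiesRequestSentToBackend() throws Exception {
      // Queue a canned 200 response for the next HTTP call.
      getMockServer().enqueue(new MockResponse().setResponseCode(200)
          .setBody(TestADLResponseData.getGetFileStatusJSONResponse()));
      getMockAdlFileSystem().getFileStatus(new Path("/test1/test2"));

      // Inspect the request the ADL client actually sent.
      RecordedRequest request = getMockServer().takeRequest();
      Assert.assertTrue(request.getRequestLine().contains("op=GETFILESTATUS"));
    }
  }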

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestACLFeatures.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestACLFeatures.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestACLFeatures.java
new file mode 100644
index 0000000..b420daa
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestACLFeatures.java
@@ -0,0 +1,262 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.AccessControlException;
+
+import com.squareup.okhttp.mockwebserver.MockResponse;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Stub the ADL server and test ACL data conversion between the SDK and the
+ * Hadoop ADL client.
+ */
+public class TestACLFeatures extends AdlMockWebServer {
+
+  @Test(expected=AccessControlException.class)
+  public void testModifyAclEntries() throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    List<AclEntry> entries = new ArrayList<AclEntry>();
+    AclEntry.Builder aclEntryBuilder = new AclEntry.Builder();
+    aclEntryBuilder.setName("hadoop");
+    aclEntryBuilder.setType(AclEntryType.USER);
+    aclEntryBuilder.setPermission(FsAction.ALL);
+    aclEntryBuilder.setScope(AclEntryScope.ACCESS);
+    entries.add(aclEntryBuilder.build());
+
+    aclEntryBuilder.setName("hdfs");
+    aclEntryBuilder.setType(AclEntryType.GROUP);
+    aclEntryBuilder.setPermission(FsAction.READ_WRITE);
+    aclEntryBuilder.setScope(AclEntryScope.DEFAULT);
+    entries.add(aclEntryBuilder.build());
+
+    getMockAdlFileSystem().modifyAclEntries(new Path("/test1/test2"), entries);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(403)
+        .setBody(TestADLResponseData.getAccessControlException()));
+
+    getMockAdlFileSystem()
+        .modifyAclEntries(new Path("/test1/test2"), entries);
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testRemoveAclEntriesWithOnlyUsers()
+      throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    List<AclEntry> entries = new ArrayList<AclEntry>();
+    AclEntry.Builder aclEntryBuilder = new AclEntry.Builder();
+    aclEntryBuilder.setName("hadoop");
+    aclEntryBuilder.setType(AclEntryType.USER);
+    entries.add(aclEntryBuilder.build());
+
+    getMockAdlFileSystem().removeAclEntries(new Path("/test1/test2"), entries);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(403)
+        .setBody(TestADLResponseData.getAccessControlException()));
+
+    getMockAdlFileSystem()
+        .removeAclEntries(new Path("/test1/test2"), entries);
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testRemoveAclEntries() throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    List<AclEntry> entries = new ArrayList<AclEntry>();
+    AclEntry.Builder aclEntryBuilder = new AclEntry.Builder();
+    aclEntryBuilder.setName("hadoop");
+    aclEntryBuilder.setType(AclEntryType.USER);
+    aclEntryBuilder.setPermission(FsAction.ALL);
+    aclEntryBuilder.setScope(AclEntryScope.ACCESS);
+    entries.add(aclEntryBuilder.build());
+
+    aclEntryBuilder.setName("hdfs");
+    aclEntryBuilder.setType(AclEntryType.GROUP);
+    aclEntryBuilder.setPermission(FsAction.READ_WRITE);
+    aclEntryBuilder.setScope(AclEntryScope.DEFAULT);
+    entries.add(aclEntryBuilder.build());
+
+    getMockAdlFileSystem().removeAclEntries(new Path("/test1/test2"), entries);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(403)
+        .setBody(TestADLResponseData.getAccessControlException()));
+
+    getMockAdlFileSystem()
+        .removeAclEntries(new Path("/test1/test2"), entries);
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testRemoveDefaultAclEntries()
+      throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem().removeDefaultAcl(new Path("/test1/test2"));
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(403)
+        .setBody(TestADLResponseData.getAccessControlException()));
+
+    getMockAdlFileSystem().removeDefaultAcl(new Path("/test1/test2"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testRemoveAcl() throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem().removeAcl(new Path("/test1/test2"));
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(403)
+        .setBody(TestADLResponseData.getAccessControlException()));
+
+    getMockAdlFileSystem().removeAcl(new Path("/test1/test2"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testSetAcl() throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    List<AclEntry> entries = new ArrayList<AclEntry>();
+    AclEntry.Builder aclEntryBuilder = new AclEntry.Builder();
+    aclEntryBuilder.setName("hadoop");
+    aclEntryBuilder.setType(AclEntryType.USER);
+    aclEntryBuilder.setPermission(FsAction.ALL);
+    aclEntryBuilder.setScope(AclEntryScope.ACCESS);
+    entries.add(aclEntryBuilder.build());
+
+    aclEntryBuilder.setName("hdfs");
+    aclEntryBuilder.setType(AclEntryType.GROUP);
+    aclEntryBuilder.setPermission(FsAction.READ_WRITE);
+    aclEntryBuilder.setScope(AclEntryScope.DEFAULT);
+    entries.add(aclEntryBuilder.build());
+
+    getMockAdlFileSystem().setAcl(new Path("/test1/test2"), entries);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(403)
+        .setBody(TestADLResponseData.getAccessControlException()));
+
+    getMockAdlFileSystem().setAcl(new Path("/test1/test2"), entries);
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testCheckAccess() throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem().access(new Path("/test1/test2"), FsAction.ALL);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem().access(new Path("/test1/test2"), FsAction.EXECUTE);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem().access(new Path("/test1/test2"), FsAction.READ);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem()
+        .access(new Path("/test1/test2"), FsAction.READ_EXECUTE);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem()
+        .access(new Path("/test1/test2"), FsAction.READ_WRITE);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem().access(new Path("/test1/test2"), FsAction.NONE);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem().access(new Path("/test1/test2"), FsAction.WRITE);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem()
+        .access(new Path("/test1/test2"), FsAction.WRITE_EXECUTE);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(403)
+        .setBody(TestADLResponseData.getAccessControlException()));
+
+    getMockAdlFileSystem()
+        .access(new Path("/test1/test2"), FsAction.WRITE_EXECUTE);
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testSetPermission() throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem()
+        .setPermission(new Path("/test1/test2"), FsPermission.getDefault());
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(403)
+        .setBody(TestADLResponseData.getAccessControlException()));
+
+    getMockAdlFileSystem()
+        .setPermission(new Path("/test1/test2"), FsPermission.getDefault());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testSetOwner() throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200));
+    getMockAdlFileSystem()
+        .setOwner(new Path("/test1/test2"), "hadoop", "hdfs");
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(403)
+        .setBody(TestADLResponseData.getAccessControlException()));
+
+    getMockAdlFileSystem()
+        .setOwner(new Path("/test1/test2"), "hadoop", "hdfs");
+  }
+
+  @Test
+  public void getAclStatusAsExpected() throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200)
+        .setBody(TestADLResponseData.getGetAclStatusJSONResponse()));
+    AclStatus aclStatus = getMockAdlFileSystem()
+        .getAclStatus(new Path("/test1/test2"));
+    Assert.assertEquals(aclStatus.getGroup(), "supergroup");
+    Assert.assertEquals(aclStatus.getOwner(), "hadoop");
+    Assert.assertEquals((Short) aclStatus.getPermission().toShort(),
+        Short.valueOf("775", 8));
+
+    for (AclEntry entry : aclStatus.getEntries()) {
+      if (!(entry.toString().equalsIgnoreCase("user:carla:rw-") || entry
+          .toString().equalsIgnoreCase("group::r-x"))) {
+        Assert.fail("Unexpected entry : " + entry.toString());
+      }
+    }
+  }
+
+  @Test(expected=FileNotFoundException.class)
+  public void getAclStatusNotExists() throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(404)
+        .setBody(TestADLResponseData.getFileNotFoundException()));
+
+    getMockAdlFileSystem().getAclStatus(new Path("/test1/test2"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testAclStatusDenied() throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(403)
+        .setBody(TestADLResponseData.getAccessControlException()));
+
+    getMockAdlFileSystem().getAclStatus(new Path("/test1/test2"));
+  }
+}
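
For reference, the string forms checked in getAclStatusAsExpected come from
AclEntry.toString(), which renders scope (a "default:" prefix when the scope
is DEFAULT), type, name and permission. A minimal sketch using the same
Hadoop builder API shown in the tests above:

  AclEntry entry = new AclEntry.Builder()
      .setType(AclEntryType.USER)
      .setName("carla")
      .setPermission(FsAction.READ_WRITE)
      .setScope(AclEntryScope.ACCESS)
      .build();
  // Renders as "user:carla:rw-", matching one of the entries in the mocked
  // AclStatus JSON response.
  String rendered = entry.toString();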

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestADLResponseData.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestADLResponseData.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestADLResponseData.java
index 1c4fcab..24eb314 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestADLResponseData.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestADLResponseData.java
@@ -24,15 +24,16 @@ import org.apache.hadoop.fs.FileStatus;
 import java.util.Random;
 
 /**
- * This class is responsible to provide generic test methods for mock up test
- * to generate stub response for a network request.
+ * Mock response data returned from the ADL storage account.
  */
 public final class TestADLResponseData {
 
-  private TestADLResponseData() {}
+  private TestADLResponseData() {
+
+  }
 
   public static String getGetFileStatusJSONResponse(FileStatus status) {
-    String str = "{\"FileStatus\":{\"length\":" + status.getLen() + "," +
+    return "{\"FileStatus\":{\"length\":" + status.getLen() + "," +
         "\"pathSuffix\":\"\",\"type\":\"" + (status.isDirectory() ?
         "DIRECTORY" :
         "FILE") + "\"" +
@@ -42,21 +43,27 @@ public final class TestADLResponseData {
         ",\"replication\":" + status.getReplication() + ",\"permission\":\""
         + status.getPermission() + "\",\"owner\":\"" + status.getOwner()
         + "\",\"group\":\"" + status.getGroup() + "\"}}";
-
-    return str;
   }
 
   public static String getGetFileStatusJSONResponse() {
     return getGetFileStatusJSONResponse(4194304);
   }
 
+  public static String getGetAclStatusJSONResponse() {
+    return "{\n" + "    \"AclStatus\": {\n" + "        \"entries\": [\n"
+        + "            \"user:carla:rw-\", \n" + "            \"group::r-x\"\n"
+        + "        ], \n" + "        \"group\": \"supergroup\", \n"
+        + "        \"owner\": \"hadoop\", \n"
+        + "        \"permission\":\"775\",\n" + "        \"stickyBit\": 
false\n"
+        + "    }\n" + "}";
+  }
+
   public static String getGetFileStatusJSONResponse(long length) {
-    String str = "{\"FileStatus\":{\"length\":" + length + "," +
+    return "{\"FileStatus\":{\"length\":" + length + "," +
         "\"pathSuffix\":\"\",\"type\":\"FILE\",\"blockSize\":268435456," +
         "\"accessTime\":1452103827023,\"modificationTime\":1452103827023," +
         "\"replication\":0,\"permission\":\"777\"," +
         "\"owner\":\"NotSupportYet\",\"group\":\"NotSupportYet\"}}";
-    return str;
   }
 
   public static String getListFileStatusJSONResponse(int dirSize) {
@@ -71,40 +78,60 @@ public final class TestADLResponseData {
     }
 
     list = list.substring(0, list.length() - 1);
-    String str = "{\"FileStatuses\":{\"FileStatus\":[" + list + "]}}";
-
-    return str;
+    return "{\"FileStatuses\":{\"FileStatus\":[" + list + "]}}";
   }
 
   public static String getJSONResponse(boolean status) {
-    String str = "{\"boolean\":" + status + "}";
-    return str;
+    return "{\"boolean\":" + status + "}";
   }
 
   public static String getErrorIllegalArgumentExceptionJSONResponse() {
-    String str = "{\n" +
+    return "{\n" +
         "  \"RemoteException\":\n" +
         "  {\n" +
         "    \"exception\"    : \"IllegalArgumentException\",\n" +
         "    \"javaClassName\": \"java.lang.IllegalArgumentException\",\n" +
-        "    \"message\"      : \"Bad Offset 0x83090015\"" +
+        "    \"message\"      : \"Invalid\"" +
         "  }\n" +
         "}";
+  }
 
-    return str;
+  public static String getErrorBadOffsetExceptionJSONResponse() {
+    return "{\n" +
+        "  \"RemoteException\":\n" +
+        "  {\n" +
+        "    \"exception\"    : \"BadOffsetException\",\n" +
+        "    \"javaClassName\": \"org.apache.hadoop.fs.adl"
+        + ".BadOffsetException\",\n" +
+        "    \"message\"      : \"Invalid\"" +
+        "  }\n" +
+        "}";
   }
 
   public static String getErrorInternalServerExceptionJSONResponse() {
-    String str = "{\n" +
+    return "{\n" +
         "  \"RemoteException\":\n" +
         "  {\n" +
-        "    \"exception\"    : \"RumtimeException\",\n" +
-        "    \"javaClassName\": \"java.lang.RumtimeException\",\n" +
+        "    \"exception\"    : \"RuntimeException\",\n" +
+        "    \"javaClassName\": \"java.lang.RuntimeException\",\n" +
         "    \"message\"      : \"Internal Server Error\"" +
         "  }\n" +
         "}";
+  }
+
+  public static String getAccessControlException() {
+    return "{\n" + "  \"RemoteException\":\n" + "  {\n"
+        + "    \"exception\"    : \"AccessControlException\",\n"
+        + "    \"javaClassName\": \"org.apache.hadoop.security"
+        + ".AccessControlException\",\n"
+        + "    \"message\"      : \"Permission denied: ...\"\n" + "  }\n" + 
"}";
+  }
 
-    return str;
+  public static String getFileNotFoundException() {
+    return "{\n" + "  \"RemoteException\":\n" + "  {\n"
+        + "    \"exception\"    : \"FileNotFoundException\",\n"
+        + "    \"javaClassName\": \"java.io.FileNotFoundException\",\n"
+        + "    \"message\"      : \"File does not exist\"\n" + "  }\n" + "}";
   }
 
   public static byte[] getRandomByteArrayData() {
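
All of these helpers emit the WebHDFS-style RemoteException envelope, which
the ADL client maps back to the named Java exception. The shape is:

  {"RemoteException": {"exception": "...", "javaClassName": "...", "message": "..."}}

so, for example, a 403 response carrying getAccessControlException() surfaces
as org.apache.hadoop.security.AccessControlException in the ACL tests above.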

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAdlRead.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAdlRead.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAdlRead.java
new file mode 100644
index 0000000..734256a
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAdlRead.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.adl;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.adl.common.Parallelized;
+import org.apache.hadoop.fs.adl.common.TestDataForRead;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.ByteArrayInputStream;
+import java.io.EOFException;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Random;
+
+import static org.apache.hadoop.fs.adl.AdlConfKeys.READ_AHEAD_BUFFER_SIZE_KEY;
+
+/**
+ * This class stress-tests positional reads against the number of network
+ * calls required to fetch the data. The tests also verify that data
+ * integrity and ordering are maintained.
+ */
+@RunWith(Parallelized.class)
+public class TestAdlRead extends AdlMockWebServer {
+
+  private TestDataForRead testData;
+
+  public TestAdlRead(TestDataForRead testData) {
+    Configuration configuration = new Configuration();
+    configuration.setInt(READ_AHEAD_BUFFER_SIZE_KEY, 4 * 1024);
+    setConf(configuration);
+    this.testData = testData;
+  }
+
+  @Parameterized.Parameters(name = "{index}")
+  public static Collection testDataForReadOperation() {
+    return Arrays.asList(new Object[][] {
+
+        //--------------------------
+        // Test Data
+        //--------------------------
+        {new TestDataForRead("Hello World".getBytes(), 2, 1000, true)},
+        {new TestDataForRead(
+            ("the problem you appear to be wrestling with is that this doesn't "
+                + "display very well. ").getBytes(), 2, 1000, true)},
+        {new TestDataForRead(("您的數據是寶貴的資產，以您的組織，並有當前和未來價值。由於這個原因，"
+            + "所有的數據應存儲以供將來分析。今天，這往往是不這樣做，" + "因為傳統的分析基礎架構的限制，"
+            + "像模式的預定義，存儲大數據集和不同的數據筒倉的傳播的成本。"
+            + "為了應對這一挑戰，數據湖面概念被引入作為一個企業級存儲庫來存儲所有"
+            + "類型的在一個地方收集到的數據。對於運作和探索性分析的目的，所有類型的" + "數據可以定義需求或模式之前被存儲在數據湖。")
+            .getBytes(), 2, 1000, true)}, {new TestDataForRead(
+        TestADLResponseData.getRandomByteArrayData(4 * 1024), 2, 10, true)},
+        {new TestDataForRead(TestADLResponseData.getRandomByteArrayData(100), 2,
+            1000, true)}, {new TestDataForRead(
+        TestADLResponseData.getRandomByteArrayData(1 * 1024), 2, 50, true)},
+        {new TestDataForRead(
+            TestADLResponseData.getRandomByteArrayData(8 * 1024), 3, 10,
+            false)}, {new TestDataForRead(
+        TestADLResponseData.getRandomByteArrayData(16 * 1024), 5, 10, false)},
+        {new TestDataForRead(
+            TestADLResponseData.getRandomByteArrayData(32 * 1024), 9, 10,
+            false)}, {new TestDataForRead(
+        TestADLResponseData.getRandomByteArrayData(64 * 1024), 17, 10,
+        false)}});
+  }
+
+  @Test
+  public void testEntireBytes() throws IOException, InterruptedException {
+    getMockServer().setDispatcher(testData.getDispatcher());
+    FSDataInputStream in = getMockAdlFileSystem().open(new Path("/test"));
+    byte[] expectedData = new byte[testData.getActualData().length];
+    int n = 0;
+    int len = expectedData.length;
+    int off = 0;
+    while (n < len) {
+      int count = in.read(expectedData, off + n, len - n);
+      if (count < 0) {
+        throw new EOFException();
+      }
+      n += count;
+    }
+
+    Assert.assertEquals(testData.getActualData().length, expectedData.length);
+    Assert.assertArrayEquals(expectedData, testData.getActualData());
+    in.close();
+    if (testData.isCheckOfNoOfCalls()) {
+      Assert.assertEquals(testData.getExpectedNoNetworkCall(),
+          getMockServer().getRequestCount());
+    }
+  }
+
+  @Test
+  public void testSeekOperation() throws IOException, InterruptedException {
+    getMockServer().setDispatcher(testData.getDispatcher());
+    FSDataInputStream in = getMockAdlFileSystem().open(new Path("/test"));
+    Random random = new Random();
+    for (int i = 0; i < 1000; ++i) {
+      int position = random.nextInt(testData.getActualData().length);
+      in.seek(position);
+      Assert.assertEquals(in.getPos(), position);
+      Assert.assertEquals(in.read(),
+          testData.getActualData()[position] & 0xFF);
+    }
+    in.close();
+    if (testData.isCheckOfNoOfCalls()) {
+      Assert.assertEquals(testData.getExpectedNoNetworkCall(),
+          getMockServer().getRequestCount());
+    }
+  }
+
+  @Test
+  public void testReadServerCalls() throws IOException, InterruptedException {
+    getMockServer().setDispatcher(testData.getDispatcher());
+    FSDataInputStream in = getMockAdlFileSystem().open(new Path("/test"));
+    byte[] expectedData = new byte[testData.getActualData().length];
+    in.readFully(expectedData);
+    Assert.assertArrayEquals(expectedData, testData.getActualData());
+    Assert.assertEquals(testData.getExpectedNoNetworkCall(),
+        getMockServer().getRequestCount());
+    in.close();
+  }
+
+  @Test
+  public void testReadFully() throws IOException, InterruptedException {
+    getMockServer().setDispatcher(testData.getDispatcher());
+    FSDataInputStream in = getMockAdlFileSystem().open(new Path("/test"));
+    byte[] expectedData = new byte[testData.getActualData().length];
+    in.readFully(expectedData);
+    Assert.assertArrayEquals(expectedData, testData.getActualData());
+
+    in.readFully(0, expectedData);
+    Assert.assertArrayEquals(expectedData, testData.getActualData());
+
+    in.readFully(0, expectedData, 0, expectedData.length);
+    Assert.assertArrayEquals(expectedData, testData.getActualData());
+    in.close();
+  }
+
+  @Test
+  public void testRandomPositionalReadUsingReadFully()
+      throws IOException, InterruptedException {
+    getMockServer().setDispatcher(testData.getDispatcher());
+    FSDataInputStream in = getMockAdlFileSystem().open(new Path("/test"));
+    ByteArrayInputStream actualData = new ByteArrayInputStream(
+        testData.getActualData());
+    Random random = new Random();
+    for (int i = 0; i < testData.getIntensityOfTest(); ++i) {
+      int offset = random.nextInt(testData.getActualData().length);
+      int length = testData.getActualData().length - offset;
+      byte[] expectedData = new byte[length];
+      byte[] actualDataSubset = new byte[length];
+      actualData.reset();
+      actualData.skip(offset);
+      actualData.read(actualDataSubset, 0, length);
+
+      in.readFully(offset, expectedData, 0, length);
+      Assert.assertArrayEquals(expectedData, actualDataSubset);
+    }
+
+    for (int i = 0; i < testData.getIntensityOfTest(); ++i) {
+      int offset = random.nextInt(testData.getActualData().length);
+      int length = random.nextInt(testData.getActualData().length - offset);
+      byte[] expectedData = new byte[length];
+      byte[] actualDataSubset = new byte[length];
+      actualData.reset();
+      actualData.skip(offset);
+      actualData.read(actualDataSubset, 0, length);
+
+      in.readFully(offset, expectedData, 0, length);
+      Assert.assertArrayEquals(expectedData, actualDataSubset);
+    }
+
+    in.close();
+  }
+}
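
A note on the expected call counts in the parameter table above: with
READ_AHEAD_BUFFER_SIZE_KEY set to 4 KB, each data set appears to cost one
GETFILESTATUS call on open plus one OPEN call per 4 KB read-ahead window,
which matches every row:

  expected calls = 1 (GETFILESTATUS) + ceil(size / 4 KB)
    "Hello World", 100 B, 1 KB, 4 KB -> 1 + 1  = 2
    8 KB                             -> 1 + 2  = 3
    16 KB                            -> 1 + 4  = 5
    32 KB                            -> 1 + 8  = 9
    64 KB                            -> 1 + 16 = 17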

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAzureADTokenProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAzureADTokenProvider.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAzureADTokenProvider.java
new file mode 100644
index 0000000..c94e692
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAzureADTokenProvider.java
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.adl;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.adl.common.CustomMockTokenProvider;
+import org.apache.hadoop.fs.adl.oauth2.AzureADTokenProvider;
+
+import com.microsoft.azure.datalake.store.oauth2.AccessTokenProvider;
+import com.microsoft.azure.datalake.store.oauth2.ClientCredsTokenProvider;
+import com.microsoft.azure.datalake.store.oauth2.RefreshTokenBasedTokenProvider;
+
+import static org.apache.hadoop.fs.adl.AdlConfKeys.AZURE_AD_CLIENT_ID_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.AZURE_AD_CLIENT_SECRET_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.AZURE_AD_REFRESH_TOKEN_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.AZURE_AD_REFRESH_URL_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .AZURE_AD_TOKEN_PROVIDER_CLASS_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .AZURE_AD_TOKEN_PROVIDER_TYPE_KEY;
+import static org.apache.hadoop.fs.adl.TokenProviderType.*;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Test that the appropriate token provider is loaded as per configuration.
+ */
+public class TestAzureADTokenProvider {
+
+  @Test
+  public void testRefreshTokenProvider()
+      throws URISyntaxException, IOException {
+    Configuration conf = new Configuration();
+    conf.set(AZURE_AD_CLIENT_ID_KEY, "MY_CLIENTID");
+    conf.set(AZURE_AD_REFRESH_TOKEN_KEY, "XYZ");
+    conf.setEnum(AZURE_AD_TOKEN_PROVIDER_TYPE_KEY, RefreshToken);
+    conf.set(AZURE_AD_REFRESH_URL_KEY, "http://localhost:8080/refresh");
+
+    URI uri = new URI("adl://localhost:8080");
+    AdlFileSystem fileSystem = new AdlFileSystem();
+    fileSystem.initialize(uri, conf);
+    AccessTokenProvider tokenProvider = fileSystem.getTokenProvider();
+    Assert.assertTrue(tokenProvider instanceof RefreshTokenBasedTokenProvider);
+  }
+
+  @Test
+  public void testClientCredTokenProvider()
+      throws IOException, URISyntaxException {
+    Configuration conf = new Configuration();
+    conf.set(AZURE_AD_CLIENT_ID_KEY, "MY_CLIENTID");
+    conf.set(AZURE_AD_CLIENT_SECRET_KEY, "XYZ");
+    conf.setEnum(AZURE_AD_TOKEN_PROVIDER_TYPE_KEY, ClientCredential);
+    conf.set(AZURE_AD_REFRESH_URL_KEY, "http://localhost:8080/refresh");
+
+    URI uri = new URI("adl://localhost:8080");
+    AdlFileSystem fileSystem = new AdlFileSystem();
+    fileSystem.initialize(uri, conf);
+    AccessTokenProvider tokenProvider = fileSystem.getTokenProvider();
+    Assert.assertTrue(tokenProvider instanceof ClientCredsTokenProvider);
+  }
+
+  @Test
+  public void testCustomCredTokenProvider()
+      throws URISyntaxException, IOException {
+    Configuration conf = new Configuration();
+    conf.setClass(AZURE_AD_TOKEN_PROVIDER_CLASS_KEY,
+        CustomMockTokenProvider.class, AzureADTokenProvider.class);
+
+    URI uri = new URI("adl://localhost:8080");
+    AdlFileSystem fileSystem = new AdlFileSystem();
+    fileSystem.initialize(uri, conf);
+    AccessTokenProvider tokenProvider = fileSystem.getTokenProvider();
+    Assert.assertTrue(tokenProvider instanceof SdkTokenProviderAdapter);
+  }
+
+  @Test
+  public void testInvalidProviderConfigurationForType()
+      throws URISyntaxException, IOException {
+    Configuration conf = new Configuration();
+    URI uri = new URI("adl://localhost:8080");
+    AdlFileSystem fileSystem = new AdlFileSystem();
+    try {
+      fileSystem.initialize(uri, conf);
+      Assert.fail("Initialization should have failed due no token provider "
+          + "configuration");
+    } catch (IllegalArgumentException e) {
+      Assert.assertTrue(
+          e.getMessage().contains("dfs.adls.oauth2.access.token.provider"));
+    }
+    conf.setClass(AZURE_AD_TOKEN_PROVIDER_CLASS_KEY,
+        CustomMockTokenProvider.class, AzureADTokenProvider.class);
+    fileSystem.initialize(uri, conf);
+  }
+
+  @Test
+  public void testInvalidProviderConfigurationForClassPath()
+      throws URISyntaxException, IOException {
+    Configuration conf = new Configuration();
+    URI uri = new URI("adl://localhost:8080");
+    AdlFileSystem fileSystem = new AdlFileSystem();
+    conf.set(AZURE_AD_TOKEN_PROVIDER_CLASS_KEY,
+        "wrong.classpath.CustomMockTokenProvider");
+    try {
+      fileSystem.initialize(uri, conf);
+      Assert.fail("Initialization should have failed due invalid provider "
+          + "configuration");
+    } catch (RuntimeException e) {
+      Assert.assertTrue(
+          e.getMessage().contains("wrong.classpath.CustomMockTokenProvider"));
+    }
+  }
+}
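
For context, the same selection logic drives production configuration. A
minimal sketch mirroring testClientCredTokenProvider (imports elided; the
endpoint URL, tenant, and credentials are placeholders, not values from this
patch):

  Configuration conf = new Configuration();
  // Select the built-in client-credential (service principal) provider.
  conf.setEnum(AZURE_AD_TOKEN_PROVIDER_TYPE_KEY,
      TokenProviderType.ClientCredential);
  conf.set(AZURE_AD_CLIENT_ID_KEY, "<application-id>");
  conf.set(AZURE_AD_CLIENT_SECRET_KEY, "<application-secret>");
  conf.set(AZURE_AD_REFRESH_URL_KEY,
      "https://login.microsoftonline.com/<tenant>/oauth2/token");
  FileSystem fs = FileSystem.get(
      new URI("adl://<account>.azuredatalakestore.net"), conf);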

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestConcurrentDataReadOperations.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestConcurrentDataReadOperations.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestConcurrentDataReadOperations.java
new file mode 100644
index 0000000..b790562
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestConcurrentDataReadOperations.java
@@ -0,0 +1,299 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.adl;
+
+import com.squareup.okhttp.mockwebserver.Dispatcher;
+import com.squareup.okhttp.mockwebserver.MockResponse;
+import com.squareup.okhttp.mockwebserver.RecordedRequest;
+import okio.Buffer;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * This class tests multiple threads concurrently reading from the same file
+ * or from different files at given offsets.
+ */
+@RunWith(Parameterized.class)
+public class TestConcurrentDataReadOperations extends AdlMockWebServer {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TestConcurrentDataReadOperations.class);
+  private static final Object LOCK = new Object();
+  private static FSDataInputStream commonHandle = null;
+  private int concurrencyLevel;
+
+  public TestConcurrentDataReadOperations(int concurrencyLevel) {
+    this.concurrencyLevel = concurrencyLevel;
+  }
+
+  @Parameterized.Parameters(name = "{index}")
+  public static Collection<?> testDataNumberOfConcurrentRun() {
+    return Arrays.asList(new Object[][] {{1}, {2}, {3}, {4}, {5}});
+  }
+
+  public static byte[] getRandomByteArrayData(int size) {
+    byte[] b = new byte[size];
+    Random rand = new Random();
+    rand.nextBytes(b);
+    return b;
+  }
+
+  private void setDispatcher(final ArrayList<CreateTestData> testData) {
+    getMockServer().setDispatcher(new Dispatcher() {
+      @Override
+      public MockResponse dispatch(RecordedRequest recordedRequest)
+          throws InterruptedException {
+        CreateTestData currentRequest = null;
+        for (CreateTestData local : testData) {
+          if (recordedRequest.getPath().contains(local.path.toString())) {
+            currentRequest = local;
+            break;
+          }
+        }
+
+        if (currentRequest == null) {
+          return new MockResponse().setBody("Request data not found")
+              .setResponseCode(501);
+        }
+
+        if (recordedRequest.getRequestLine().contains("op=GETFILESTATUS")) {
+          return new MockResponse().setResponseCode(200).setBody(
+              TestADLResponseData
+                  .getGetFileStatusJSONResponse(currentRequest.data.length));
+        }
+
+        if (recordedRequest.getRequestLine().contains("op=OPEN")) {
+          String request = recordedRequest.getRequestLine();
+          int offset = 0;
+          int byteCount = 0;
+
+          Pattern pattern = Pattern.compile("offset=([0-9]+)");
+          Matcher matcher = pattern.matcher(request);
+          if (matcher.find()) {
+            LOG.debug(matcher.group(1));
+            offset = Integer.parseInt(matcher.group(1));
+          }
+
+          pattern = Pattern.compile("length=([0-9]+)");
+          matcher = pattern.matcher(request);
+          if (matcher.find()) {
+            LOG.debug(matcher.group(1));
+            byteCount = Integer.parseInt(matcher.group(1));
+          }
+
+          Buffer buf = new Buffer();
+          buf.write(currentRequest.data, offset,
+              Math.min(currentRequest.data.length - offset, byteCount));
+          return new MockResponse().setResponseCode(200)
+              .setChunkedBody(buf, 4 * 1024 * 1024);
+        }
+
+        return new MockResponse().setBody("NOT 
SUPPORTED").setResponseCode(501);
+      }
+    });
+  }
+
+  @Before
+  public void resetHandle() {
+    commonHandle = null;
+  }
+
+  @Test
+  public void testParallelReadOnDifferentStreams()
+      throws IOException, InterruptedException, ExecutionException {
+
+    ArrayList<CreateTestData> createTestData = new ArrayList<CreateTestData>();
+
+    Random random = new Random();
+
+    for (int i = 0; i < concurrencyLevel; i++) {
+      CreateTestData testData = new CreateTestData();
+      testData
+          .set(new Path("/test/concurrentRead/" + UUID.randomUUID().toString()),
+              getRandomByteArrayData(random.nextInt(1 * 1024 * 1024)));
+      createTestData.add(testData);
+    }
+
+    setDispatcher(createTestData);
+
+    ArrayList<ReadTestData> readTestData = new ArrayList<ReadTestData>();
+    for (CreateTestData local : createTestData) {
+      ReadTestData localReadData = new ReadTestData();
+      localReadData.set(local.path, local.data, 0);
+      readTestData.add(localReadData);
+    }
+
+    runReadTest(readTestData, false);
+  }
+
+  @Test
+  public void testParallelReadOnSameStreams()
+      throws IOException, InterruptedException, ExecutionException {
+    ArrayList<CreateTestData> createTestData = new ArrayList<CreateTestData>();
+
+    Random random = new Random();
+
+    for (int i = 0; i < 1; i++) {
+      CreateTestData testData = new CreateTestData();
+      testData
+          .set(new Path("/test/concurrentRead/" + UUID.randomUUID().toString()),
+              getRandomByteArrayData(1024 * 1024));
+      createTestData.add(testData);
+    }
+
+    setDispatcher(createTestData);
+
+    ArrayList<ReadTestData> readTestData = new ArrayList<ReadTestData>();
+    ByteArrayInputStream buffered = new ByteArrayInputStream(
+        createTestData.get(0).data);
+
+    ReadTestData readInitially = new ReadTestData();
+    byte[] initialData = new byte[1024 * 1024];
+    buffered.read(initialData);
+
+    readInitially.set(createTestData.get(0).path, initialData, 0);
+    readTestData.add(readInitially);
+    runReadTest(readTestData, false);
+
+    readTestData.clear();
+
+    for (int i = 0; i < concurrencyLevel * 5; i++) {
+      ReadTestData localReadData = new ReadTestData();
+      int offset = random.nextInt((1024 * 1024) - 1);
+      int length = 1024 * 1024 - offset;
+      byte[] expectedData = new byte[length];
+      buffered.reset();
+      buffered.skip(offset);
+      buffered.read(expectedData);
+      localReadData.set(createTestData.get(0).path, expectedData, offset);
+      readTestData.add(localReadData);
+    }
+
+    runReadTest(readTestData, true);
+  }
+
+  void runReadTest(ArrayList<ReadTestData> testData, boolean useSameStream)
+      throws InterruptedException, ExecutionException {
+
+    ExecutorService executor = Executors.newFixedThreadPool(testData.size());
+    Future[] subtasks = new Future[testData.size()];
+
+    for (int i = 0; i < testData.size(); i++) {
+      subtasks[i] = executor.submit(
+          new ReadConcurrentRunnable(testData.get(i).data, 
testData.get(i).path,
+              testData.get(i).offset, useSameStream));
+    }
+
+    executor.shutdown();
+
+    // wait until all tasks are finished
+    executor.awaitTermination(120, TimeUnit.SECONDS);
+
+    for (int i = 0; i < testData.size(); ++i) {
+      Assert.assertTrue((Boolean) subtasks[i].get());
+    }
+  }
+
+  class ReadTestData {
+    private Path path;
+    private byte[] data;
+    private int offset;
+
+    public void set(Path filePath, byte[] dataToBeRead, int fromOffset) {
+      this.path = filePath;
+      this.data = dataToBeRead;
+      this.offset = fromOffset;
+    }
+  }
+
+  class CreateTestData {
+    private Path path;
+    private byte[] data;
+
+    public void set(Path filePath, byte[] dataToBeWritten) {
+      this.path = filePath;
+      this.data = dataToBeWritten;
+    }
+  }
+
+  class ReadConcurrentRunnable implements Callable<Boolean> {
+    private Path path;
+    private int offset;
+    private byte[] expectedData;
+    private boolean useSameStream;
+
+    public ReadConcurrentRunnable(byte[] expectedData, Path path, int offset,
+        boolean useSameStream) {
+      this.path = path;
+      this.offset = offset;
+      this.expectedData = expectedData;
+      this.useSameStream = useSameStream;
+    }
+
+    public Boolean call() throws IOException {
+      try {
+        FSDataInputStream in;
+        if (useSameStream) {
+          synchronized (LOCK) {
+            if (commonHandle == null) {
+              commonHandle = getMockAdlFileSystem().open(path);
+            }
+            in = commonHandle;
+          }
+        } else {
+          in = getMockAdlFileSystem().open(path);
+        }
+
+        byte[] actualData = new byte[expectedData.length];
+        in.readFully(offset, actualData);
+        Assert.assertArrayEquals("Path :" + path.toString() + " did not match.",
+            expectedData, actualData);
+        if (!useSameStream) {
+          in.close();
+        }
+      } catch (IOException e) {
+        e.printStackTrace();
+        return false;
+      }
+      return true;
+    }
+  }
+}
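
The dispatcher above keys off the raw request line rather than a parsed URL.
For a positional read, the ADL client is expected to issue something like the
following (path and sizes illustrative; ADLS exposes a WebHDFS-compatible
surface):

  GET /webhdfs/v1/test/concurrentRead/<uuid>?op=OPEN&offset=1048576&length=4194304 HTTP/1.1

so the two regular expressions recover offset and length, and the mock slices
the backing byte array accordingly before replying with a chunked body.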

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestCustomTokenProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestCustomTokenProvider.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestCustomTokenProvider.java
new file mode 100644
index 0000000..c594c65
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestCustomTokenProvider.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.adl;
+
+import com.squareup.okhttp.mockwebserver.MockResponse;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.adl.common.CustomMockTokenProvider;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.Collection;
+
+import static org.apache.hadoop.fs.adl.AdlConfKeys.ADL_BLOCK_SIZE;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .AZURE_AD_TOKEN_PROVIDER_CLASS_KEY;
+
+/**
+ * Test access token provider behaviour with a custom token provider and when
+ * the token provider cache is enabled.
+ */
+@RunWith(Parameterized.class)
+public class TestCustomTokenProvider extends AdlMockWebServer {
+  private static final long TEN_MINUTES_IN_MILLIS = 600000;
+  private int backendCallCount;
+  private int expectedCallbackToAccessToken;
+  private TestableAdlFileSystem[] fileSystems;
+  private Class typeOfTokenProviderClass;
+  private long expiryFromNow;
+  private int fsObjectCount;
+
+  public TestCustomTokenProvider(Class typeOfTokenProviderClass,
+      long expiryFromNow, int fsObjectCount, int backendCallCount,
+      int expectedCallbackToAccessToken)
+      throws IllegalAccessException, InstantiationException,
+      URISyntaxException, IOException {
+    this.typeOfTokenProviderClass = typeOfTokenProviderClass;
+    this.expiryFromNow = expiryFromNow;
+    this.fsObjectCount = fsObjectCount;
+    this.backendCallCount = backendCallCount;
+    this.expectedCallbackToAccessToken = expectedCallbackToAccessToken;
+  }
+
+  @Parameterized.Parameters(name = "{index}")
+  public static Collection testDataForTokenProvider() {
+    return Arrays.asList(new Object[][] {
+        // Data set in order:
+        // INPUT - CustomTokenProvider class to load
+        // INPUT - expiry time in millis, relative to the current time
+        // INPUT - No. of FileSystem objects
+        // INPUT - No. of backend calls per FileSystem object
+        // EXPECTED - Number of callbacks to get token after test finished.
+        {CustomMockTokenProvider.class, 0, 1, 1, 1},
+        {CustomMockTokenProvider.class, TEN_MINUTES_IN_MILLIS, 1, 1, 1},
+        {CustomMockTokenProvider.class, TEN_MINUTES_IN_MILLIS, 2, 1, 2},
+        {CustomMockTokenProvider.class, TEN_MINUTES_IN_MILLIS, 10, 10, 10}});
+  }
+
+  /**
+   * Explicitly invoked init() so that the base class mock server is set up
+   * before test data initialization is done.
+   *
+   * @throws IOException
+   * @throws URISyntaxException
+   */
+  public void init() throws IOException, URISyntaxException {
+    Configuration configuration = new Configuration();
+    configuration.set(AZURE_AD_TOKEN_PROVIDER_CLASS_KEY,
+        typeOfTokenProviderClass.getName());
+    fileSystems = new TestableAdlFileSystem[fsObjectCount];
+    URI uri = new URI("adl://localhost:" + getPort());
+
+    for (int i = 0; i < fsObjectCount; ++i) {
+      fileSystems[i] = new TestableAdlFileSystem();
+      fileSystems[i].initialize(uri, configuration);
+
+      ((CustomMockTokenProvider) fileSystems[i].getAzureTokenProvider())
+          .setExpiryTimeInMillisAfter(expiryFromNow);
+    }
+  }
+
+  @Test
+  public void testCustomTokenManagement()
+      throws IOException, URISyntaxException {
+    int accessTokenCallbackDuringExec = 0;
+    init();
+    for (TestableAdlFileSystem tfs : fileSystems) {
+      for (int i = 0; i < backendCallCount; ++i) {
+        getMockServer().enqueue(new MockResponse().setResponseCode(200)
+            .setBody(TestADLResponseData.getGetFileStatusJSONResponse()));
+        FileStatus fileStatus = tfs.getFileStatus(new Path("/test1/test2"));
+        Assert.assertTrue(fileStatus.isFile());
+        Assert.assertEquals("adl://" + getMockServer().getHostName() + ":" +
+            getMockServer().getPort() + "/test1/test2",
+            fileStatus.getPath().toString());
+        Assert.assertEquals(4194304, fileStatus.getLen());
+        Assert.assertEquals(ADL_BLOCK_SIZE, fileStatus.getBlockSize());
+        Assert.assertEquals(1, fileStatus.getReplication());
+        Assert
+            .assertEquals(new FsPermission("777"), fileStatus.getPermission());
+        Assert.assertEquals("NotSupportYet", fileStatus.getOwner());
+        Assert.assertEquals("NotSupportYet", fileStatus.getGroup());
+      }
+
+      accessTokenCallbackDuringExec += ((CustomMockTokenProvider) tfs
+          .getAzureTokenProvider()).getAccessTokenRequestCount();
+    }
+
+    Assert.assertEquals(expectedCallbackToAccessToken,
+        accessTokenCallbackDuringExec);
+  }
+}
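
For readers implementing their own provider: the CustomMockTokenProvider used
here is a test double for the AzureADTokenProvider extension point. A minimal
sketch of a custom provider, assuming the abstract contract of initialize(),
getAccessToken() and getExpiryTime() (the token value is a placeholder):

  import java.io.IOException;
  import java.util.Date;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.adl.oauth2.AzureADTokenProvider;

  public class StaticTokenProvider extends AzureADTokenProvider {
    @Override
    public void initialize(Configuration conf) throws IOException {
      // Read any custom configuration keys here.
    }

    @Override
    public String getAccessToken() throws IOException {
      return "<token-from-your-token-service>";
    }

    @Override
    public Date getExpiryTime() {
      // Cache the token for ten minutes, mirroring TEN_MINUTES_IN_MILLIS.
      return new Date(System.currentTimeMillis() + 600000L);
    }
  }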

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestGetFileStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestGetFileStatus.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestGetFileStatus.java
index 4268b27..08c805e 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestGetFileStatus.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestGetFileStatus.java
@@ -22,15 +22,18 @@ package org.apache.hadoop.fs.adl;
 import com.squareup.okhttp.mockwebserver.MockResponse;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.common.AdlMockWebServer;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Time;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.net.URISyntaxException;
 
+import static org.apache.hadoop.fs.adl.AdlConfKeys.ADL_BLOCK_SIZE;
+
 /**
  * This class is responsible for testing local getFileStatus implementation
  * to cover correct parsing of successful and error JSON response
@@ -39,6 +42,8 @@ import java.net.URISyntaxException;
  * org.apache.hadoop.fs.adl.live testing package.
  */
 public class TestGetFileStatus extends AdlMockWebServer {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TestGetFileStatus.class);
 
   @Test
   public void getFileStatusReturnsAsExpected()
@@ -46,20 +51,20 @@ public class TestGetFileStatus extends AdlMockWebServer {
     getMockServer().enqueue(new MockResponse().setResponseCode(200)
         .setBody(TestADLResponseData.getGetFileStatusJSONResponse()));
     long startTime = Time.monotonicNow();
-    FileStatus fileStatus = getMockAdlFileSystem().getFileStatus(
-        new Path("/test1/test2"));
+    FileStatus fileStatus = getMockAdlFileSystem()
+        .getFileStatus(new Path("/test1/test2"));
     long endTime = Time.monotonicNow();
-    System.out.println("Time : " + (endTime - startTime));
+    LOG.debug("Time : " + (endTime - startTime));
     Assert.assertTrue(fileStatus.isFile());
-    Assert.assertEquals(fileStatus.getPath().toString(),
-        "adl://" + getMockServer().getHostName() + ":"
-            + getMockServer().getPort()
-            + "/test1/test2");
-    Assert.assertEquals(fileStatus.getLen(), 4194304);
-    Assert.assertEquals(fileStatus.getBlockSize(), 268435456);
-    Assert.assertEquals(fileStatus.getReplication(), 0);
-    Assert.assertEquals(fileStatus.getPermission(), new FsPermission("777"));
-    Assert.assertEquals(fileStatus.getOwner(), "NotSupportYet");
-    Assert.assertEquals(fileStatus.getGroup(), "NotSupportYet");
+    Assert.assertEquals("adl://" + getMockServer().getHostName() + ":" +
+        getMockServer().getPort() + "/test1/test2",
+        fileStatus.getPath().toString());
+    Assert.assertEquals(4194304, fileStatus.getLen());
+    Assert.assertEquals(ADL_BLOCK_SIZE, fileStatus.getBlockSize());
+    Assert.assertEquals(1, fileStatus.getReplication());
+    Assert.assertEquals(new FsPermission("777"), fileStatus.getPermission());
+    Assert.assertEquals("NotSupportYet", fileStatus.getOwner());
+    Assert.assertEquals("NotSupportYet", fileStatus.getGroup());
   }
+
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java
index 82c2494..dd27a10 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java
@@ -22,10 +22,11 @@ package org.apache.hadoop.fs.adl;
 import com.squareup.okhttp.mockwebserver.MockResponse;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.common.AdlMockWebServer;
 import org.apache.hadoop.util.Time;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 
@@ -37,15 +38,18 @@ import java.io.IOException;
  */
 public class TestListStatus extends AdlMockWebServer {
 
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TestListStatus.class);
+
   @Test
   public void listStatusReturnsAsExpected() throws IOException {
     getMockServer().enqueue(new MockResponse().setResponseCode(200)
         .setBody(TestADLResponseData.getListFileStatusJSONResponse(10)));
     long startTime = Time.monotonicNow();
-    FileStatus[] ls = getMockAdlFileSystem().listStatus(
-        new Path("/test1/test2"));
+    FileStatus[] ls = getMockAdlFileSystem()
+        .listStatus(new Path("/test1/test2"));
     long endTime = Time.monotonicNow();
-    System.out.println("Time : " + (endTime - startTime));
+    LOG.debug("Time : " + (endTime - startTime));
     Assert.assertEquals(ls.length, 10);
 
     getMockServer().enqueue(new MockResponse().setResponseCode(200)
@@ -53,7 +57,7 @@ public class TestListStatus extends AdlMockWebServer {
     startTime = Time.monotonicNow();
     ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
     endTime = Time.monotonicNow();
-    System.out.println("Time : " + (endTime - startTime));
+    LOG.debug("Time : " + (endTime - startTime));
     Assert.assertEquals(ls.length, 200);
 
     getMockServer().enqueue(new MockResponse().setResponseCode(200)
@@ -61,12 +65,12 @@ public class TestListStatus extends AdlMockWebServer {
     startTime = Time.monotonicNow();
     ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
     endTime = Time.monotonicNow();
-    System.out.println("Time : " + (endTime - startTime));
+    LOG.debug("Time : " + (endTime - startTime));
     Assert.assertEquals(ls.length, 2048);
   }
 
   @Test
-  public void listStatusonFailure() throws IOException {
+  public void listStatusOnFailure() throws IOException {
     getMockServer().enqueue(new MockResponse().setResponseCode(403).setBody(
         TestADLResponseData.getErrorIllegalArgumentExceptionJSONResponse()));
     FileStatus[] ls = null;
@@ -74,14 +78,18 @@ public class TestListStatus extends AdlMockWebServer {
     try {
       ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
     } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("Bad Offset 0x83090015"));
+      Assert.assertTrue(e.getMessage().contains("Invalid"));
     }
     long endTime = Time.monotonicNow();
-    System.out.println("Time : " + (endTime - startTime));
+    LOG.debug("Time : " + (endTime - startTime));
+
+    // The SDK may retry a request several times before the error is
+    // propagated to the caller. Enqueue up to 10 error responses so that
+    // every retry attempt is answered.
+    for (int i = 0; i < 10; ++i) {
+      getMockServer().enqueue(new MockResponse().setResponseCode(500).setBody(
+          TestADLResponseData.getErrorInternalServerExceptionJSONResponse()));
+    }
 
-    getMockServer().enqueue(new MockResponse().setResponseCode(500)
-        .setBody(
-            TestADLResponseData.getErrorInternalServerExceptionJSONResponse()));
     startTime = Time.monotonicNow();
     try {
       ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
@@ -89,7 +97,7 @@ public class TestListStatus extends AdlMockWebServer {
       Assert.assertTrue(e.getMessage().contains("Internal Server Error"));
     }
     endTime = Time.monotonicNow();
-    System.out.println("Time : " + (endTime - startTime));
+    LOG.debug("Time : " + (endTime - startTime));
   }
 
 }
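
The retry saturation above generalizes: every internal SDK retry consumes one
queued response, so the queue must hold at least as many error responses as
the maximum retry count. A hedged helper sketch (the limit of 10 is this
patch's assumption, not a documented SDK constant):

    import com.squareup.okhttp.mockwebserver.MockResponse;
    import com.squareup.okhttp.mockwebserver.MockWebServer;

    public final class RetrySaturation {
      // Assumption carried over from the test above.
      private static final int MAX_EXPECTED_RETRIES = 10;

      private RetrySaturation() {
      }

      public static void enqueueServerErrors(MockWebServer server,
          String errorJson) {
        // Each SDK retry dequeues one response, so repeat the error enough
        // times that every attempt sees a 500.
        for (int i = 0; i < MAX_EXPECTED_RETRIES; ++i) {
          server.enqueue(
              new MockResponse().setResponseCode(500).setBody(errorJson));
        }
      }
    }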

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestRelativePathFormation.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestRelativePathFormation.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestRelativePathFormation.java
new file mode 100644
index 0000000..908f8b8
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestRelativePathFormation.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.adl;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .AZURE_AD_TOKEN_PROVIDER_CLASS_KEY;
+
+/**
+ * Verifies that paths are converted to the relative form expected by the SDK.
+ */
+public class TestRelativePathFormation {
+
+  @Test
+  public void testToRelativePath() throws URISyntaxException, IOException {
+    AdlFileSystem fs = new AdlFileSystem();
+    Configuration configuration = new Configuration();
+    configuration.set(AZURE_AD_TOKEN_PROVIDER_CLASS_KEY,
+        "org.apache.hadoop.fs.adl.common.CustomMockTokenProvider");
+
+    fs.initialize(new URI("adl://temp.account.net"), configuration);
+
+    Assert.assertEquals("/usr", fs.toRelativeFilePath(new Path("/usr")));
+    Assert.assertEquals("/usr",
+        fs.toRelativeFilePath(new Path("adl://temp.account.net/usr")));
+
+    // When working directory is set.
+    fs.setWorkingDirectory(new Path("/a/b/"));
+    Assert.assertEquals("/usr", fs.toRelativeFilePath(new Path("/usr")));
+    Assert.assertEquals("/a/b/usr", fs.toRelativeFilePath(new Path("usr")));
+    Assert.assertEquals("/usr",
+        fs.toRelativeFilePath(new Path("adl://temp.account.net/usr")));
+    Assert.assertEquals("/usr",
+        fs.toRelativeFilePath(new Path("wasb://temp.account.net/usr")));
+  }
+
+}
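
The last assertion is the interesting one: toRelativeFilePath keeps only the
path component, so even a wasb:// URI reduces to /usr. A small sketch of the
same resolution rules using Hadoop's Path directly (hypothetical, to show why
"usr" becomes "/a/b/usr"):

    import org.apache.hadoop.fs.Path;

    public class PathResolutionSketch {
      public static void main(String[] args) {
        Path workingDir = new Path("/a/b/");
        // A relative path resolves against the working directory.
        System.out.println(new Path(workingDir, new Path("usr"))); // /a/b/usr
        // A fully qualified URI keeps only its path component.
        System.out.println(
            new Path("wasb://temp.account.net/usr").toUri().getPath()); // /usr
      }
    }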

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestValidateConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestValidateConfiguration.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestValidateConfiguration.java
new file mode 100644
index 0000000..e3025b2
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestValidateConfiguration.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.adl;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import static org.apache.hadoop.fs.adl.AdlConfKeys.ADL_BLOCK_SIZE;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .ADL_DEBUG_OVERRIDE_LOCAL_USER_AS_OWNER;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .ADL_DEBUG_SET_LOCAL_USER_AS_OWNER_DEFAULT;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .ADL_EXPERIMENT_POSITIONAL_READ_DEFAULT;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .ADL_EXPERIMENT_POSITIONAL_READ_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.ADL_REPLICATION_FACTOR;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.AZURE_AD_CLIENT_ID_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.AZURE_AD_CLIENT_SECRET_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.AZURE_AD_REFRESH_TOKEN_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.AZURE_AD_REFRESH_URL_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .AZURE_AD_TOKEN_PROVIDER_CLASS_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .AZURE_AD_TOKEN_PROVIDER_TYPE_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .DEFAULT_READ_AHEAD_BUFFER_SIZE;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .DEFAULT_WRITE_AHEAD_BUFFER_SIZE;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.LATENCY_TRACKER_DEFAULT;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.LATENCY_TRACKER_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.READ_AHEAD_BUFFER_SIZE_KEY;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .TOKEN_PROVIDER_TYPE_CLIENT_CRED;
+import static org.apache.hadoop.fs.adl.AdlConfKeys
+    .TOKEN_PROVIDER_TYPE_REFRESH_TOKEN;
+import static org.apache.hadoop.fs.adl.AdlConfKeys.WRITE_BUFFER_SIZE_KEY;
+
+/**
+ * Validate configuration keys defined for the adl storage file system.
+ */
+public class TestValidateConfiguration {
+
+  @Test
+  public void validateConfigurationKeys() {
+    Assert
+        .assertEquals("dfs.adls.oauth2.refresh.url", AZURE_AD_REFRESH_URL_KEY);
+    Assert.assertEquals("dfs.adls.oauth2.access.token.provider",
+        AZURE_AD_TOKEN_PROVIDER_CLASS_KEY);
+    Assert.assertEquals("dfs.adls.oauth2.client.id", AZURE_AD_CLIENT_ID_KEY);
+    Assert.assertEquals("dfs.adls.oauth2.refresh.token",
+        AZURE_AD_REFRESH_TOKEN_KEY);
+    Assert
+        .assertEquals("dfs.adls.oauth2.credential", AZURE_AD_CLIENT_SECRET_KEY);
+    Assert.assertEquals("adl.debug.override.localuserasfileowner",
+        ADL_DEBUG_OVERRIDE_LOCAL_USER_AS_OWNER);
+
+    Assert.assertEquals("dfs.adls.oauth2.access.token.provider.type",
+        AZURE_AD_TOKEN_PROVIDER_TYPE_KEY);
+
+    Assert.assertEquals("adl.feature.client.cache.readahead",
+        READ_AHEAD_BUFFER_SIZE_KEY);
+
+    Assert.assertEquals("adl.feature.client.cache.drop.behind.writes",
+        WRITE_BUFFER_SIZE_KEY);
+
+    Assert.assertEquals("RefreshToken", TOKEN_PROVIDER_TYPE_REFRESH_TOKEN);
+
+    Assert.assertEquals("ClientCredential", TOKEN_PROVIDER_TYPE_CLIENT_CRED);
+
+    Assert.assertEquals("adl.dfs.enable.client.latency.tracker",
+        LATENCY_TRACKER_KEY);
+
+    Assert.assertEquals(true, LATENCY_TRACKER_DEFAULT);
+
+    Assert.assertEquals(true, ADL_EXPERIMENT_POSITIONAL_READ_DEFAULT);
+
+    Assert.assertEquals("adl.feature.experiment.positional.read.enable",
+        ADL_EXPERIMENT_POSITIONAL_READ_KEY);
+
+    Assert.assertEquals(1, ADL_REPLICATION_FACTOR);
+    Assert.assertEquals(256 * 1024 * 1024, ADL_BLOCK_SIZE);
+    Assert.assertEquals(false, ADL_DEBUG_SET_LOCAL_USER_AS_OWNER_DEFAULT);
+    Assert.assertEquals(4 * 1024 * 1024, DEFAULT_READ_AHEAD_BUFFER_SIZE);
+    Assert.assertEquals(4 * 1024 * 1024, DEFAULT_WRITE_AHEAD_BUFFER_SIZE);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestableAdlFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestableAdlFileSystem.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestableAdlFileSystem.java
index 1e03bbf..4acb39b 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestableAdlFileSystem.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestableAdlFileSystem.java
@@ -20,8 +20,7 @@
 package org.apache.hadoop.fs.adl;
 
 /**
- * This class overrides AdlFileSystem to change transport scheme to http instead
- * of https to run against Mock Server.
+ * AdlFileSystem subclass that switches the transport scheme to http so the
+ * tests can run against a local mock server.
  */
 public class TestableAdlFileSystem extends AdlFileSystem {
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/CustomMockTokenProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/CustomMockTokenProvider.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/CustomMockTokenProvider.java
new file mode 100644
index 0000000..c48ca0e
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/CustomMockTokenProvider.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.adl.common;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.adl.oauth2.AzureADTokenProvider;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.Random;
+
+/**
+ * Custom token provider for tests, with token caching disabled.
+ */
+public class CustomMockTokenProvider extends AzureADTokenProvider {
+  private Random random;
+  private long expiryTime;
+  private int accessTokenRequestCount = 0;
+
+  @Override
+  public void initialize(Configuration configuration) throws IOException {
+    random = new Random();
+  }
+
+  @Override
+  public String getAccessToken() throws IOException {
+    accessTokenRequestCount++;
+    return String.valueOf(random.nextInt());
+  }
+
+  @Override
+  public Date getExpiryTime() {
+    Date before10Min = new Date();
+    before10Min.setTime(expiryTime);
+    return before10Min;
+  }
+
+  public void setExpiryTimeInMillisAfter(long timeInMillis) {
+    expiryTime = System.currentTimeMillis() + timeInMillis;
+  }
+
+  public int getAccessTokenRequestCount() {
+    return accessTokenRequestCount;
+  }
+}
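
setExpiryTimeInMillisAfter is the hook that lets token-cache tests steer
expiry from the outside, while getAccessTokenRequestCount exposes how often
the SDK actually asked for a token. A hedged usage sketch (whether a second
call triggers a fresh token depends on the SDK's caching, so only the counter
is shown here):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.adl.common.CustomMockTokenProvider;

    public class TokenExpirySketch {
      public static void main(String[] args) throws Exception {
        CustomMockTokenProvider provider = new CustomMockTokenProvider();
        provider.initialize(new Configuration());
        // Pretend the token expires two seconds from now.
        provider.setExpiryTimeInMillisAfter(2000);
        provider.getAccessToken();
        provider.getAccessToken();
        // Every getAccessToken() call is counted, so a test can verify
        // how many times the SDK requested a token.
        System.out.println(provider.getAccessTokenRequestCount()); // 2
      }
    }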

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/ExpectedResponse.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/ExpectedResponse.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/ExpectedResponse.java
new file mode 100644
index 0000000..dc8577d
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/ExpectedResponse.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.common;
+
+import com.squareup.okhttp.mockwebserver.MockResponse;
+
+import java.util.ArrayList;
+
+/**
+ * Supporting class to hold expected MockResponse object along with parameters
+ * for validation in test methods.
+ */
+public class ExpectedResponse {
+  private MockResponse response;
+  private ArrayList<String> expectedQueryParameters = new ArrayList<String>();
+  private int expectedBodySize;
+  private String httpRequestType;
+
+  public int getExpectedBodySize() {
+    return expectedBodySize;
+  }
+
+  public String getHttpRequestType() {
+    return httpRequestType;
+  }
+
+  public ArrayList<String> getExpectedQueryParameters() {
+    return expectedQueryParameters;
+  }
+
+  public MockResponse getResponse() {
+    return response;
+  }
+
+  ExpectedResponse set(MockResponse mockResponse) {
+    this.response = mockResponse;
+    return this;
+  }
+
+  ExpectedResponse addExpectedQueryParam(String param) {
+    expectedQueryParameters.add(param);
+    return this;
+  }
+
+  ExpectedResponse addExpectedBodySize(int bodySize) {
+    this.expectedBodySize = bodySize;
+    return this;
+  }
+
+  ExpectedResponse addExpectedHttpRequestType(String expectedHttpRequestType) {
+    this.httpRequestType = expectedHttpRequestType;
+    return this;
+  }
+}
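
The fluent setters are deliberately package-private, so only test
infrastructure in org.apache.hadoop.fs.adl.common can assemble instances. A
sketch of the intended chained construction, with hypothetical values:

    package org.apache.hadoop.fs.adl.common;

    import com.squareup.okhttp.mockwebserver.MockResponse;

    final class ExpectedResponseSketch {
      static ExpectedResponse fileStatusExpectation() {
        // Values are illustrative; real tests derive them from
        // TestADLResponseData.
        return new ExpectedResponse()
            .set(new MockResponse().setResponseCode(200).setBody("{}"))
            .addExpectedQueryParam("op=GETFILESTATUS")
            .addExpectedBodySize(0)
            .addExpectedHttpRequestType("GET");
      }

      private ExpectedResponseSketch() {
      }
    }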

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/Parallelized.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/Parallelized.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/Parallelized.java
new file mode 100644
index 0000000..b08a892
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/Parallelized.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.common;
+
+import org.junit.runners.Parameterized;
+import org.junit.runners.model.RunnerScheduler;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Provided for convenience to execute parameterized test cases concurrently.
+ */
+public class Parallelized extends Parameterized {
+
+  public Parallelized(Class classObj) throws Throwable {
+    super(classObj);
+    setScheduler(new ThreadPoolScheduler());
+  }
+
+  private static class ThreadPoolScheduler implements RunnerScheduler {
+    private ExecutorService executor;
+
+    public ThreadPoolScheduler() {
+      int numThreads = 10;
+      executor = Executors.newFixedThreadPool(numThreads);
+    }
+
+    public void finished() {
+      executor.shutdown();
+      try {
+        executor.awaitTermination(10, TimeUnit.MINUTES);
+      } catch (InterruptedException exc) {
+        throw new RuntimeException(exc);
+      }
+    }
+
+    public void schedule(Runnable childStatement) {
+      executor.submit(childStatement);
+    }
+  }
+}
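
Usage follows the standard JUnit Parameterized contract with this runner
substituted; each parameter set is scheduled on the shared 10-thread pool. A
minimal sketch with placeholder data:

    package org.apache.hadoop.fs.adl.common;

    import java.util.Arrays;
    import java.util.Collection;

    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;

    @RunWith(Parallelized.class)
    public class ParallelizedUsageSketch {
      @Parameterized.Parameters
      public static Collection<Object[]> data() {
        return Arrays.asList(new Object[][] {{1}, {2}, {3}});
      }

      private final int input;

      public ParallelizedUsageSketch(int input) {
        this.input = input;
      }

      @Test
      public void runsConcurrently() {
        // Each parameterized instance runs on the runner's thread pool.
        System.out.println(input + " on " + Thread.currentThread().getName());
      }
    }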

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/TestDataForRead.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/TestDataForRead.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/TestDataForRead.java
new file mode 100644
index 0000000..509b3f0
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/common/TestDataForRead.java
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.common;
+
+import com.squareup.okhttp.mockwebserver.Dispatcher;
+import com.squareup.okhttp.mockwebserver.MockResponse;
+import com.squareup.okhttp.mockwebserver.RecordedRequest;
+import okio.Buffer;
+import org.apache.hadoop.fs.adl.TestADLResponseData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Supporting class for mock tests that validate the Adls read operation.
+ */
+public class TestDataForRead {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TestDataForRead.class);
+
+  private byte[] actualData;
+  private ArrayList<ExpectedResponse> responses;
+  private Dispatcher dispatcher;
+  private int intensityOfTest;
+  private boolean checkOfNoOfCalls;
+  private int expectedNoNetworkCall;
+
+  public TestDataForRead(final byte[] actualData, int expectedNoNetworkCall,
+      int intensityOfTest, boolean checkOfNoOfCalls) {
+
+    this.checkOfNoOfCalls = checkOfNoOfCalls;
+    this.actualData = actualData;
+    responses = new ArrayList<ExpectedResponse>();
+    this.expectedNoNetworkCall = expectedNoNetworkCall;
+    this.intensityOfTest = intensityOfTest;
+
+    dispatcher = new Dispatcher() {
+      @Override
+      public MockResponse dispatch(RecordedRequest recordedRequest)
+          throws InterruptedException {
+
+        if (recordedRequest.getRequestLine().contains("op=GETFILESTATUS")) {
+          return new MockResponse().setResponseCode(200).setBody(
+              TestADLResponseData
+                  .getGetFileStatusJSONResponse(actualData.length));
+        }
+
+        if (recordedRequest.getRequestLine().contains("op=OPEN")) {
+          String request = recordedRequest.getRequestLine();
+          int offset = 0;
+          int byteCount = 0;
+
+          Pattern pattern = Pattern.compile("offset=([0-9]+)");
+          Matcher matcher = pattern.matcher(request);
+          if (matcher.find()) {
+            LOG.debug(matcher.group(1));
+            offset = Integer.parseInt(matcher.group(1));
+          }
+
+          pattern = Pattern.compile("length=([0-9]+)");
+          matcher = pattern.matcher(request);
+          if (matcher.find()) {
+            LOG.debug(matcher.group(1));
+            byteCount = Integer.parseInt(matcher.group(1));
+          }
+
+          Buffer buf = new Buffer();
+          buf.write(actualData, offset,
+              Math.min(actualData.length - offset, byteCount));
+          return new MockResponse().setResponseCode(200)
+              .setChunkedBody(buf, 4 * 1024 * 1024);
+        }
+
+        return new MockResponse().setBody("NOT SUPPORTED").setResponseCode(501);
+      }
+    };
+  }
+
+  public boolean isCheckOfNoOfCalls() {
+    return checkOfNoOfCalls;
+  }
+
+  public int getExpectedNoNetworkCall() {
+    return expectedNoNetworkCall;
+  }
+
+  public int getIntensityOfTest() {
+    return intensityOfTest;
+  }
+
+  public byte[] getActualData() {
+    return actualData;
+  }
+
+  public ArrayList<ExpectedResponse> getResponses() {
+    return responses;
+  }
+
+  public Dispatcher getDispatcher() {
+    return dispatcher;
+  }
+}
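
Unlike the enqueue-based tests earlier, a Dispatcher answers by inspecting
each request, which is what lets read tests serve arbitrary offset/length
ranges from one backing array. Installing it is a single call; a sketch with
a placeholder payload:

    import com.squareup.okhttp.mockwebserver.MockWebServer;

    import org.apache.hadoop.fs.adl.common.TestDataForRead;

    public class DispatcherWiringSketch {
      public static void main(String[] args) throws Exception {
        byte[] data = new byte[16 * 1024]; // placeholder payload
        TestDataForRead testData = new TestDataForRead(data, 1, 0, false);

        MockWebServer server = new MockWebServer();
        // Once a dispatcher is installed, enqueue() is bypassed and every
        // request is routed through dispatch() instead.
        server.setDispatcher(testData.getDispatcher());
        server.start();
        // ... exercise reads against server.getUrl(""), then:
        server.shutdown();
      }
    }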

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageConfiguration.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageConfiguration.java
index d257768..f7f0b71 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageConfiguration.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageConfiguration.java
@@ -21,36 +21,39 @@ package org.apache.hadoop.fs.adl.live;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.adl.AdlFileSystem;
 
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 
 /**
- * Utility class to configure real Adls storage to run Live test suite against.
+ * Configure Adl storage file system.
  */
 public final class AdlStorageConfiguration {
-  private AdlStorageConfiguration() {}
-
   private static final String CONTRACT_ENABLE_KEY =
-      "dfs.adl.test.contract" + ".enable";
+      "dfs.adl.test.contract.enable";
+
   private static final String TEST_CONFIGURATION_FILE_NAME =
       "contract-test-options.xml";
   private static final String TEST_SUPPORTED_TEST_CONFIGURATION_FILE_NAME =
       "adls.xml";
+  private static final String KEY_FILE_SYSTEM_IMPL = "fs.contract.test.fs";
+  private static final String KEY_FILE_SYSTEM = "test.fs.adl.name";
 
   private static boolean isContractTestEnabled = false;
   private static Configuration conf = null;
 
-  public static Configuration getConfiguration() {
-    Configuration localConf = new Configuration();
-    localConf.addResource(TEST_CONFIGURATION_FILE_NAME);
-    localConf.addResource(TEST_SUPPORTED_TEST_CONFIGURATION_FILE_NAME);
-    return localConf;
+  private AdlStorageConfiguration() {
+  }
+
+  public synchronized static Configuration getConfiguration() {
+    Configuration newConf = new Configuration();
+    newConf.addResource(TEST_CONFIGURATION_FILE_NAME);
+    newConf.addResource(TEST_SUPPORTED_TEST_CONFIGURATION_FILE_NAME);
+    return newConf;
   }
 
-  public static boolean isContractTestEnabled() {
+  public synchronized static boolean isContractTestEnabled() {
     if (conf == null) {
       conf = getConfiguration();
     }
@@ -59,18 +62,33 @@ public final class AdlStorageConfiguration {
     return isContractTestEnabled;
   }
 
-  public static FileSystem createAdlStorageConnector()
+  public synchronized static FileSystem createStorageConnector()
       throws URISyntaxException, IOException {
     if (conf == null) {
       conf = getConfiguration();
     }
 
-    if(!isContractTestEnabled()) {
+    if (!isContractTestEnabled()) {
       return null;
     }
 
-    AdlFileSystem fileSystem = new AdlFileSystem();
-    fileSystem.initialize(new URI(conf.get("fs.defaultFS")), conf);
-    return fileSystem;
+    String fileSystem = conf.get(KEY_FILE_SYSTEM);
+    if (fileSystem == null || fileSystem.trim().length() == 0) {
+      throw new IOException("Default file system not configured.");
+    }
+    String fileSystemImpl = conf.get(KEY_FILE_SYSTEM_IMPL);
+    if (fileSystemImpl == null || fileSystemImpl.trim().length() == 0) {
+      throw new IOException(
+          "Configuration " + KEY_FILE_SYSTEM_IMPL + "does not exist.");
+    }
+    FileSystem fs = null;
+    try {
+      fs = (FileSystem) Class.forName(fileSystemImpl).newInstance();
+    } catch (Exception e) {
+      throw new IOException("Could not instantiate the filesystem.");
+    }
+
+    fs.initialize(new URI(conf.get(KEY_FILE_SYSTEM)), conf);
+    return fs;
   }
 }
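
The connector now reads both the target URI and the FileSystem class from
configuration. A hedged programmatic equivalent of the
contract-test-options.xml entries it expects (the account host below is a
placeholder):

    import org.apache.hadoop.conf.Configuration;

    public class LiveTestConfigSketch {
      public static Configuration liveTestConf() {
        Configuration conf = new Configuration();
        // Keys as defined in AdlStorageConfiguration above; the host is a
        // placeholder for a real account.
        conf.setBoolean("dfs.adl.test.contract.enable", true);
        conf.set("test.fs.adl.name",
            "adl://youraccount.azuredatalakestore.net");
        conf.set("fs.contract.test.fs",
            "org.apache.hadoop.fs.adl.AdlFileSystem");
        return conf;
      }
    }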

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c61ad24/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageContract.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageContract.java
index 1e40199..262b636 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageContract.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageContract.java
@@ -27,20 +27,16 @@ import org.apache.hadoop.fs.contract.AbstractFSContract;
 import java.io.IOException;
 import java.net.URISyntaxException;
 
-/**
- * Extension of AbstractFSContract representing a filesystem contract that
- * a Adls filesystem implementation is expected implement.
- */
-public class AdlStorageContract extends AbstractFSContract {
+class AdlStorageContract extends AbstractFSContract {
   private FileSystem fs;
 
   protected AdlStorageContract(Configuration conf) {
     super(conf);
     try {
-      fs = AdlStorageConfiguration.createAdlStorageConnector();
+      fs = AdlStorageConfiguration.createStorageConnector();
     } catch (URISyntaxException e) {
       throw new IllegalStateException("Can not initialize ADL FileSystem. "
-          + "Please check fs.defaultFS property.", e);
+          + "Please check test.fs.adl.name property.", e);
     } catch (IOException e) {
       throw new IllegalStateException("Can not initialize ADL FileSystem.", e);
     }
@@ -59,7 +55,12 @@ public class AdlStorageContract extends AbstractFSContract {
 
   @Override
   public Path getTestPath() {
-    Path path = new Path("/test");
-    return path;
+    return new Path("/test");
   }
+
+  @Override
+  public boolean isEnabled() {
+    return AdlStorageConfiguration.isContractTestEnabled();
+  }
+
 }

