diff --git bigtop-tests/smoke-tests/hdfs-ec/build.gradle bigtop-tests/smoke-tests/hdfs-ec/build.gradle
new file mode 100644
index 0000000..3a51cc2
--- /dev/null
+++ bigtop-tests/smoke-tests/hdfs-ec/build.gradle
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+def tests_to_include() {
+  return [
+      "TestECPolicy.groovy",
+      "TestPutEC.groovy",
+      /*
+      "TestDFSCLI.java",        // Can't run until we learn how to change
+                                // effective user in the runtime
+      */
+  ];
+}
+
+def junitVersion = '4.11'
+dependencies {
+  compile group: 'junit', name: 'junit', version: junitVersion, transitive: 'true'
+  compile group: 'org.apache.hadoop', name: 'hadoop-common',
+      version: hadoopVersion, classifier: 'tests', transitive: 'true'
+  compile group: 'org.apache.hadoop', name: 'hadoop-hdfs',
+      version: hadoopVersion, classifier: 'tests', transitive: 'true'
+  compile group: 'org.apache.hadoop', name: 'hadoop-common', version: hadoopVersion, transitive: 'true'
+  compile group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: hadoopVersion, transitive: 'true'
+  if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR)
+}
+
+sourceSets {
+  main {
+    java {
+      srcDirs = [
+        "${BIGTOP_HOME}/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs-ec",
+        "${BIGTOP_HOME}/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs",
+        "${BIGTOP_HOME}/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs"
+      ]
+    }
+  }
+  test {
+    groovy {
+      resources {
+        srcDirs = [
+            "${BIGTOP_HOME}/bigtop-tests/test-artifacts/hadoop/src/main/resources",
+        ]
+      }
+      srcDirs = [
+        "${BIGTOP_HOME}/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs",
+        "${BIGTOP_HOME}/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs-ec"
+      ]
+      exclude {
+        FileTreeElement elem -> (doExclude(elem.getName()))
+      }
+    }
+  }
+}
+
+test.doFirst {
+  checkEnv(["HADOOP_CONF_DIR"])
+}
+
+test {
+  // Change the default location where the configuration file is picked up
+  systemProperty 'test.cache.data', "${buildDir}/resources/test/clitest_data/"
+  // Change the default location where test data is picked up
+  systemProperty 'test.resources.dir', "${buildDir}/resources/test/"
+  // The default user for the HCFS CLI test is the file-system superuser, 'hdfs'.
+  // However, in a provisioned cluster that test runs as root, so override the
+  // username to keep the test's assertion on the effective user from failing
+  systemProperty 'hcfs.root.username', System.properties['user.name']
+}
diff --git bigtop-tests/smoke-tests/hdfs/build.gradle bigtop-tests/smoke-tests/hdfs/build.gradle
index ee1da46..4d42df7 100644
--- bigtop-tests/smoke-tests/hdfs/build.gradle
+++ bigtop-tests/smoke-tests/hdfs/build.gradle
@@ -17,15 +17,16 @@
  */
 def tests_to_include() {
   return [
-      "TestBlockRecovery.groovy",
-      "TestDistCpIntra.groovy",
-      "TestFileAppend.groovy",
-      "TestFsck.groovy",
-      "TestHDFSQuota.groovy",
-      "TestHDFSCLI.java",
-      "TestTextSnappy.groovy",
-      "TestDFSAdmin.groovy",
-      "TestHDFSBalancer.groovy",
+      "TestPut.groovy",
+//      "TestBlockRecovery.groovy",
+//      "TestDistCpIntra.groovy",
+//      "TestFileAppend.groovy",
+//      "TestFsck.groovy",
+//      "TestHDFSQuota.groovy",
+//      "TestHDFSCLI.java",
+//      "TestTextSnappy.groovy",
+//      "TestDFSAdmin.groovy",
+//      "TestHDFSBalancer.groovy",
       /*
       "TestDFSCLI.java",        // Can't run until we learn how to change
                                 // effective user in the runtime
diff --git bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs-ec/TestECPolicy.groovy bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs-ec/TestECPolicy.groovy
new file mode 100644
index 0000000..75600d3
--- /dev/null
+++ bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs-ec/TestECPolicy.groovy
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs
+
+import org.junit.Test
+import org.apache.bigtop.itest.shell.Shell
+import static org.junit.Assert.assertTrue
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+
+/**
+ * Tests the HDFS Erasure Coding Policy commands.
+ */
+public class TestECPolicy {
+  private static Shell sh = new Shell("/bin/bash -s");
+  private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
+  //user identity used to build the absolute HDFS test path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+      replaceAll("\\s", "").replaceAll(":", "");
+  private static String testECPolicyDir = "testECPolicyDir" + date;
+  private static String TESTDIR = "/user/$USERNAME/$testECPolicyDir";
+
+
+  @Test
+  public void testECPolicyBasic() {
+    sh.exec("hdfs dfs -mkdir $TESTDIR")
+    logError(sh)
+    assertTrue("Could not create dir: $TESTDIR", sh.getRet() == 0);
+    shHDFS.exec("hdfs erasurecode -setPolicy -p RS-6-3-64k $TESTDIR")
+    logError(shHDFS)
+    assertTrue("Could not set EC policy for $TESTDIR", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs erasurecode -getPolicy $TESTDIR")
+    logError(shHDFS)
+    assertTrue("Could not get EC policy for $TESTDIR", shHDFS.getRet() == 0);
+    sh.exec("hdfs dfs -rm -R $TESTDIR")
+    logError(sh)
+    assertTrue("Could not remove EC dir: $TESTDIR", sh.getRet() == 0);
+  }
+
+}
diff --git bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs-ec/TestPutEC.groovy bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs-ec/TestPutEC.groovy
new file mode 100644
index 0000000..18ab6fa
--- /dev/null
+++ bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs-ec/TestPutEC.groovy
@@ -0,0 +1,193 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError;
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestPutEC {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
+  //user identity used to build absolute HDFS paths
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+      replaceAll("\\s", "").replaceAll(":", "");
+  private static String namenode = "";
+  private static String testPutInputDir = "testPutInputDir" + date;
+  private static String testPutInputs = "test_data_TestPut";
+  private static String testPutOut = "testPutOut" + date;
+  private static String testPutOutCmp = "testPutOutCmp" + date;
+  static List<String> TestPut_output = new ArrayList<String>();
+  static List<String> TestPut_error = new ArrayList<String>();
+  private static String TESTDIR = "/user/$USERNAME/$testPutInputDir";
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestPutEC.class, ".", null);
+    sh.exec("cp -r test_data test_data_TestPut");
+    assertTrue("Could not copy data into test_data_TestPut", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+    shHDFS.exec("hdfs erasurecode -setPolicy -p RS-6-3-64k $TESTDIR")
+    logError(shHDFS)
+    assertTrue("Could not set EC policy for $TESTDIR", shHDFS.getRet() == 0);
+    println("Running Put:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testPutOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testPutOut");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+    sh.exec("test -f $testPutOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testPutOutCmp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testPutInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testPutInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testPutDirectory() {
+    println("testPutDirectory");
+    // upload directory to hdfs
+    sh.exec("hdfs dfs -put $testPutInputs $TESTDIR ");
+    assertTrue("Could not put files to HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -ls -R $TESTDIR/$testPutInputs ");
+    assertTrue("could not find the copied directory on hdfs",
+        sh.getRet() == 0);
+
+    assertTrue("Able to find uploaded files on hdfs?",
+        sh.getOut().grep(~/.*test_3.*/).size() > 0);
+    result = false;
+  }
+
+  @Test
+  public void testPutFile() {
+    println("testPutFile");
+    // upload single files
+    sh.exec("hdfs dfs -put $testPutInputs/test_1.txt $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -cat $TESTDIR/test_1.txt &> $testPutOut");
+    assertTrue("Able to cat data from $TESTDIR/test_1.txt from hdfs?",
+        sh.getRet() == 0);
+
+    sh.exec("cat $testPutInputs/test_1.txt &> $testPutOutCmp");
+    assertTrue("Able to cat data from $testPutInputs/test_1.txt from local?",
+        sh.getRet() == 0);
+
+    sh.exec("diff $testPutOutCmp $testPutOut");
+    assertTrue("Uploaded file data differs with local file?",
+        sh.getRet() == 0);
+  }
+
+  @Test
+  public void testPutMultipleFiles() {
+    println("testPutMultipleFiles");
+    // copy multiple input files to hdfs
+    sh.exec("hdfs dfs -put $testPutInputs/test_2.txt $testPutInputs/test.zip " +
+        "$testPutInputs/test_3 $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls -R $TESTDIR ");
+    assertTrue("could not find the copied directory on hdfs",
+        sh.getRet() == 0);
+
+    assertTrue("Does test_2.txt uploaded properly?",
+        sh.getOut().grep(~/.*test_2.txt.*/).size() > 0);
+
+    assertTrue("Does test.zip uploaded properly?",
+        sh.getOut().grep(~/.*test.zip.*/).size() > 0);
+
+    assertTrue("Does test_3 uploaded properly?",
+        sh.getOut().grep(~/.*test_3.*/).size() > 0);
+  }
+
+  @Test
+  public void testPutNonExistingFile() {
+    println("testPutNonExistingFile");
+    sh.exec("hdfs dfs -put $testPutInputs/test_3.txt $TESTDIR");
+    assertTrue("A non existing file got copied to hdfs", sh.getRet() == 1);
+
+    String searchToken = "put: `" + testPutInputs + "/test_3.txt': " +
+        "No such file or directory";
+    println(searchToken);
+    assertTrue("Able to Upload non-existing file?",
+        sh.getErr().grep(~/.*${searchToken}.*/).size() > 0);
+  }
+
+  @Test
+  public void testPutToOverWriteFile() {
+    println("testPutNonExistingFile");
+    // copy a file which is already present on HDFS at the destination
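+    // '-test -f' returns 0 when the file already exists; upload test_1.txt first if it is missing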
+    sh.exec("hdfs dfs -test -f $TESTDIR/test_1.txt");
+    if (sh.getRet() == 1) {
+      sh.exec("hdfs dfs -put $testPutInputs/test_1.txt $TESTDIR");
+      assertTrue("Able to upload file?", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -put $testPutInputs/test_1.txt $TESTDIR ");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 1);
+
+    String searchToken = "put: `/user/" + USERNAME + "/" +
+        testPutInputDir + "/test_1.txt': File exists";
+    assertTrue("Able to Upload non-existing file?",
+        sh.getErr().grep(~/.*${searchToken}.*/).size() > 0);
+  }
+}
