http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.testParallelFailsOnExec.approved.txt
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.testParallelFailsOnExec.approved.txt b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.testParallelFailsOnExec.approved.txt
index df86b02..2ae7fa1 100644
--- a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.testParallelFailsOnExec.approved.txt
+++ b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.testParallelFailsOnExec.approved.txt
@@ -1,5 +1,5 @@
-/tmp/hive-ptest-units/TestHostExecutor/logs/succeeded/driver-parallel-1 
/some/local/dir/somehost-someuser-1/logs/
-/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-driver-parallel-1.sh 
/some/local/dir/somehost-someuser-0/scratch/hiveptest-driver-parallel-1.sh
-/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-driver-parallel-1.sh 
/some/local/dir/somehost-someuser-1/scratch/hiveptest-driver-parallel-1.sh
-bash /some/local/dir/somehost-someuser-0/scratch/hiveptest-driver-parallel-1.sh
-bash /some/local/dir/somehost-someuser-1/scratch/hiveptest-driver-parallel-1.sh
\ No newline at end of file
+/tmp/hive-ptest-units/TestHostExecutor/logs/succeeded/1_driver-parallel-1 
/some/local/dir/somehost-someuser-1/logs/
+/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-1_driver-parallel-1.sh
 /some/local/dir/somehost-someuser-0/scratch/hiveptest-1_driver-parallel-1.sh
+/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-1_driver-parallel-1.sh
 /some/local/dir/somehost-someuser-1/scratch/hiveptest-1_driver-parallel-1.sh
+bash 
/some/local/dir/somehost-someuser-0/scratch/hiveptest-1_driver-parallel-1.sh
+bash 
/some/local/dir/somehost-someuser-1/scratch/hiveptest-1_driver-parallel-1.sh
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.testParallelFailsOnRsync.approved.txt
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.testParallelFailsOnRsync.approved.txt b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.testParallelFailsOnRsync.approved.txt
index 4f32a9f..76ab9e3 100644
--- a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.testParallelFailsOnRsync.approved.txt
+++ b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.testParallelFailsOnRsync.approved.txt
@@ -1,4 +1,4 @@
-/tmp/hive-ptest-units/TestHostExecutor/logs/succeeded/driver-parallel-1 
/some/local/dir/somehost-someuser-1/logs/
-/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-driver-parallel-1.sh 
/some/local/dir/somehost-someuser-0/scratch/hiveptest-driver-parallel-1.sh
-/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-driver-parallel-1.sh 
/some/local/dir/somehost-someuser-1/scratch/hiveptest-driver-parallel-1.sh
-bash /some/local/dir/somehost-someuser-1/scratch/hiveptest-driver-parallel-1.sh
\ No newline at end of file
+/tmp/hive-ptest-units/TestHostExecutor/logs/succeeded/1_driver-parallel-1 
/some/local/dir/somehost-someuser-1/logs/
+/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-1_driver-parallel-1.sh
 /some/local/dir/somehost-someuser-0/scratch/hiveptest-1_driver-parallel-1.sh
+/tmp/hive-ptest-units/TestHostExecutor/scratch/hiveptest-1_driver-parallel-1.sh
 /some/local/dir/somehost-someuser-1/scratch/hiveptest-1_driver-parallel-1.sh
+bash 
/some/local/dir/somehost-someuser-1/scratch/hiveptest-1_driver-parallel-1.sh
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testAlternativeTestJVM.approved.txt
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testAlternativeTestJVM.approved.txt b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testAlternativeTestJVM.approved.txt
index 092461b..5318a83 100644
--- a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testAlternativeTestJVM.approved.txt
+++ b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testAlternativeTestJVM.approved.txt
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 set -x
+date +"%Y-%m-%d %T.%3N"
 umask 0022
 echo $$
 ps -e -o pid,pgrp,user,args
@@ -37,6 +38,8 @@ export M2_OPTS="-Xmx1g -XX:MaxPermSize=256m 
-Djava.io.tmpdir=/some/log/dir/tmp $
 export HADOOP_ROOT_LOGGER=INFO,console
 export HADOOP_OPTS="-Dhive.log.dir=/some/log/dir -Dhive.query.id=hadoop 
-Djava.io.tmpdir=/some/log/dir/tmp"
 cd /some/local/dir/instance-1/apache-source || exit 1
+date +"%Y-%m-%d %T.%3N"
+echo "Pre test cleanup"
 if [[ -s batch.pid ]]
 then
   while read pid
@@ -52,10 +55,11 @@ echo "$$" > batch.pid
 find ./ -name 'TEST-*.xml' -delete
 find ./ -name 'hive.log' -delete
 find ./ -name junit_metastore_db | xargs -r rm -rf
+date +"%Y-%m-%d %T.%3N"
+echo "Pre test cleanup done"
 ret=0
 if [[ "ant" == "maven" ]]
 then
-  testModule=$(find ./ -name 'TestCliDriver.java' | awk -F'/' '{print $2}')
   if [[ -z "$testModule" ]]
   then
     testModule=./
@@ -63,6 +67,7 @@ then
   pushd $testModule
   timeout 40m mvn -B test -Dmaven.repo.local=/some/local/dir/instance-1/maven \
     $mavenArgs $mavenTestArgs -Dtest=arg1 1>/some/log/dir/maven-test.txt 2>&1 
</dev/null &
+  date +"%Y-%m-%d %T.%3N"
 
   pid=$!
 
@@ -85,6 +90,7 @@ fi
 echo $pid >> batch.pid
 wait $pid
 ret=$?
+date +"%Y-%m-%d %T.%3N"
 find ./ -type f -name hive.log -o -name spark.log -o -name derby.log | \
   xargs -I {} sh -c 'f=$(basename {}); test -f /some/log/dir/$f && 
f=$f-$(uuidgen); mv {} /some/log/dir/$f'
 find ./ -type f -name 'TEST-*.xml' | \
@@ -93,7 +99,7 @@ find ./ -path "*/spark/work" | \
   xargs -I {} sh -c 'mv {} /some/log/dir/spark-log'
 find ./ -type f -name 'syslog*' | \
   xargs -I {} sh -c 'mkdir -p /some/log/dir/syslogs; mv {} 
/some/log/dir/syslogs'
-
+date +"%Y-%m-%d %T.%3N"
 
 if [[ -f /some/log/dir/.log ]]
 then

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testBatch.approved.txt
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testBatch.approved.txt b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testBatch.approved.txt
index 3270167..e165240 100644
--- a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testBatch.approved.txt
+++ b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testBatch.approved.txt
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 set -x
+date +"%Y-%m-%d %T.%3N"
 umask 0022
 echo $$
 ps -e -o pid,pgrp,user,args
@@ -37,6 +38,8 @@ export M2_OPTS="-Xmx1g -XX:MaxPermSize=256m 
-Djava.io.tmpdir=/some/log/dir/tmp $
 export HADOOP_ROOT_LOGGER=INFO,console
 export HADOOP_OPTS="-Dhive.log.dir=/some/log/dir -Dhive.query.id=hadoop 
-Djava.io.tmpdir=/some/log/dir/tmp"
 cd /some/local/dir/instance-1/apache-source || exit 1
+date +"%Y-%m-%d %T.%3N"
+echo "Pre test cleanup"
 if [[ -s batch.pid ]]
 then
   while read pid
@@ -52,10 +55,11 @@ echo "$$" > batch.pid
 find ./ -name 'TEST-*.xml' -delete
 find ./ -name 'hive.log' -delete
 find ./ -name junit_metastore_db | xargs -r rm -rf
+date +"%Y-%m-%d %T.%3N"
+echo "Pre test cleanup done"
 ret=0
 if [[ "maven" == "maven" ]]
 then
-  testModule=$(find ./ -name 'TestCliDriver.java' | awk -F'/' '{print $2}')
   if [[ -z "$testModule" ]]
   then
     testModule=./
@@ -63,6 +67,7 @@ then
   pushd $testModule
   timeout 40m mvn -B test -Dmaven.repo.local=/some/local/dir/instance-1/maven \
     -Dant=arg1 $mavenTestArgs -Dtest=arg1 1>/some/log/dir/maven-test.txt 2>&1 
</dev/null &
+  date +"%Y-%m-%d %T.%3N"
 
   pid=$!
 
@@ -85,6 +90,7 @@ fi
 echo $pid >> batch.pid
 wait $pid
 ret=$?
+date +"%Y-%m-%d %T.%3N"
 find ./ -type f -name hive.log -o -name spark.log -o -name derby.log | \
   xargs -I {} sh -c 'f=$(basename {}); test -f /some/log/dir/$f && 
f=$f-$(uuidgen); mv {} /some/log/dir/$f'
 find ./ -type f -name 'TEST-*.xml' | \
@@ -93,7 +99,7 @@ find ./ -path "*/spark/work" | \
   xargs -I {} sh -c 'mv {} /some/log/dir/spark-log'
 find ./ -type f -name 'syslog*' | \
   xargs -I {} sh -c 'mkdir -p /some/log/dir/syslogs; mv {} 
/some/log/dir/syslogs'
-
+date +"%Y-%m-%d %T.%3N"
 
 if [[ -f /some/log/dir/.log ]]
 then

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepGit.approved.txt
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepGit.approved.txt b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepGit.approved.txt
index d58d910..29d2413 100644
--- a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepGit.approved.txt
+++ b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepGit.approved.txt
@@ -15,6 +15,7 @@
 
 set -e
 set -x
+date +"%Y-%m-%d %T.%3N"
 if [[ -n "/usr/java/jdk1.7" ]]
 then
   export JAVA_HOME=/usr/java/jdk1.7
@@ -63,13 +64,15 @@ cd /some/working/dir/
     then
       git clone git:///repo1 apache-source
     fi
+    date +"%Y-%m-%d %T.%3N"
     cd apache-source
     git fetch origin
     git reset --hard HEAD && git clean -f -d
     git checkout branch-1 || git checkout -b branch-1 origin/branch-1
     git reset --hard origin/branch-1
     git merge --ff-only origin/branch-1
-    git gc
+    #git gc
+    date +"%Y-%m-%d %T.%3N"
   else
     echo "Unknown repository type 'git'"
     exit 1
@@ -85,8 +88,11 @@ cd /some/working/dir/
   then
     rm -rf /some/working/dir/maven/org/apache/hive
         mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven -X -Phadoop-2
-    cd itests
-    mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven -X -Phadoop-2
+    if [[ -d "itests" ]]
+    then
+      cd itests
+      mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven -X -Phadoop-2
+    fi
   elif [[ "${buildTool}" == "ant" ]]
   then
     ant -Dant=arg1 -Divy.default.ivy.user.dir=/some/working/dir/ivy \
@@ -96,5 +102,6 @@ cd /some/working/dir/
      echo "Unknown build tool ${buildTool}"
      exit 127
    fi
+   date +"%Y-%m-%d %T.%3N"
 ) 2>&1 | tee /some/log/dir/source-prep.txt
 exit ${PIPESTATUS[0]}

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepHadoop1.approved.txt
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepHadoop1.approved.txt b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepHadoop1.approved.txt
index 1b9ca94..361b9bb 100644
--- a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepHadoop1.approved.txt
+++ b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepHadoop1.approved.txt
@@ -15,6 +15,7 @@
 
 set -e
 set -x
+date +"%Y-%m-%d %T.%3N"
 if [[ -n "/usr/java/jdk1.7" ]]
 then
   export JAVA_HOME=/usr/java/jdk1.7
@@ -63,13 +64,15 @@ cd /some/working/dir/
     then
       git clone https://svn.apache.org/repos/asf/hive/trunk apache-source
     fi
+    date +"%Y-%m-%d %T.%3N"
     cd apache-source
     git fetch origin
     git reset --hard HEAD && git clean -f -d
     git checkout  || git checkout -b  origin/
     git reset --hard origin/
     git merge --ff-only origin/
-    git gc
+    #git gc
+    date +"%Y-%m-%d %T.%3N"
   else
     echo "Unknown repository type 'svn'"
     exit 1
@@ -89,13 +92,19 @@ cd /some/working/dir/
     for i in $(echo $ADDITIONAL_PROFILES | tr "," "\n")
       do
         mvn clean install -DskipTests -P$i;
-        cd itests
-        mvn clean install -DskipTests -P$i;
+        if [[ "-d itests" ]]
+        then
+          cd itests
+          mvn clean install -DskipTests -P$i;
         cd ..
+        fi
       done
         mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven $mavenArgs $mavenBuildArgs
-    cd itests
-    mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven $mavenArgs $mavenBuildArgs
+    if [[ -d "itests" ]]
+    then
+      cd itests
+      mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven $mavenArgs $mavenBuildArgs
+    fi
   elif [[ "maven" == "ant" ]]
   then
     ant -Dant=arg1 -Divy.default.ivy.user.dir=/some/working/dir/ivy \
@@ -105,5 +114,6 @@ cd /some/working/dir/
      echo "Unknown build tool maven"
      exit 127
    fi
+   date +"%Y-%m-%d %T.%3N"
 ) 2>&1 | tee /some/log/dir/source-prep.txt
 exit ${PIPESTATUS[0]}

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepNone.approved.txt
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepNone.approved.txt b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepNone.approved.txt
index dde8822..5f494ee 100644
--- a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepNone.approved.txt
+++ b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepNone.approved.txt
@@ -15,6 +15,7 @@
 
 set -e
 set -x
+date +"%Y-%m-%d %T.%3N"
 if [[ -n "/usr/java/jdk1.7" ]]
 then
   export JAVA_HOME=/usr/java/jdk1.7
@@ -63,13 +64,15 @@ cd /some/working/dir/
     then
       git clone git:///repo1 apache-source
     fi
+    date +"%Y-%m-%d %T.%3N"
     cd apache-source
     git fetch origin
     git reset --hard HEAD && git clean -f -d
     git checkout branch-1 || git checkout -b branch-1 origin/branch-1
     git reset --hard origin/branch-1
     git merge --ff-only origin/branch-1
-    git gc
+    #git gc
+    date +"%Y-%m-%d %T.%3N"
   else
     echo "Unknown repository type '${repositoryType}'"
     exit 1
@@ -85,8 +88,11 @@ cd /some/working/dir/
   then
     rm -rf /some/working/dir/maven/org/apache/hive
         mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven $mavenArgs $mavenBuildArgs
-    cd itests
-    mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven $mavenArgs $mavenBuildArgs
+    if [[ -d "itests" ]]
+    then
+      cd itests
+      mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven $mavenArgs $mavenBuildArgs
+    fi
   elif [[ "ant" == "ant" ]]
   then
     ant -Dant=arg1 -Divy.default.ivy.user.dir=/some/working/dir/ivy \
@@ -96,5 +102,6 @@ cd /some/working/dir/
      echo "Unknown build tool ant"
      exit 127
    fi
+   date +"%Y-%m-%d %T.%3N"
 ) 2>&1 | tee /some/log/dir/source-prep.txt
 exit ${PIPESTATUS[0]}

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepSvn.approved.txt
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepSvn.approved.txt b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepSvn.approved.txt
index f3eec2d..9de17af 100644
--- a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepSvn.approved.txt
+++ b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepSvn.approved.txt
@@ -15,6 +15,7 @@
 
 set -e
 set -x
+date +"%Y-%m-%d %T.%3N"
 if [[ -n "/usr/java/jdk1.7" ]]
 then
   export JAVA_HOME=/usr/java/jdk1.7
@@ -63,13 +64,15 @@ cd /some/working/dir/
     then
       git clone https://svn.apache.org/repos/asf/hive/trunk apache-source
     fi
+    date +"%Y-%m-%d %T.%3N"
     cd apache-source
     git fetch origin
     git reset --hard HEAD && git clean -f -d
     git checkout  || git checkout -b  origin/
     git reset --hard origin/
     git merge --ff-only origin/
-    git gc
+    #git gc
+    date +"%Y-%m-%d %T.%3N"
   else
     echo "Unknown repository type 'svn'"
     exit 1
@@ -85,8 +88,11 @@ cd /some/working/dir/
   then
     rm -rf /some/working/dir/maven/org/apache/hive
         mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven $mavenArgs $mavenBuildArgs
-    cd itests
-    mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven $mavenArgs $mavenBuildArgs
+    if [[ -d "itests" ]]
+    then
+      cd itests
+      mvn -B clean install -DskipTests 
-Dmaven.repo.local=/some/working/dir/maven $mavenArgs $mavenBuildArgs
+    fi
   elif [[ "maven" == "ant" ]]
   then
     ant -Dant=arg1 -Divy.default.ivy.user.dir=/some/working/dir/ivy \
@@ -96,5 +102,6 @@ cd /some/working/dir/
      echo "Unknown build tool maven"
      exit 127
    fi
+   date +"%Y-%m-%d %T.%3N"
 ) 2>&1 | tee /some/log/dir/source-prep.txt
 exit ${PIPESTATUS[0]}

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestQFileTestBatch.java
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestQFileTestBatch.java b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestQFileTestBatch.java
index 0257591..fb7bee8 100644
--- a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestQFileTestBatch.java
+++ b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestQFileTestBatch.java
@@ -19,6 +19,7 @@
 package org.apache.hive.ptest.execution.conf;
 
 import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import junit.framework.Assert;
 
@@ -44,26 +45,26 @@ public class TestQFileTestBatch {
   @Test
   public void testParallel() throws Exception {
     QFileTestBatch batch =
-        new QFileTestBatch("testcase", DRIVER, QUERY_FILES_PROPERTY, tests, 
true, TEST_MODULE_NAME);
+        new QFileTestBatch(new AtomicInteger(1), "testcase", DRIVER, 
QUERY_FILES_PROPERTY, tests, true, TEST_MODULE_NAME);
     Assert.assertTrue(batch.isParallel());
     Assert.assertEquals(DRIVER, batch.getDriver());
     Assert.assertEquals(Joiner.on("-").join(DRIVER, "a", "b", "c"), 
batch.getName());
     Assert.assertEquals(String.format("-Dtestcase=%s -D%s=a,b,c", DRIVER,
         QUERY_FILES_PROPERTY), batch.getTestArguments());
-    Assert.assertEquals(TEST_MODULE_NAME, batch.getTestModule());
+    Assert.assertEquals(TEST_MODULE_NAME, batch.getTestModuleRelativeDir());
   }
   @Test
   public void testMoreThanThreeTests() throws Exception {
     Assert.assertTrue(tests.add("d"));
     QFileTestBatch batch =
-        new QFileTestBatch("testcase", DRIVER, QUERY_FILES_PROPERTY, tests, 
true, TEST_MODULE_NAME);
+        new QFileTestBatch(new AtomicInteger(1), "testcase", DRIVER, 
QUERY_FILES_PROPERTY, tests, true, TEST_MODULE_NAME);
     Assert.assertEquals(Joiner.on("-").join(DRIVER, "a", "b", "c", "and", "1", 
"more"),
         batch.getName());
   }
   @Test
   public void testNotParallel() throws Exception {
     QFileTestBatch batch =
-        new QFileTestBatch("testcase", DRIVER, QUERY_FILES_PROPERTY, tests, 
false,
+        new QFileTestBatch(new AtomicInteger(1), "testcase", DRIVER, 
QUERY_FILES_PROPERTY, tests, false,
             TEST_MODULE_NAME);
     Assert.assertFalse(batch.isParallel());
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java
index 1ec27f5..bbf3226 100644
--- a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java
+++ b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java
@@ -23,6 +23,7 @@ import java.io.FileOutputStream;
 import java.io.OutputStream;
 import java.util.List;
 import java.util.Properties;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import junit.framework.Assert;
 
@@ -91,6 +92,7 @@ public class TestTestParser {
   @Test
   public void testParseWithExcludes() throws Exception {
     context.put("unitTests.directories", "build/1 build/2");
+    context.put("unitTests.subdirForPrefix", "units");
     context.put("unitTests.exclude", "TestA");
     context.put("unitTests.isolate", "TestB");
     context.put("qFileTests", "f");
@@ -101,13 +103,14 @@ public class TestTestParser {
     context.put("qFileTest.f.isolate", "isolated");
     context.put("qFileTest.f.groups.excluded", "excluded.q");
     context.put("qFileTest.f.groups.isolated", "isolated.q");
-    testParser = new TestParser(context, "testcase", workingDirectory, LOG);
+    testParser = new TestParser(context, new AtomicInteger(1),  "testcase", 
workingDirectory, LOG);
     List<TestBatch> testBatches = testParser.parse().get();
     Assert.assertEquals(4, testBatches.size());
   }
   @Test
   public void testParseWithIncludes() throws Exception {
     context.put("unitTests.directories", "build/1 build/2");
+    context.put("unitTests.subdirForPrefix", "units");
     context.put("unitTests.include", "TestA TestB");
     context.put("unitTests.isolate", "TestB");
     context.put("qFileTests", "f");
@@ -118,13 +121,14 @@ public class TestTestParser {
     context.put("qFileTest.f.queryFilesProperty", "qfile");
     context.put("qFileTest.f.groups.included", "included.q isolated.q");
     context.put("qFileTest.f.groups.isolated", "isolated.q");
-    testParser = new TestParser(context, "testcase", workingDirectory, LOG);
+    testParser = new TestParser(context, new AtomicInteger(1), "testcase", 
workingDirectory, LOG);
     List<TestBatch> testBatches = testParser.parse().get();
     Assert.assertEquals(4, testBatches.size());
   }
   @Test
   public void testParsePropertyFile() throws Exception {
     context.put("unitTests.directories", "build/1 build/2");
+    context.put("unitTests.subdirForPrefix", "units");
     context.put("unitTests.include", "TestA TestB");
     context.put("unitTests.isolate", "TestB");
     context.put("qFileTests", "f");
@@ -139,7 +143,7 @@ public class TestTestParser {
     context.put("qFileTest.f.groups.included", "prop.${normal.one.group} 
prop.${normal.two.group} prop.${isolated.group}");
     context.put("qFileTest.f.groups.isolated", "prop.${isolated.group}");
     context.put("qFileTest.f.groups.excluded", "prop.${excluded.group}");
-    testParser = new TestParser(context, "testcase", workingDirectory, LOG);
+    testParser = new TestParser(context, new AtomicInteger(1), "testcase", 
workingDirectory, LOG);
     List<TestBatch> testBatches = testParser.parse().get();
     Assert.assertEquals(4, testBatches.size());
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestUnitTestPropertiesParser.java
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestUnitTestPropertiesParser.java b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestUnitTestPropertiesParser.java
new file mode 100644
index 0000000..5bc521a
--- /dev/null
+++ b/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestUnitTestPropertiesParser.java
@@ -0,0 +1,672 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hive.ptest.execution.conf;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.Sets;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class TestUnitTestPropertiesParser {
+
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TestUnitTestPropertiesParser.class);
+
+  private static final String MODULE1_NAME = "module1";
+  private static final String MODULE1_TEST_NAME = "Module1";
+  private static final String MODULE2_NAME = "module2";
+  private static final String MODULE2_TEST_NAME = "Module2";
+
+  private static final String TOP_LEVEL_TEST_NAME = "tl";
+  private static final String TWO_LEVEL_MODULE1_NAME = "module2l.submodule1";
+  private static final String TWO_LEVEL_TEST_NAME = "TwoLevel";
+  private static final String THREE_LEVEL_MODULE1_NAME = 
"module3l.sub.submodule1";
+  private static final String THREE_LEVEL_TEST_NAME = "ThreeLevel";
+
+  private static final String MODULE3_REL_DIR = "TwoLevel/module-2.6";
+  private static final String MODULE3_MODULE_NAME = "TwoLevel.module-2.6";
+  private static final String MODULE3_TEST_NAME = "Module3";
+
+
+  private static final int BATCH_SIZE_DEFAULT = 10;
+
+  private static final String TEST_CASE_PROPERT_NAME = "test";
+
+  @Test(timeout = 5000)
+  public void testSimpleSetup() {
+
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        2,
+        new String[]{MODULE1_NAME, MODULE2_NAME},
+        new int[]{5, 4},
+        new boolean[]{true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testTopLevelExclude() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+    context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_EXCLUDE),
+        "Test" + MODULE1_TEST_NAME + "1");
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        2,
+        new String[]{MODULE1_NAME, MODULE2_NAME},
+        new int[]{4, 4},
+        new boolean[]{true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testTopLevelInclude() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+    context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_INCLUDE),
+        "Test" + MODULE1_TEST_NAME + "1" + " " + "Test" + MODULE1_TEST_NAME + 
"2");
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        1,
+        new String[]{MODULE1_NAME},
+        new int[]{2},
+        new boolean[]{true});
+  }
+
+  @Test(timeout = 5000)
+  public void testTopLevelSkipBatching() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_SKIP_BATCHING),
+        "Test" + MODULE1_TEST_NAME + "1" + " " + "Test" + MODULE1_TEST_NAME + 
"2");
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        4,
+        new String[]{MODULE1_NAME, MODULE1_NAME, MODULE1_NAME, MODULE2_NAME},
+        new int[]{1, 1, 3, 4},
+        new boolean[]{true, true, true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testTopLevelIsolate() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+    context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_ISOLATE),
+        "Test" + MODULE1_TEST_NAME + "1" + " " + "Test" + MODULE1_TEST_NAME + 
"2");
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        4,
+        new String[]{MODULE1_NAME, MODULE1_NAME, MODULE1_NAME, MODULE2_NAME},
+        new int[]{1, 1, 3, 4},
+        new boolean[]{false, false, true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testTopLevelBatchSize() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+    context
+        .put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_BATCH_SIZE), 
Integer.toString(2));
+
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        5,
+        new String[]{MODULE1_NAME, MODULE1_NAME, MODULE1_NAME, MODULE2_NAME, 
MODULE2_NAME},
+        new int[]{2, 2, 1, 2, 2},
+        new boolean[]{true, true, true, true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testModuleLevelExclude() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_ONE_MODULE, 
MODULE1_NAME),
+        MODULE1_NAME);
+    context.put(getUtSpecificPropertyName(MODULE1_NAME, 
UnitTestPropertiesParser.PROP_EXCLUDE),
+        "Test" + MODULE1_TEST_NAME + "1");
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        2,
+        new String[]{MODULE1_NAME, MODULE2_NAME},
+        new int[]{4, 4},
+        new boolean[]{true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testModuleLevelInclude() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_ONE_MODULE, 
MODULE1_NAME),
+        MODULE1_NAME);
+    context.put(getUtSpecificPropertyName(MODULE1_NAME, 
UnitTestPropertiesParser.PROP_INCLUDE),
+        "Test" + MODULE1_TEST_NAME + "1" + " " + "Test" + MODULE1_TEST_NAME + 
"2");
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        2,
+        new String[]{MODULE1_NAME, MODULE2_NAME},
+        new int[]{2, 4},
+        new boolean[]{true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testModuleLevelSkipBatching() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_ONE_MODULE, 
MODULE1_NAME),
+        MODULE1_NAME);
+    context
+        .put(getUtSpecificPropertyName(MODULE1_NAME, 
UnitTestPropertiesParser.PROP_SKIP_BATCHING),
+            "Test" + MODULE1_TEST_NAME + "1" + " " + "Test" + 
MODULE1_TEST_NAME + "2");
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        4,
+        new String[]{MODULE1_NAME, MODULE1_NAME, MODULE1_NAME, MODULE2_NAME},
+        new int[]{1, 1, 3, 4},
+        new boolean[]{true, true, true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testModuleLevelIsolate() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_ONE_MODULE, 
MODULE1_NAME),
+        MODULE1_NAME);
+    context.put(getUtSpecificPropertyName(MODULE1_NAME, 
UnitTestPropertiesParser.PROP_ISOLATE),
+        "Test" + MODULE1_TEST_NAME + "1" + " " + "Test" + MODULE1_TEST_NAME + 
"2");
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        4,
+        new String[]{MODULE1_NAME, MODULE1_NAME, MODULE1_NAME, MODULE2_NAME},
+        new int[]{1, 1, 3, 4},
+        new boolean[]{false, false, true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testModuleLevelBatchSize() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_ONE_MODULE, 
MODULE1_NAME),
+        MODULE1_NAME);
+    context.put(getUtSpecificPropertyName(MODULE1_NAME, 
UnitTestPropertiesParser.PROP_BATCH_SIZE),
+        Integer.toString(2));
+
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        4,
+        new String[]{MODULE1_NAME, MODULE1_NAME, MODULE1_NAME, MODULE2_NAME},
+        new int[]{2, 2, 1, 4},
+        new boolean[]{true, true, true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testProvidedExclude() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 5, 4);
+
+    Set<String> excludedProvided = Sets.newHashSet("Test" + MODULE1_TEST_NAME 
+ "1");
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            excludedProvided, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        2,
+        new String[]{MODULE1_NAME, MODULE2_NAME},
+        new int[]{4, 4},
+        new boolean[]{true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testTopLevelBatchSizeIncludeAll() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 120, 60);
+    context
+        .put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_BATCH_SIZE), 
Integer.toString(0));
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        2,
+        new String[]{MODULE1_NAME, MODULE2_NAME},
+        new int[]{120, 60},
+        new boolean[]{true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testModuleLevelBatchSizeIncludeAll() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 50, 4);
+    context
+        .put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_BATCH_SIZE), 
Integer.toString(2));
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_ONE_MODULE, 
MODULE1_NAME),
+        MODULE1_NAME);
+    context.put(getUtSpecificPropertyName(MODULE1_NAME, 
UnitTestPropertiesParser.PROP_BATCH_SIZE),
+        Integer.toString(0));
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        3,
+        new String[]{MODULE1_NAME, MODULE2_NAME, MODULE2_NAME},
+        new int[]{50, 2, 2},
+        new boolean[]{true, true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testMultiLevelModules() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+
+    FileListProvider flProvider = getTestFileListProviderMultiLevel(baseDir, 
4, 30, 6, 9);
+    context
+        .put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_BATCH_SIZE), 
Integer.toString(4));
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_ONE_MODULE, 
MODULE1_NAME),
+        MODULE1_NAME);
+    context.put(getUtSpecificPropertyName(MODULE1_NAME, 
UnitTestPropertiesParser.PROP_BATCH_SIZE),
+        Integer.toString(0));
+
+    context.put(
+        getUtRootPropertyName(UnitTestPropertiesParser.PROP_ONE_MODULE, 
THREE_LEVEL_MODULE1_NAME),
+        THREE_LEVEL_MODULE1_NAME);
+    context.put(getUtSpecificPropertyName(THREE_LEVEL_MODULE1_NAME,
+        UnitTestPropertiesParser.PROP_BATCH_SIZE),
+        Integer.toString(0));
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        5,
+        new String[]{UnitTestPropertiesParser.PREFIX_TOP_LEVEL, MODULE1_NAME,
+            TWO_LEVEL_MODULE1_NAME, TWO_LEVEL_MODULE1_NAME, 
THREE_LEVEL_MODULE1_NAME},
+        new int[]{4, 30, 4, 2, 9},
+        new boolean[]{true, true, true, true, true});
+
+  }
+
+  @Test(timeout = 5000)
+  public void testTopLevelModuleConfig() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+
+    FileListProvider flProvider = getTestFileListProviderMultiLevel(baseDir, 
9, 0, 0, 0);
+    context
+        .put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_BATCH_SIZE), 
Integer.toString(4));
+    context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_ONE_MODULE,
+        UnitTestPropertiesParser.MODULE_NAME_TOP_LEVEL),
+        UnitTestPropertiesParser.MODULE_NAME_TOP_LEVEL);
+    
context.put(getUtSpecificPropertyName(UnitTestPropertiesParser.MODULE_NAME_TOP_LEVEL,
+        UnitTestPropertiesParser.PROP_BATCH_SIZE),
+        Integer.toString(0));
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        1,
+        new String[]{UnitTestPropertiesParser.PREFIX_TOP_LEVEL},
+        new int[]{9},
+        new boolean[]{true});
+  }
+
+  @Test(timeout = 5000)
+  public void testScanMultipleDirectoriesNested() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+
+    FileListProvider flProvider = getTestFileListProviderMultiLevel(baseDir, 
13, 5, 0, 0);
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_DIRECTORIES),
+        "./ ./" + MODULE1_NAME);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        3,
+        new String[]{UnitTestPropertiesParser.PREFIX_TOP_LEVEL,
+            UnitTestPropertiesParser.PREFIX_TOP_LEVEL, MODULE1_NAME},
+        new int[]{10, 3, 5},
+        new boolean[]{true, true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testScanMultipleDirectoriesNonNested() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 13, 8);
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_DIRECTORIES),
+        "./" + MODULE1_NAME + " " + "./" + MODULE2_NAME);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        3,
+        new String[]{MODULE1_NAME, MODULE1_NAME, MODULE2_NAME},
+        new int[]{10, 3, 8},
+        new boolean[]{true, true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testModuleInclude() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 13, 8);
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_MODULE_LIST,
+        UnitTestPropertiesParser.PROP_INCLUDE), MODULE1_NAME);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        2,
+        new String[]{MODULE1_NAME, MODULE1_NAME},
+        new int[]{10, 3},
+        new boolean[]{true, true});
+  }
+
+  @Test(timeout = 5000)
+  public void testModuleExclude() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+
+    FileListProvider flProvider = getTestFileListProvider(baseDir, 13, 8);
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_MODULE_LIST,
+        UnitTestPropertiesParser.PROP_EXCLUDE), MODULE1_NAME);
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        1,
+        new String[]{MODULE2_NAME},
+        new int[]{8},
+        new boolean[]{true});
+  }
+
+  @Test(timeout = 5000)
+  public void testModuleWithPeriodInDirName() {
+    File baseDir = getFakeTestBaseDir();
+    Context context = getDefaultContext();
+
+    FileListProvider flProvider =
+        getTestFileListProviderSingleModule(baseDir, MODULE3_REL_DIR, 
MODULE3_TEST_NAME, 13);
+    context
+        .put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_ONE_MODULE, 
MODULE3_MODULE_NAME),
+            MODULE3_MODULE_NAME);
+    context.put(
+        getUtSpecificPropertyName(MODULE3_MODULE_NAME, 
UnitTestPropertiesParser.PROP_BATCH_SIZE),
+        Integer.toString(5));
+
+    UnitTestPropertiesParser parser =
+        new UnitTestPropertiesParser(context, new AtomicInteger(1), 
TEST_CASE_PROPERT_NAME, baseDir, LOG, flProvider,
+            null, true);
+    Collection<TestBatch> testBatchCollection = parser.generateTestBatches();
+    verifyBatches(testBatchCollection,
+        3,
+        new String[]{MODULE3_MODULE_NAME, MODULE3_MODULE_NAME, 
MODULE3_MODULE_NAME},
+        new int[]{5, 5, 3},
+        new boolean[]{true, true, true});
+
+  }
+
+  private void verifyBatches(Collection<TestBatch> testBatchCollection, int 
numBatches,
+                             String[] moduleNames, int[] testsPerBatch, 
boolean[] isParallel) {
+    List<TestBatch> testBatches = new LinkedList<>(testBatchCollection);
+    assertEquals(String.format("Expected batches=[%d], found=[%d]", 
numBatches, testBatches.size()),
+        numBatches, testBatches.size());
+    assert moduleNames.length == numBatches;
+    assert testsPerBatch.length == numBatches;
+    assert isParallel.length == numBatches;
+
+    for (int i = 0; i < numBatches; i++) {
+      TestBatch testBatch = testBatches.get(i);
+      if (!moduleNames[i].equals(UnitTestPropertiesParser.PREFIX_TOP_LEVEL)) {
+        moduleNames[i] = moduleNames[i].replace(".", "/");
+      }
+
+      assertEquals(String.format("Expected batchName=[%s], found=[%s] on 
index=%d", moduleNames[i],
+          testBatch.getTestModuleRelativeDir(), i), moduleNames[i],
+          testBatch.getTestModuleRelativeDir());
+      assertEquals(String.format("Expected size=[%d], found=[%d] on index=%d", 
testsPerBatch[i],
+          testBatch.getNumTestsInBatch(), i), testsPerBatch[i], 
testBatch.getNumTestsInBatch());
+      assertEquals(String.format("Expected isParallel=[%s], found=[%s] on 
index=%d", isParallel[i],
+          testBatch.isParallel(), i), isParallel[i], testBatch.isParallel());
+    }
+  }
+
+
+  private static File getFakeTestBaseDir() {
+    File javaTmpDir = new File(System.getProperty("java.io.tmpdir"));
+    File baseDir = new File(javaTmpDir, UUID.randomUUID().toString());
+    return baseDir;
+  }
+
+  /**
+   * Returns 2 modules. Counts can be specified.
+   *
+   * @param module1Count
+   * @param module2Count
+   * @return
+   */
+  private static FileListProvider getTestFileListProvider(final File baseDir,
+                                                          final int 
module1Count,
+                                                          final int 
module2Count) {
+
+    return new FileListProvider() {
+      @Override
+      public Collection<File> listFiles(File directory, String[] extensions, 
boolean recursive) {
+        List<File> list = new LinkedList<>();
+
+        File m1F = new File(baseDir, Joiner.on("/").join(MODULE1_NAME, 
"target", "test", "p1"));
+        for (int i = 0; i < module1Count; i++) {
+          list.add(new File(m1F, "Test" + MODULE1_TEST_NAME + (i + 1) + 
".class"));
+        }
+
+        File m2F = new File(baseDir, Joiner.on("/").join(MODULE2_NAME, 
"target", "test"));
+        for (int i = 0; i < module2Count; i++) {
+          list.add(new File(m2F, "Test" + MODULE2_TEST_NAME + (i + 1) + 
".class"));
+        }
+
+        return list;
+      }
+    };
+  }
+
+  private static FileListProvider getTestFileListProviderMultiLevel(final File 
baseDir,
+                                                                    final int 
l0Count,
+                                                                    final int 
l1Count,
+                                                                    final int 
l2Count,
+                                                                    final int 
l3Count) {
+    return new FileListProvider() {
+      @Override
+      public Collection<File> listFiles(File directory, String[] extensions, 
boolean recursive) {
+        List<File> list = new LinkedList<>();
+
+        File l0F = new File(baseDir, Joiner.on("/").join("target", "test", 
"p1", "p2"));
+        for (int i = 0; i < l0Count; i++) {
+          list.add(new File(l0F, "Test" + TOP_LEVEL_TEST_NAME + (i + 1) + 
".class"));
+        }
+
+
+        File l1F = new File(baseDir, Joiner.on("/").join(MODULE1_NAME, 
"target", "test"));
+        for (int i = 0; i < l1Count; i++) {
+          list.add(new File(l1F, "Test" + MODULE1_TEST_NAME + (i + 1) + 
".class"));
+        }
+
+        File l2F = new File(baseDir, 
Joiner.on("/").join(TWO_LEVEL_MODULE1_NAME, "target", "test"));
+        for (int i = 0; i < l2Count; i++) {
+          list.add(new File(l2F, "Test" + TWO_LEVEL_TEST_NAME + (i + 1) + 
".class"));
+        }
+
+        File l3F =
+            new File(baseDir, Joiner.on("/").join(THREE_LEVEL_MODULE1_NAME, 
"target", "test"));
+        for (int i = 0; i < l3Count; i++) {
+          list.add(new File(l3F, "Test" + THREE_LEVEL_TEST_NAME + (i + 1) + 
".class"));
+        }
+        return list;
+      }
+    };
+  }
+
+  private static FileListProvider getTestFileListProviderSingleModule(final 
File baseDir,
+                                                                      final 
String moduleRelDir,
+                                                                      final 
String testName,
+                                                                      final 
int numTests) {
+    return new FileListProvider() {
+
+      @Override
+      public Collection<File> listFiles(File directory, String[] extensions, 
boolean recursive) {
+        List<File> list = new LinkedList<>();
+        File f = new File(baseDir, Joiner.on("/").join(moduleRelDir, "target", 
"package", "test"));
+        for (int i = 0; i < numTests; i++) {
+          list.add(new File(f, "Test" + testName + (i + 1) + ".class"));
+        }
+        return list;
+      }
+    };
+  }
+
+  private static Context getDefaultContext() {
+    Context context = new Context();
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_DIRECTORIES), 
"./");
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_SUBDIR_FOR_PREFIX),
 "target");
+    
context.put(getUtRootPropertyName(UnitTestPropertiesParser.PROP_BATCH_SIZE),
+        Integer.toString(BATCH_SIZE_DEFAULT));
+    return context;
+  }
+
+  private static String getUtRootPropertyName(String p1, String... rest) {
+    return Joiner.on(".").join(UnitTestPropertiesParser.PROP_PREFIX_ROOT, p1, 
rest);
+  }
+
+  private static String getUtSpecificPropertyName(String p1, String... rest) {
+    return Joiner.on(".").join(UnitTestPropertiesParser.PROP_PREFIX_MODULE, 
p1, rest);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/resources/log4j2.properties
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/resources/log4j2.properties 
b/testutils/ptest2/src/test/resources/log4j2.properties
new file mode 100644
index 0000000..944556a
--- /dev/null
+++ b/testutils/ptest2/src/test/resources/log4j2.properties
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+status = INFO
+name = PTest2Log4j2
+packages = org.apache.hadoop.hive.ql.log
+
+# list of properties
+property.hive.ptest.log.level = DEBUG
+property.hive.ptest.root.logger = console
+
+# list of all appenders
+appenders = console
+
+# console appender
+appender.console.type = Console
+appender.console.name = console
+appender.console.target = SYSTEM_ERR
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+# list of all loggers
+loggers = Http, SpringFramework, OrgJclouds, Jclouds, Hive, NIOServerCnxn, 
ClientCnxnSocketNIO
+
+logger.Http.name = org.apache.http
+logger.Http.level = TRACE
+
+logger.SpringFramework.name = org.springframework
+logger.SpringFramework.level = INFO
+
+logger.OrgJclouds.name = org.jclouds
+logger.OrgJclouds.level = INFO
+
+logger.Jclouds.name = jclouds
+logger.Jclouds.level = INFO
+
+logger.Hive.name = org.apache.hive
+logger.Hive.level = DEBUG
+
+logger.NIOServerCnxn.name = org.apache.zookeeper.server.NIOServerCnxn
+logger.NIOServerCnxn.level = WARN
+
+logger.ClientCnxnSocketNIO.name = org.apache.zookeeper.ClientCnxnSocketNIO
+logger.ClientCnxnSocketNIO.level = WARN
+
+# root logger
+rootLogger.level = ${sys:hive.ptest.log.level}
+rootLogger.appenderRefs = root
+rootLogger.appenderRef.root.ref = ${sys:hive.ptest.root.logger}

http://git-wip-us.apache.org/repos/asf/hive/blob/eb126207/testutils/ptest2/src/test/resources/test-configuration2.properties
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/test/resources/test-configuration2.properties 
b/testutils/ptest2/src/test/resources/test-configuration2.properties
new file mode 100644
index 0000000..19e1ac2
--- /dev/null
+++ b/testutils/ptest2/src/test/resources/test-configuration2.properties
@@ -0,0 +1,154 @@
+repositoryType = git
+repository = http://git-wip-us.apache.org/repos/asf/hive.git
+repositoryName = apache-github-source
+branch = master
+
+
+#jiraUrl = https://issues.apache.org/jira
+#jiraUser =
+#jiraPassword =
+#jenkinsURL = https://builds.apache.org/job
+#logsURL = http://webserver/logs/
+
+mavenArgs =
+#mavenEnvOpts = -Dhttp.proxyHost=localhost -Dhttp.proxyPort=3128
+mavenTestArgs =
+testCasePropertyName = test
+buildTool = maven
+javaHome = /opt/jdk1.8.0_102
+javaHomeForTests = /opt/jdk1.8.0_102
+# Relative path to the src directory. If specified, will be treated as the 
module name.
+unitTests.directories = ./
+additionalProfiles =
+# TODO prepScriptPath
+# TODO execScriptPath
+# TODO applyPatchScriptPath
+# TODO testParser.classname - plugin to parse the test section
+# TODO testPropertiesPath - Used in conjunction with the parser
+
+# List of tests to include/exclude
+unitTests.isolate = TestAuthorizationPreEventListener TestDefaultHCatRecord 
TestDefaultHiveMetastoreAuthorizationProvider TestEmbeddedHiveMetaStore 
TestExecDriver TestHadoop20SAuthBridge TestHBaseBulkOutputFormat 
TestHBaseDirectOutputFormat TestHBaseInputFormat TestHBaseMinimrCliDriver 
TestHCatClient TestHCatDynamicPartitioned TestHCatExternalDynamicPartitioned 
TestHCatExternalPartitioned TestHCatHiveCompatibility 
TestHCatHiveThriftCompatibility TestHCatInputFormat TestHCatLoader 
TestHCatLoaderComplexSchema TestHCatLoaderStorer TestHCatMultiOutputFormat 
TestHCatNonPartitioned TestHCatOutputFormat TestHCatPartitioned 
TestHCatPartitionPublish TestHCatRecordSerDe TestHCatSchema TestHCatSchemaUtils 
TestHCatStorer TestHCatStorerMulti TestHCatStorerWrapper TestHCatUtil 
TestHdfsAuthorizationProvider TestHive TestHiveClientCache 
TestHiveMetaStoreWithEnvironmentContext TestHiveRemote TestIDGenerator 
TestInputJobInfo TestJsonSerDe TestLazyHCatRecord TestMetaStoreAuthorization 
TestMetaStoreEve
 ntListener TestMsgBusConnection TestMultiOutputFormat TestNotificationListener 
TestOrcDynamicPartitioned TestOrcHCatLoader TestOrcHCatLoaderComplexSchema 
TestOrcHCatStorer TestPassProperties TestPermsGrp TestPigHCatUtil 
TestRCFileMapReduceInputFormat TestReaderWriter TestRemoteHiveMetaStore 
TestRemoteHiveMetaStoreIpAddress TestRemoteUGIHiveMetaStoreIpAddress 
TestRevisionManager TestSemanticAnalysis TestSequenceFileReadWrite 
TestSetUGIOnBothClientServer TestSetUGIOnOnlyClient TestSetUGIOnOnlyServer 
TestSnapshots TestUseDatabase TestZNodeSetUp
+
+# comes from build-command.xml excludes
+unitTests.exclude = TestSerDe TestHiveMetaStore TestHiveServer2Concurrency 
TestAccumuloCliDriver
+
+# module include / exclude list
+# unitTests.modules.include
+# unitTests.modules.exclude
+
+# Default batch-size when running a test-module
+unitTests.batchSize=20
+
+#The path one level above this property (e.g. for ql/src - moduleName=ql) is 
considered the module name. Can be multiple levels.
+# This should match up with the properties specified further down to override 
module settings. e.g. unitTests.ql., unitTests.itests.hiveUnit.
+# unitTests.subdirForPrefix=target
+
+# Overrides for specific tests modules.
+unitTests.module.ql=ql
+ut.ql.batchSize=15
+ut.ql.isolate=
+ut.ql.include=
+ut.ql.exclude=
+ut.ql.skipBatching=TestDbTxnManager2 
TestTxnCommands2WithSplitUpdateAndVectorization TestTxnCommands2WithSplitUpdate 
TestOrcFile TestVectorRowObject
+# TODO unitTests.ql.prepScript
+# TODO unitTests.ql.execScript
+
+unitTests.module.itests.hive-unit=itests.hive-unit
+ut.itests.hive-unit.batchSize=9
+ut.itests.hive-unit.skipBatching=TestAcidOnTezWithSplitUpdate TestAcidOnTez 
TestMTQueries TestCompactor TestSchedulerQueue TestOperationLoggingAPIWithTez 
TestSSL TestJdbcDriver2 TestJdbcWithMiniHA TestJdbcWithMiniMr
+
+unitTests.module.accumulo-handler=accumulo-handler
+ut.accumulo-handler.batchSize=0
+
+unitTests.module.common=common
+ut.common.batchSize=0
+
+unitTests.module.hbase-handler=hbase-handler
+ut.hbase-handler.batchSize=0
+
+unitTests.module.metastore=metastore
+ut.metastore.batchSize=12
+
+unitTests.module.orc=orc
+ut.orc.batchSize=0
+
+unitTests.module.service=service
+ut.service.batchSize=8
+
+unitTests.module.hcatalog.core=hcatalog.core
+ut.hcatalog.core.batchSize=9
+ut.hcatalog.core.skipBatching=TestHCatMutableDynamicPartitioned 
TestHCatMutablePartitioned
+
+
+qFileTests = clientPositive miniMr clientNegative miniMrNegative hbasePositive 
miniTez spark miniLlap
+qFileTests.propertyFiles.mainProperties = 
itests/src/test/resources/testconfiguration.properties
+
+qFileTest.clientPositive.driver = TestCliDriver
+qFileTest.clientPositive.directory = ql/src/test/queries/clientpositive
+qFileTest.clientPositive.batchSize = 15
+qFileTest.clientPositive.queryFilesProperty = qfile
+qFileTest.clientPositive.exclude = minimr
+qFileTest.clientPositive.groups.minimr = mainProperties.${minimr.query.files}
+
+qFileTest.miniMr.driver = TestMinimrCliDriver
+qFileTest.miniMr.directory = ql/src/test/queries/clientpositive
+qFileTest.miniMr.batchSize = 10
+qFileTest.miniMr.queryFilesProperty = minimr.query.files
+qFileTest.miniMr.include = normal
+qFileTest.miniMr.isolate = flaky
+# normal are tests that run in minimr mode via build-common.xml
+qFileTest.miniMr.groups.normal = mainProperties.${minimr.query.files}
+
+qFileTest.miniMrNegative.driver = TestNegativeMinimrCliDriver
+qFileTest.miniMrNegative.directory = ql/src/test/queries/clientnegative
+qFileTest.miniMrNegative.batchSize = 1000
+qFileTest.miniMrNegative.queryFilesProperty = minimr.query.negative.files
+qFileTest.miniMrNegative.include = normal
+qFileTest.miniMrNegative.isolate = flaky
+# normal are tests that run in minimr mode via build-common.xml
+qFileTest.miniMrNegative.groups.normal = 
mainProperties.${minimr.query.negative.files}
+qFileTest.miniMrNegative.groups.flaky = mapreduce_stack_trace_hadoop20.q
+
+qFileTest.clientNegative.driver = TestNegativeCliDriver
+qFileTest.clientNegative.directory = ql/src/test/queries/clientnegative
+qFileTest.clientNegative.batchSize = 1000
+qFileTest.clientNegative.queryFilesProperty = qfile
+qFileTest.clientNegative.exclude = miniMrNormal failing
+# stats_aggregator_error_1.q fails in both negative client and minimr mode
+# Disable for HIVE-4941 as this tests runs via ant test
+#qFileTest.clientNegative.groups.failing = stats_aggregator_error_1.q
+# normal are run via minimr
+qFileTest.clientNegative.groups.miniMrNormal = cluster_tasklog_retrieval.q 
minimr_broken_pipe.q mapreduce_stack_trace.q mapreduce_stack_trace_turnoff.q 
mapreduce_stack_trace_hadoop20.q mapreduce_stack_trace_turnoff_hadoop20.q
+
+qFileTest.hbasePositive.driver = TestHBaseCliDriver
+qFileTest.hbasePositive.directory = hbase-handler/src/test/queries/positive
+qFileTest.hbasePositive.batchSize = 3
+qFileTest.hbasePositive.queryFilesProperty = qfile
+qFileTest.hbasePositive.isolate = long
+qFileTest.hbasePositive.exclude = minimr
+qFileTest.hbasePositive.groups.long = hbase_queries.q 
hbase_binary_storage_queries.q hbase_binary_map_queries.q hbase_joins.q
+# when listing directories we actually look for .q so we'd have to do work to 
actually make .m execute here
+qFileTest.hbasePositive.groups.minimr = hbase_bulk.m
+
+qFileTest.miniTez.driver = TestMiniTezCliDriver
+qFileTest.miniTez.directory = ql/src/test/queries/clientpositive
+qFileTest.miniTez.batchSize = 15
+qFileTest.miniTez.queryFilesProperty = qfile
+qFileTest.miniTez.include = normal
+#qFileTest.miniTez.exclude = HIVE-8964
+qFileTest.miniTez.groups.normal = mainProperties.${minitez.query.files} 
mainProperties.${minitez.query.files.shared}
+#qFileTest.miniTez.groups.HIVE-8964 = lvj_mapjoin.q
+
+qFileTest.spark.driver = TestSparkCliDriver
+qFileTest.spark.directory = ql/src/test/queries/clientpositive
+qFileTest.spark.batchSize = 15
+qFileTest.spark.queryFilesProperty = qfile
+qFileTest.spark.include = normal
+qFileTest.spark.groups.normal = mainProperties.${spark.query.files}
+
+qFileTest.miniLlap.driver = TestMiniLlapCliDriver
+qFileTest.miniLlap.directory = ql/src/test/queries/clientpositive
+qFileTest.miniLlap.batchSize = 15
+qFileTest.miniLlap.queryFilesProperty = qfile
+qFileTest.miniLlap.include = normal
+qFileTest.miniLlap.groups.normal = mainProperties.${minillap.query.files} 
mainProperties.${minillap.shared.query.files}

Reply via email to