Modified: accumulo/trunk/test/system/test3/bigrow.sh
URL: http://svn.apache.org/viewvc/accumulo/trunk/test/system/test3/bigrow.sh?rev=1438962&r1=1438961&r2=1438962&view=diff
==============================================================================
--- accumulo/trunk/test/system/test3/bigrow.sh (original)
+++ accumulo/trunk/test/system/test3/bigrow.sh Sat Jan 26 21:04:27 2013
@@ -15,9 +15,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.server.test.TestIngest --timestamp 1 --size 50 --random 56   --rows 1       --start 5000000 --cols 2000000;
-../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.server.test.TestIngest --timestamp 1 --size 50 --random 56   --rows 1000000 --start 0       --cols 1;
-#../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.server.test.VerifyIngest --timestamp 1 --size 50 --random 56 --rows 1      --start 5000000 --cols 2000000;
-../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 0       --cols 1;
-../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.server.test.TestIngest --timestamp 1 --size 50 --random 56   --rows 1000000 --start 7000000 --cols 1;
-../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 7000000 --cols 1;
+../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.test.TestIngest --timestamp 1 --size 50 --random 56   --rows 1       --start 5000000 --cols 2000000;
+../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.test.TestIngest --timestamp 1 --size 50 --random 56   --rows 1000000 --start 0       --cols 1;
+#../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.test.VerifyIngest --timestamp 1 --size 50 --random 56 --rows 1      --start 5000000 --cols 2000000;
+../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 0       --cols 1;
+../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.test.TestIngest --timestamp 1 --size 50 --random 56   --rows 1000000 --start 7000000 --cols 1;
+../../../bin/accumulo jar ../../../lib/accumulo.jar org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 7000000 --cols 1;

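Every change in this commit follows the same mechanical pattern: the test
classes moved from org.apache.accumulo.server.test to org.apache.accumulo.test.
As a minimal sketch (assuming GNU sed and a POSIX shell, run from the
repository root; illustrative only), the rename could be applied across the
system-test scripts with:

  # Rewrite the old test package prefix wherever it appears in the
  # system-test shell scripts and READMEs.
  find test/system -type f \( -name '*.sh' -o -name 'README' \) \
    -exec sed -i 's/org\.apache\.accumulo\.server\.test\./org.apache.accumulo.test./g' {} +
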
Modified: accumulo/trunk/test/system/test4/README
URL: http://svn.apache.org/viewvc/accumulo/trunk/test/system/test4/README?rev=1438962&r1=1438961&r2=1438962&view=diff
==============================================================================
--- accumulo/trunk/test/system/test4/README (original)
+++ accumulo/trunk/test/system/test4/README Sat Jan 26 21:04:27 2013
@@ -3,4 +3,4 @@ Test bulk importing data
 Can run this test with pre-existing splits... use the following command to create the table with
 100 pre-existing splits
 
-hadoop jar ../../../lib/accumulo.jar 'org.apache.accumulo.server.test.TestIngest$CreateTable' 0 5000000 100
+hadoop jar ../../../lib/accumulo.jar 'org.apache.accumulo.test.TestIngest$CreateTable' 0 5000000 100

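Note the single quotes around the class name in the command above:
TestIngest$CreateTable is a Java inner class, and without quoting the shell
would expand $CreateTable as a (normally unset) variable. A quick illustration
in any POSIX shell:

  echo org.apache.accumulo.test.TestIngest$CreateTable    # expands to ...TestIngest
  echo 'org.apache.accumulo.test.TestIngest$CreateTable'  # literal inner-class name
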
Modified: accumulo/trunk/test/system/test4/bulk_import_test.sh
URL: http://svn.apache.org/viewvc/accumulo/trunk/test/system/test4/bulk_import_test.sh?rev=1438962&r1=1438961&r2=1438962&view=diff
==============================================================================
--- accumulo/trunk/test/system/test4/bulk_import_test.sh (original)
+++ accumulo/trunk/test/system/test4/bulk_import_test.sh Sat Jan 26 21:04:27 2013
@@ -19,26 +19,26 @@ hadoop dfs -rmr /testmf
 
 echo "creating first set of map files"
 
-../../../bin/accumulo org.apache.accumulo.server.test.TestIngest --rfile /testmf/mf01 --timestamp 1 --size 50 --random 56 --rows 1000000 --start 0 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.TestIngest --rfile /testmf/mf02 --timestamp 1 --size 50 --random 56 --rows 1000000 --start 1000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.TestIngest --rfile /testmf/mf03 --timestamp 1 --size 50 --random 56 --rows 1000000 --start 2000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.TestIngest --rfile /testmf/mf04 --timestamp 1 --size 50 --random 56 --rows 1000000 --start 3000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.TestIngest --rfile /testmf/mf05 --timestamp 1 --size 50 --random 56 --rows 1000000 --start 4000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf01 --timestamp 1 --size 50 --random 56 --rows 1000000 --start 0 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf02 --timestamp 1 --size 50 --random 56 --rows 1000000 --start 1000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf03 --timestamp 1 --size 50 --random 56 --rows 1000000 --start 2000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf04 --timestamp 1 --size 50 --random 56 --rows 1000000 --start 3000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf05 --timestamp 1 --size 50 --random 56 --rows 1000000 --start 4000000 --cols 1 &
 
 wait
 
 echo "bulk importing"
 
 hadoop dfs -rmr /testmfFail
-../../../bin/accumulo org.apache.accumulo.server.test.BulkImportDirectory -u root -p secret -t test_ingest -s /testmf -f /testmfFail
+../../../bin/accumulo org.apache.accumulo.test.BulkImportDirectory -u root -p secret -t test_ingest -s /testmf -f /testmfFail
 
 echo "verifying"
 
-../../../bin/accumulo org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 0 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 1000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 2000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 3000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 4000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 0 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 1000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 2000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 3000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 1000000 --start 4000000 --cols 1 &
 
 wait
 
@@ -46,24 +46,24 @@ hadoop dfs -rmr /testmf
 
 echo "creating second set of map files"
 
-../../../bin/accumulo org.apache.accumulo.server.test.TestIngest --rfile /testmf/mf01 --timestamp 2 --size 50 --random 57 --rows 1000000 --start 0 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.TestIngest --rfile /testmf/mf02 --timestamp 2 --size 50 --random 57 --rows 1000000 --start 1000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.TestIngest --rfile /testmf/mf03 --timestamp 2 --size 50 --random 57 --rows 1000000 --start 2000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.TestIngest --rfile /testmf/mf04 --timestamp 2 --size 50 --random 57 --rows 1000000 --start 3000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.TestIngest --rfile /testmf/mf05 --timestamp 2 --size 50 --random 57 --rows 1000000 --start 4000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf01 --timestamp 2 --size 50 --random 57 --rows 1000000 --start 0 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf02 --timestamp 2 --size 50 --random 57 --rows 1000000 --start 1000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf03 --timestamp 2 --size 50 --random 57 --rows 1000000 --start 2000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf04 --timestamp 2 --size 50 --random 57 --rows 1000000 --start 3000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf05 --timestamp 2 --size 50 --random 57 --rows 1000000 --start 4000000 --cols 1 &
 
 wait
 
 echo "bulk importing"
 
 hadoop dfs -rmr /testmfFail
-../../../bin/accumulo org.apache.accumulo.server.test.BulkImportDirectory -u root -p secret -t test_ingest -s /testmf -f /testmfFail
+../../../bin/accumulo org.apache.accumulo.test.BulkImportDirectory -u root -p secret -t test_ingest -s /testmf -f /testmfFail
 
 echo "verifying"
 
-../../../bin/accumulo org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 1000000 --start 0 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 1000000 --start 1000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 1000000 --start 2000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 1000000 --start 3000000 --cols 1 &
-../../../bin/accumulo org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 1000000 --start 4000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 1000000 --start 0 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 1000000 --start 1000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 1000000 --start 2000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 1000000 --start 3000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 1000000 --start 4000000 --cols 1 &
 
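The five parallel ingest runs (and the matching verify runs) above differ only
in the rfile suffix and the --start offset, so they could be generated in a
loop. A hypothetical refactoring sketch, reusing the class and flags exactly
as they appear in the script:

  for i in 1 2 3 4 5; do
    start=$(( (i - 1) * 1000000 ))
    # Each worker writes one rfile covering a disjoint 1M-row range.
    ../../../bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf0$i \
      --timestamp 1 --size 50 --random 56 --rows 1000000 --start $start --cols 1 &
  done
  wait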

Modified: accumulo/trunk/test/system/upgrade_test.sh
URL: http://svn.apache.org/viewvc/accumulo/trunk/test/system/upgrade_test.sh?rev=1438962&r1=1438961&r2=1438962&view=diff
==============================================================================
--- accumulo/trunk/test/system/upgrade_test.sh (original)
+++ accumulo/trunk/test/system/upgrade_test.sh Sat Jan 26 21:04:27 2013
@@ -38,9 +38,9 @@ hadoop fs -rmr /testmfFail
 
 echo "uptest\nsecret\nsecret" | $ONE_THREE_DIR/bin/accumulo init --clear-instance-name
 $ONE_THREE_DIR/bin/start-all.sh
-$ONE_THREE_DIR/bin/accumulo org.apache.accumulo.server.test.TestIngest -u root -p secret --timestamp 1 --size 50 --random 56 --rows 100000 --start 0 --cols 1
-$ONE_THREE_DIR/bin/accumulo org.apache.accumulo.server.test.TestIngest --rfile /testmf/mf01 --timestamp 1 --size 50 --random 56 --rows 100000 --start 100000 --cols 1
-$ONE_THREE_DIR/bin/accumulo org.apache.accumulo.server.test.BulkImportDirectory -u root -p secret -t test_ingest --source /testmf --failures /testmfFail
+$ONE_THREE_DIR/bin/accumulo org.apache.accumulo.test.TestIngest -u root -p secret --timestamp 1 --size 50 --random 56 --rows 100000 --start 0 --cols 1
+$ONE_THREE_DIR/bin/accumulo org.apache.accumulo.test.TestIngest --rfile /testmf/mf01 --timestamp 1 --size 50 --random 56 --rows 100000 --start 100000 --cols 1
+$ONE_THREE_DIR/bin/accumulo org.apache.accumulo.test.BulkImportDirectory -u root -p secret -t test_ingest --source /testmf --failures /testmfFail
 if [ $1 == "dirty" ]; then
        pkill -9 -f accumulo.start
 else 
@@ -54,27 +54,27 @@ echo "==== Starting 1.4 ==="
 #TODO test delete range
 
 $ONE_FOUR_DIR/bin/start-all.sh
-$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 200000 --start 0 --cols 1
+$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 200000 --start 0 --cols 1
 echo "compact -t test_ingest -w" | $ONE_FOUR_DIR/bin/accumulo shell -u root -p secret
-$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 200000 --start 0 --cols 1
+$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 200000 --start 0 --cols 1
 echo "merge -t test_ingest -s 1G" | $ONE_FOUR_DIR/bin/accumulo shell -u root -p secret
-$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 200000 --start 0 --cols 1
+$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 200000 --start 0 --cols 1
 echo "clonetable test_ingest tmp\ndeletetable test_ingest\nrenametable tmp test_ingest" | $ONE_FOUR_DIR/bin/accumulo shell -u root -p secret
-$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 200000 --start 0 --cols 1
+$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 1 --random 56 --rows 200000 --start 0 --cols 1
 
 #test overwriting data written in 1.3
-$ONE_FOUR_DIR/bin/accumulo org.apache.accumulo.server.test.TestIngest --timestamp 2 --size 50 --random 57 --rows 300000 --start 0 --cols 1
-$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 300000 --start 0 --cols 1
+$ONE_FOUR_DIR/bin/accumulo org.apache.accumulo.test.TestIngest --timestamp 2 --size 50 --random 57 --rows 300000 --start 0 --cols 1
+$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 300000 --start 0 --cols 1
 echo "compact -t test_ingest -w" | $ONE_FOUR_DIR/bin/accumulo shell -u root -p secret
-$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 300000 --start 0 --cols 1
+$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 300000 --start 0 --cols 1
 
 $ONE_FOUR_DIR/bin/stop-all.sh
 $ONE_FOUR_DIR/bin/start-all.sh
 
-$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 300000 --start 0 --cols 1
+$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 300000 --start 0 --cols 1
 
 pkill -f accumulo.start
 $ONE_FOUR_DIR/bin/start-all.sh
 
-$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.server.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 300000 --start 0 --cols 1
+$ONE_FOUR_DIR/bin/accumulo  org.apache.accumulo.test.VerifyIngest --size 50 --timestamp 2 --random 57 --rows 300000 --start 0 --cols 1
 

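The upgrade test repeats the same VerifyIngest invocation after each
disruptive operation (compaction, merge, clone, clean restart, hard kill). A
hypothetical helper in the same shell style could factor that out (the
verify_14 function name is illustrative, not part of the script):

  verify_14() {
    $ONE_FOUR_DIR/bin/accumulo org.apache.accumulo.test.VerifyIngest \
      --size 50 --timestamp "$1" --random "$2" --rows "$3" --start 0 --cols 1
  }
  verify_14 1 56 200000   # data written under 1.3, checked after each operation
  verify_14 2 57 300000   # data overwritten under 1.4, checked after each restart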
