See <https://builds.apache.org/job/HBase-1.1-JDK7/1797/changes>

Changes:

[enis] HBASE-16721 Concurrency issue in WAL unflushed seqId tracking - ADDENDUM

------------------------------------------
[...truncated 2799 lines...]
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 123.406 sec - in org.apache.hadoop.hbase.client.replication.TestReplicationAdminWithClusters
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 389.954 sec - in org.apache.hadoop.hbase.client.TestMetaWithReplicas
Running org.apache.hadoop.hbase.client.TestCheckAndMutate
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.906 sec - in org.apache.hadoop.hbase.client.TestCheckAndMutate
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 299.65 sec - in org.apache.hadoop.hbase.client.TestFromClientSide3
Running org.apache.hadoop.hbase.client.TestCloneSnapshotFromClient
Running org.apache.hadoop.hbase.client.TestHTableUtil
Running org.apache.hadoop.hbase.client.TestReplicaWithCluster
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.518 sec - in org.apache.hadoop.hbase.client.TestHTableUtil
Running org.apache.hadoop.hbase.client.TestRestoreSnapshotFromClient
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 146.216 sec - in org.apache.hadoop.hbase.client.TestReplicaWithCluster
Tests run: 26, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 473.839 sec - in org.apache.hadoop.hbase.client.TestAdmin1
Running org.apache.hadoop.hbase.client.TestHTablePool$TestHTableThreadLocalPool
Running org.apache.hadoop.hbase.client.TestFromClientSideNoCodec
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.296 sec - in org.apache.hadoop.hbase.client.TestFromClientSideNoCodec
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.179 sec - in org.apache.hadoop.hbase.client.TestHTablePool$TestHTableThreadLocalPool
Running org.apache.hadoop.hbase.client.TestFromClientSideWithCoprocessor
Running org.apache.hadoop.hbase.client.TestSnapshotFromClientWithRegionReplicas
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 90.315 sec - in org.apache.hadoop.hbase.client.TestSnapshotFromClientWithRegionReplicas
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 377.447 sec - in org.apache.hadoop.hbase.client.TestCloneSnapshotFromClient
Running org.apache.hadoop.hbase.client.TestMultiParallel
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 359.669 sec - in org.apache.hadoop.hbase.client.TestRestoreSnapshotFromClient
Tests run: 78, Failures: 0, Errors: 0, Skipped: 4, Time elapsed: 642.413 sec - in org.apache.hadoop.hbase.client.TestFromClientSide
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 50.457 sec - in org.apache.hadoop.hbase.client.TestMultiParallel
Running org.apache.hadoop.hbase.client.TestScannersFromClientSide
Running org.apache.hadoop.hbase.client.TestCloneSnapshotFromClientWithRegionReplicas
Running org.apache.hadoop.hbase.client.TestHTablePool$TestHTableReusablePool
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 55.976 sec - in org.apache.hadoop.hbase.client.TestScannersFromClientSide
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.024 sec - in org.apache.hadoop.hbase.client.TestHTablePool$TestHTableReusablePool
Running org.apache.hadoop.hbase.client.TestClientTimeouts
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.729 sec - in org.apache.hadoop.hbase.client.TestClientTimeouts
Running org.apache.hadoop.hbase.client.TestMultipleTimestamps
Running org.apache.hadoop.hbase.client.TestConnectionImplementation
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.616 sec - in org.apache.hadoop.hbase.client.TestConnectionImplementation
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 51.526 sec - in org.apache.hadoop.hbase.client.TestMultipleTimestamps
Running org.apache.hadoop.hbase.trace.TestHTraceHooks
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.337 sec - in org.apache.hadoop.hbase.trace.TestHTraceHooks
Running org.apache.hadoop.hbase.quotas.TestQuotaTableUtil
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.467 sec - in org.apache.hadoop.hbase.quotas.TestQuotaTableUtil
Running org.apache.hadoop.hbase.client.TestSizeFailures
Running org.apache.hadoop.hbase.quotas.TestQuotaAdmin
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.393 sec - in org.apache.hadoop.hbase.quotas.TestQuotaAdmin
Running org.apache.hadoop.hbase.TestDrainingServer
Running org.apache.hadoop.hbase.quotas.TestQuotaThrottle
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 42.235 sec - in org.apache.hadoop.hbase.quotas.TestQuotaThrottle
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 67.848 sec - in org.apache.hadoop.hbase.TestDrainingServer
Running org.apache.hadoop.hbase.snapshot.TestFlushSnapshotFromClient
Running org.apache.hadoop.hbase.snapshot.TestRestoreFlushSnapshotFromClient
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 129.829 sec - in org.apache.hadoop.hbase.client.TestSizeFailures
Tests run: 78, Failures: 0, Errors: 0, Skipped: 4, Time elapsed: 617.001 sec - in org.apache.hadoop.hbase.client.TestFromClientSideWithCoprocessor
Running org.apache.hadoop.hbase.snapshot.TestSnapshotDescriptionUtils
Running org.apache.hadoop.hbase.snapshot.TestSecureExportSnapshot
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.856 sec - in org.apache.hadoop.hbase.snapshot.TestSnapshotDescriptionUtils
Running org.apache.hadoop.hbase.snapshot.TestSnapshotClientRetries
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 49.744 sec - in org.apache.hadoop.hbase.snapshot.TestSnapshotClientRetries
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 484.112 sec - in org.apache.hadoop.hbase.client.TestCloneSnapshotFromClientWithRegionReplicas
Running org.apache.hadoop.hbase.coprocessor.TestRegionObserverScannerOpenHook

Results :

Failed tests: 
org.apache.hadoop.hbase.master.procedure.TestMasterFailoverWithProcedures.testTruncateWithFailover(org.apache.hadoop.hbase.master.procedure.TestMasterFailoverWithProcedures)
  Run 1: TestMasterFailoverWithProcedures.testTruncateWithFailover:312->testTruncateWithFailoverAtStep:351 {ENCODED => d615d882f268c5179d3771c29f5c99cb, NAME => 'testTruncateWithFailoverAtStep4,,1476745717420.d615d882f268c5179d3771c29f5c99cb.', STARTKEY => '', ENDKEY => 'a'} region dir does not exist
  Run 2: TestMasterFailoverWithProcedures.testTruncateWithFailover:312->testTruncateWithFailoverAtStep:351 {ENCODED => 394ae93f80ffcf4ba00aeeab0e71e60a, NAME => 'testTruncateWithFailoverAtStep4,,1476745825674.394ae93f80ffcf4ba00aeeab0e71e60a.', STARTKEY => '', ENDKEY => 'a'} region dir does not exist
  Run 3: TestMasterFailoverWithProcedures.testTruncateWithFailover:312->testTruncateWithFailoverAtStep:351 {ENCODED => daa17b2ef53d3f1921e1f81256c76ecf, NAME => 'testTruncateWithFailoverAtStep4,,1476745863120.daa17b2ef53d3f1921e1f81256c76ecf.', STARTKEY => '', ENDKEY => 'a'} region dir does not exist

Tests in error: 
  TestChangingEncoding.testChangingEncoding:201->setEncodingConf:182 » TestTimedOut
  TestChangingEncoding.testChangingEncodingWithCompaction:210->prepareTest:97 » RetriesExhausted
  TestChangingEncoding.testCrazyRandomChanges:244->prepareTest:97 » RetriesExhausted
org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFiles.testRegionCrossingHFileSplit(org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFiles)
  Run 1: TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:184->testRegionCrossingHFileSplit:206->runTest:242->runTest:248->runTest:301 » TestTimedOut
  Run 2: TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:184->testRegionCrossingHFileSplit:206->runTest:242->runTest:248->runTest:270 » TestTimedOut

org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowBloom(org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFiles)
  Run 1: TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowBloom:193->testRegionCrossingHFileSplit:206->runTest:242->runTest:248->runTest:287->Object.wait:461->Object.wait:-2 » TestTimedOut
  Run 2: TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowBloom:193->testRegionCrossingHFileSplit:206->runTest:242->runTest:248->runTest:301->Object.wait:-2 » TestTimedOut

org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowColBloom(org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFiles)
  Run 1: TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowColBloom:202->testRegionCrossingHFileSplit:206->runTest:242->runTest:248->runTest:301 » TestTimedOut
  Run 2: TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowColBloom:202->testRegionCrossingHFileSplit:206->runTest:242->runTest:248->runTest:301 » TestTimedOut

org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFiles.testSimpleHFileSplit(org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFiles)
  Run 1: TestLoadIncrementalHFiles.testSimpleHFileSplit:166->runTest:242->runTest:248->runTest:301 » TestTimedOut
  Run 2: TestLoadIncrementalHFiles.testSimpleHFileSplit:166->runTest:242->runTest:248->runTest:301 » TestTimedOut

org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesUseSecurityEndPoint.testRegionCrossingHFileSplit(org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesUseSecurityEndPoint)
  Run 1: TestLoadIncrementalHFilesUseSecurityEndPoint>TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:184->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:301 » TestTimedOut
  Run 2: TestLoadIncrementalHFilesUseSecurityEndPoint>TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:184->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:270 » TestTimedOut

org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesUseSecurityEndPoint.testRegionCrossingHFileSplitRowBloom(org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesUseSecurityEndPoint)
  Run 1: TestLoadIncrementalHFilesUseSecurityEndPoint>TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowBloom:193->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:301 » TestTimedOut
  Run 2: TestLoadIncrementalHFilesUseSecurityEndPoint>TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowBloom:193->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:287 » TestTimedOut

org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesUseSecurityEndPoint.testRegionCrossingHFileSplitRowColBloom(org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesUseSecurityEndPoint)
  Run 1: TestLoadIncrementalHFilesUseSecurityEndPoint>TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowColBloom:202->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:282->Object.wait:461->Object.wait:-2 » TestTimedOut
  Run 2: TestLoadIncrementalHFilesUseSecurityEndPoint>TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowColBloom:202->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:270 » TestTimedOut

org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesUseSecurityEndPoint.testSimpleHFileSplit(org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesUseSecurityEndPoint)
  Run 1: TestLoadIncrementalHFilesUseSecurityEndPoint>TestLoadIncrementalHFiles.testSimpleHFileSplit:166->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:270 » TestTimedOut
  Run 2: TestLoadIncrementalHFilesUseSecurityEndPoint>TestLoadIncrementalHFiles.testSimpleHFileSplit:166->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:301 » TestTimedOut

org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles.testRegionCrossingHFileSplit(org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles)
  Run 1: TestSecureLoadIncrementalHFiles>TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:184->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:301 » TestTimedOut
  Run 2: TestSecureLoadIncrementalHFiles>TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:184->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:301 » TestTimedOut

org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles.testRegionCrossingHFileSplitRowBloom(org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles)
  Run 1: TestSecureLoadIncrementalHFiles>TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowBloom:193->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:301 » TestTimedOut
  Run 2: TestSecureLoadIncrementalHFiles>TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowBloom:193->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:270 » TestTimedOut

org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles.testRegionCrossingHFileSplitRowColBloom(org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles)
  Run 1: TestSecureLoadIncrementalHFiles>TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowColBloom:202->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:301 » TestTimedOut
  Run 2: TestSecureLoadIncrementalHFiles>TestLoadIncrementalHFiles.testRegionCrossingHFileSplitRowColBloom:202->TestLoadIncrementalHFiles.testRegionCrossingHFileSplit:206->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:270 » TestTimedOut

org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles.testSimpleHFileSplit(org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles)
  Run 1: TestSecureLoadIncrementalHFiles>TestLoadIncrementalHFiles.testSimpleHFileSplit:166->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:301 » TestTimedOut
  Run 2: TestSecureLoadIncrementalHFiles>TestLoadIncrementalHFiles.testSimpleHFileSplit:166->TestLoadIncrementalHFiles.runTest:242->TestLoadIncrementalHFiles.runTest:248->TestLoadIncrementalHFiles.runTest:301 » TestTimedOut

org.apache.hadoop.hbase.replication.TestReplicationChangingPeerRegionservers.org.apache.hadoop.hbase.replication.TestReplicationChangingPeerRegionservers
  Run 1: TestReplicationChangingPeerRegionservers>TestReplicationBase.setUpBeforeClass:129 » IO
  Run 2: TestReplicationChangingPeerRegionservers>TestReplicationBase.tearDownAfterClass:158 » NullPointer

  TestReplicationKillMasterRS.killOneMasterRS:33->TestReplicationKillRS.loadTableAndKillRS:93 » TestTimedOut
org.apache.hadoop.hbase.replication.TestReplicationKillSlaveRS.org.apache.hadoop.hbase.replication.TestReplicationKillSlaveRS
  Run 1: TestReplicationKillSlaveRS>TestReplicationBase.setUpBeforeClass:144 » IO java....
  Run 2: TestReplicationKillSlaveRS>TestReplicationBase.tearDownAfterClass:158 » NullPointer

Flaked tests: 
org.apache.hadoop.hbase.client.TestSnapshotCloneIndependence.testOnlineSnapshotDeleteIndependent(org.apache.hadoop.hbase.client.TestSnapshotCloneIndependence)
  Run 1: TestSnapshotCloneIndependence.testOnlineSnapshotDeleteIndependent:182->runTestSnapshotDeleteIndependent:424 » IO
  Run 2: PASS


Tests run: 1701, Failures: 1, Errors: 18, Skipped: 17, Flakes: 1

[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache HBase ....................................... SUCCESS [ 14.287 s]
[INFO] Apache HBase - Checkstyle .......................... SUCCESS [  2.454 s]
[INFO] Apache HBase - Resource Bundle ..................... SUCCESS [  0.732 s]
[INFO] Apache HBase - Annotations ......................... SUCCESS [  4.558 s]
[INFO] Apache HBase - Protocol ............................ SUCCESS [ 25.142 s]
[INFO] Apache HBase - Common .............................. SUCCESS [02:30 min]
[INFO] Apache HBase - Procedure ........................... SUCCESS [03:51 min]
[INFO] Apache HBase - Client .............................. SUCCESS [01:54 min]
[INFO] Apache HBase - Hadoop Compatibility ................ SUCCESS [ 10.697 s]
[INFO] Apache HBase - Hadoop Two Compatibility ............ SUCCESS [ 14.221 s]
[INFO] Apache HBase - Prefix Tree ......................... SUCCESS [ 17.435 s]
[INFO] Apache HBase - Server .............................. FAILURE [  03:22 h]
[INFO] Apache HBase - Testing Util ........................ SKIPPED
[INFO] Apache HBase - Thrift .............................. SKIPPED
[INFO] Apache HBase - Rest ................................ SKIPPED
[INFO] Apache HBase - Shell ............................... SKIPPED
[INFO] Apache HBase - Integration Tests ................... SKIPPED
[INFO] Apache HBase - Examples ............................ SKIPPED
[INFO] Apache HBase - Assembly ............................ SKIPPED
[INFO] Apache HBase - Shaded .............................. SKIPPED
[INFO] Apache HBase - Shaded - Client ..................... SKIPPED
[INFO] Apache HBase - Shaded - Server ..................... SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 03:32 h
[INFO] Finished at: 2016-10-18T01:35:37+00:00
[INFO] Final Memory: 398M/604M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.18.1:test (secondPartTestsExecution) on project hbase-server: ExecutionException: java.lang.RuntimeException: java.lang.RuntimeException: org.apache.maven.surefire.report.ReporterException: When writing xml report stdout/stderr: /tmp/stderr3110604644229013089deferred (No such file or directory) -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hbase-server
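The resume hint above keeps Maven's <goals> placeholder; as a purely hypothetical example, if this job had been launched with "mvn clean test", resuming the reactor from the failed module would look like:

  mvn clean test -rf :hbase-server

The -rf (--resume-from) flag restarts the multi-module build at the named project instead of rebuilding the modules that already succeeded.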
Build step 'Invoke top-level Maven targets' marked build as failure
Performing Post build task...
Match found for :.* : True
Logical operation result is TRUE
Running script  : # Post-build task script. TODO: Check this in and have all builds reference check-in.
pwd && ls
# NOTE!!!! The below code has been copied and pasted from ./dev-tools/run-test.sh
# Do not change here without syncing there and vice-versa.
ZOMBIE_TESTS_COUNT=`jps -v | grep surefirebooter | grep -e '-Dhbase.test' | wc -l`
if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
 echo "Suspicious java process found - waiting 30s to see if there are just slow to stop"
 sleep 30
 ZOMBIE_TESTS_COUNT=`jps -v | grep surefirebooter | grep -e '-Dhbase.test' | wc -l`
 if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
   echo " {color:red}There appear to be $ZOMBIE_TESTS_COUNT zombie tests{color}, they should have been killed by surefire but survived"
   jps -v | grep surefirebooter | grep -e '-Dhbase.test'
   jps -v | grep surefirebooter | grep -e '-Dhbase.test' | cut -d ' ' -f 1 | xargs -n 1 jstack
   # Exit with error
   exit 1
 else
   echo "We're ok: there is no zombie test, but some tests took some time to stop"
 fi
else
  echo "We're ok: there is no zombie test"
fi
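For reference, the same zombie check can be run by hand on a build slave or a local workspace. This is only a sketch of what the script above already does, assuming the JDK's jps tool is on the PATH and the test JVMs were forked by surefire with -Dhbase.test on their command line:

  # count leftover surefire test JVMs still carrying -Dhbase.test
  jps -v | grep surefirebooter | grep -e '-Dhbase.test' | wc -l
  # dump a thread stack for each one (useful when the count is non-zero)
  jps -v | grep surefirebooter | grep -e '-Dhbase.test' | cut -d ' ' -f 1 | xargs -n 1 jstack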
[HBase-1.1-JDK7] $ /bin/bash -xe /tmp/hudson3383862304662142237.sh
+ pwd
<https://builds.apache.org/job/HBase-1.1-JDK7/ws/>
+ ls
bin
CHANGES.txt
conf
dev-support
hbase-annotations
hbase-assembly
hbase-checkstyle
hbase-client
hbase-common
hbase-examples
hbase-hadoop2-compat
hbase-hadoop-compat
hbase-it
hbase-native-client
hbase-prefix-tree
hbase-procedure
hbase-protocol
hbase-resource-bundle
hbase-rest
hbase-server
hbase-shaded
hbase-shell
hbase-testing-util
hbase-thrift
LICENSE.txt
NOTICE.txt
pom.xml
README.txt
src
target
++ jps -v
++ grep surefirebooter
++ grep -e -Dhbase.test
++ wc -l
+ ZOMBIE_TESTS_COUNT=0
+ [[ 0 != 0 ]]
+ echo 'We'\''re ok: there is no zombie test'
We're ok: there is no zombie test
POST BUILD TASK : SUCCESS
END OF POST BUILD TASK : 0
Archiving artifacts
Recording test results
Updating HBASE-16721
