Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-MacOSX/3794/
Java: 64bit/jdk1.8.0 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC

1 tests failed.
FAILED:  org.apache.solr.cloud.PeerSyncReplicationTest.test

Error Message:
timeout waiting to see all nodes active

Stack Trace:
java.lang.AssertionError: timeout waiting to see all nodes active
        at 
__randomizedtesting.SeedInfo.seed([C63C9A546CBCAEC5:4E68A58EC240C33D]:0)
        at org.junit.Assert.fail(Assert.java:93)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:745)




Build Log:
[...truncated 11394 lines...]
   [junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
   [junit4]   2> Creating dataDir: 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/init-core-data-001
   [junit4]   2> 783054 INFO  
(SUITE-PeerSyncReplicationTest-seed#[C63C9A546CBCAEC5]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Using PointFields
   [junit4]   2> 783059 INFO  
(SUITE-PeerSyncReplicationTest-seed#[C63C9A546CBCAEC5]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: 
@org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN) 
w/ MAC_OS_X supressed clientAuth
   [junit4]   2> 783059 INFO  
(SUITE-PeerSyncReplicationTest-seed#[C63C9A546CBCAEC5]-worker) [    ] 
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   2> 783061 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 783061 INFO  (Thread-1255) [    ] o.a.s.c.ZkTestServer client 
port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 783061 INFO  (Thread-1255) [    ] o.a.s.c.ZkTestServer 
Starting server
   [junit4]   2> 783162 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.ZkTestServer start zk server on port:58431
   [junit4]   2> 783195 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
 to /configs/conf1/solrconfig.xml
   [junit4]   2> 783199 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/schema.xml
 to /configs/conf1/schema.xml
   [junit4]   2> 783202 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
 to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 783205 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
 to /configs/conf1/stopwords.txt
   [junit4]   2> 783208 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/protwords.txt
 to /configs/conf1/protwords.txt
   [junit4]   2> 783211 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/currency.xml
 to /configs/conf1/currency.xml
   [junit4]   2> 783214 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
 to /configs/conf1/enumsConfig.xml
   [junit4]   2> 783216 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
 to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 783219 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
 to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 783223 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
 to /configs/conf1/old_synonyms.txt
   [junit4]   2> 783225 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
 to /configs/conf1/synonyms.txt
   [junit4]   2> 783825 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/control-001/cores/collection1
   [junit4]   2> 783827 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 783829 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@20a17812{/,null,AVAILABLE}
   [junit4]   2> 783838 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@7b7103d9{HTTP/1.1,[http/1.1]}{127.0.0.1:58434}
   [junit4]   2> 783838 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.Server Started @789132ms
   [junit4]   2> 783838 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/tempDir-001/control/data,
 hostContext=/, hostPort=58434, 
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/control-001/cores}
   [junit4]   2> 783838 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 783838 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 783838 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 783838 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 783838 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-24T05:23:55.593Z
   [junit4]   2> 783842 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 783842 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/control-001/solr.xml
   [junit4]   2> 783855 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:58431/solr
   [junit4]   2> 783900 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58434_ 
   ] o.a.s.c.OverseerElectionContext I am going to be the leader 
127.0.0.1:58434_
   [junit4]   2> 783902 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58434_ 
   ] o.a.s.c.Overseer Overseer 
(id=97336389456297988-127.0.0.1:58434_-n_0000000000) starting
   [junit4]   2> 783915 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58434_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:58434_
   [junit4]   2> 783922 INFO  
(zkCallback-1626-thread-1-processing-n:127.0.0.1:58434_) [n:127.0.0.1:58434_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 783966 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58434_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/control-001/cores
   [junit4]   2> 783967 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58434_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 784036 INFO  
(OverseerStateUpdate-97336389456297988-127.0.0.1:58434_-n_0000000000) 
[n:127.0.0.1:58434_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 785051 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] o.a.s.c.SolrConfig 
Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 785069 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 785176 WARN  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 785179 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 785198 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection 
control_collection
   [junit4]   2> 785217 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/control-001/cores/collection1],
 
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/control-001/cores/collection1/data/]
   [junit4]   2> 785217 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@3823f261
   [junit4]   2> 785223 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: 
maxMergeAtOnce=41, maxMergeAtOnceExplicit=50, maxMergedSegmentMB=34.193359375, 
floorSegmentMB=2.1611328125, forceMergeDeletesPctAllowed=19.81489000216662, 
segmentsPerTier=22.0, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.6613021345456811
   [junit4]   2> 785235 WARN  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 785256 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] o.a.s.u.UpdateHandler 
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 785256 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 785257 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] o.a.s.u.CommitTracker 
Hard AutoCommit: disabled
   [junit4]   2> 785257 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] o.a.s.u.CommitTracker 
Soft AutoCommit: disabled
   [junit4]   2> 785258 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: 
minMergeSize=1000, mergeFactor=49, maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.21984337681645602]
   [junit4]   2> 785259 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@e68b836[collection1] main]
   [junit4]   2> 785261 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 785261 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 785261 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 785263 INFO  
(searcherExecutor-3044-thread-1-processing-n:127.0.0.1:58434_ x:collection1 
c:control_collection) [n:127.0.0.1:58434_ c:control_collection   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@e68b836[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 785263 INFO  
(coreLoadExecutor-3043-thread-1-processing-n:127.0.0.1:58434_) 
[n:127.0.0.1:58434_ c:control_collection   x:collection1] o.a.s.u.UpdateLog 
Could not find max version in index or recent updates, using new clock 
1557382233606586368
   [junit4]   2> 785274 INFO  
(coreZkRegister-3036-thread-1-processing-n:127.0.0.1:58434_ x:collection1 
c:control_collection) [n:127.0.0.1:58434_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas 
found to continue.
   [junit4]   2> 785274 INFO  
(coreZkRegister-3036-thread-1-processing-n:127.0.0.1:58434_ x:collection1 
c:control_collection) [n:127.0.0.1:58434_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new 
leader - try and sync
   [junit4]   2> 785274 INFO  
(coreZkRegister-3036-thread-1-processing-n:127.0.0.1:58434_ x:collection1 
c:control_collection) [n:127.0.0.1:58434_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
http://127.0.0.1:58434/collection1/
   [junit4]   2> 785274 INFO  
(coreZkRegister-3036-thread-1-processing-n:127.0.0.1:58434_ x:collection1 
c:control_collection) [n:127.0.0.1:58434_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync 
replicas to me
   [junit4]   2> 785274 INFO  
(coreZkRegister-3036-thread-1-processing-n:127.0.0.1:58434_ x:collection1 
c:control_collection) [n:127.0.0.1:58434_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
http://127.0.0.1:58434/collection1/ has no replicas
   [junit4]   2> 785281 INFO  
(coreZkRegister-3036-thread-1-processing-n:127.0.0.1:58434_ x:collection1 
c:control_collection) [n:127.0.0.1:58434_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new 
leader: http://127.0.0.1:58434/collection1/ shard1
   [junit4]   2> 785376 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 785378 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:58431/solr ready
   [junit4]   2> 785378 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection 
loss:false
   [junit4]   2> 785437 INFO  
(coreZkRegister-3036-thread-1-processing-n:127.0.0.1:58434_ x:collection1 
c:control_collection) [n:127.0.0.1:58434_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery 
necessary
   [junit4]   2> 785683 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-1-001/cores/collection1
   [junit4]   2> 785685 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-1-001
   [junit4]   2> 785685 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 785687 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@36467f7c{/,null,AVAILABLE}
   [junit4]   2> 785688 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@a22dc81{HTTP/1.1,[http/1.1]}{127.0.0.1:58439}
   [junit4]   2> 785688 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.Server Started @790982ms
   [junit4]   2> 785688 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/tempDir-001/jetty1,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=58439, 
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-1-001/cores}
   [junit4]   2> 785689 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 785689 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 785689 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 785689 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 785689 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-24T05:23:57.444Z
   [junit4]   2> 785694 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 785694 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-1-001/solr.xml
   [junit4]   2> 785705 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:58431/solr
   [junit4]   2> 785723 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58439_ 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 785732 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58439_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:58439_
   [junit4]   2> 785736 INFO  
(zkCallback-1626-thread-1-processing-n:127.0.0.1:58434_) [n:127.0.0.1:58434_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 785737 INFO  
(zkCallback-1635-thread-1-processing-n:127.0.0.1:58439_) [n:127.0.0.1:58439_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 785736 INFO  (zkCallback-1630-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 785828 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58439_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-1-001/cores
   [junit4]   2> 785828 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58439_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 785832 INFO  
(OverseerStateUpdate-97336389456297988-127.0.0.1:58434_-n_0000000000) 
[n:127.0.0.1:58434_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 786852 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.0.0
   [junit4]   2> 786867 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 786983 WARN  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 786989 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 787011 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 787011 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-1-001/cores/collection1],
 
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-1-001/cores/collection1/data/]
   [junit4]   2> 787011 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@3823f261
   [junit4]   2> 787015 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: 
[TieredMergePolicy: maxMergeAtOnce=41, maxMergeAtOnceExplicit=50, 
maxMergedSegmentMB=34.193359375, floorSegmentMB=2.1611328125, 
forceMergeDeletesPctAllowed=19.81489000216662, segmentsPerTier=22.0, 
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.6613021345456811
   [junit4]   2> 787031 WARN  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 787048 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 787048 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 787049 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 787049 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 787050 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: 
[LogDocMergePolicy: minMergeSize=1000, mergeFactor=49, 
maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.21984337681645602]
   [junit4]   2> 787051 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@16db8aa6[collection1] main]
   [junit4]   2> 787061 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 787062 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 787062 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 787063 INFO  
(searcherExecutor-3055-thread-1-processing-n:127.0.0.1:58439_ x:collection1 
c:collection1) [n:127.0.0.1:58439_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@16db8aa6[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 787064 INFO  
(coreLoadExecutor-3054-thread-1-processing-n:127.0.0.1:58439_) 
[n:127.0.0.1:58439_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1557382235495071744
   [junit4]   2> 787074 INFO  
(coreZkRegister-3049-thread-1-processing-n:127.0.0.1:58439_ x:collection1 
c:collection1) [n:127.0.0.1:58439_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to 
continue.
   [junit4]   2> 787074 INFO  
(coreZkRegister-3049-thread-1-processing-n:127.0.0.1:58439_ x:collection1 
c:collection1) [n:127.0.0.1:58439_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try 
and sync
   [junit4]   2> 787074 INFO  
(coreZkRegister-3049-thread-1-processing-n:127.0.0.1:58439_ x:collection1 
c:collection1) [n:127.0.0.1:58439_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
http://127.0.0.1:58439/collection1/
   [junit4]   2> 787074 INFO  
(coreZkRegister-3049-thread-1-processing-n:127.0.0.1:58439_ x:collection1 
c:collection1) [n:127.0.0.1:58439_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 787074 INFO  
(coreZkRegister-3049-thread-1-processing-n:127.0.0.1:58439_ x:collection1 
c:collection1) [n:127.0.0.1:58439_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:58439/collection1/ has no 
replicas
   [junit4]   2> 787082 INFO  
(coreZkRegister-3049-thread-1-processing-n:127.0.0.1:58439_ x:collection1 
c:collection1) [n:127.0.0.1:58439_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: 
http://127.0.0.1:58439/collection1/ shard1
   [junit4]   2> 787235 INFO  
(coreZkRegister-3049-thread-1-processing-n:127.0.0.1:58439_ x:collection1 
c:collection1) [n:127.0.0.1:58439_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 787463 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-2-001/cores/collection1
   [junit4]   2> 787464 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-2-001
   [junit4]   2> 787466 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 787468 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@292ab8d0{/,null,AVAILABLE}
   [junit4]   2> 787469 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@1d6deaaf{HTTP/1.1,[http/1.1]}{127.0.0.1:58443}
   [junit4]   2> 787470 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.Server Started @792765ms
   [junit4]   2> 787470 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/tempDir-001/jetty2,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=58443, 
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-2-001/cores}
   [junit4]   2> 787470 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 787471 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 
7.0.0
   [junit4]   2> 787471 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 787471 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 787471 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-24T05:23:59.226Z
   [junit4]   2> 787475 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 787475 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-2-001/solr.xml
   [junit4]   2> 787505 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:58431/solr
   [junit4]   2> 787523 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58443_ 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 787535 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58443_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:58443_
   [junit4]   2> 787538 INFO  
(zkCallback-1635-thread-1-processing-n:127.0.0.1:58439_) [n:127.0.0.1:58439_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 787538 INFO  
(zkCallback-1626-thread-1-processing-n:127.0.0.1:58434_) [n:127.0.0.1:58434_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 787538 INFO  (zkCallback-1630-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 787540 INFO  
(zkCallback-1641-thread-1-processing-n:127.0.0.1:58443_) [n:127.0.0.1:58443_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 787619 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58443_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-2-001/cores
   [junit4]   2> 787619 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58443_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 787621 INFO  
(OverseerStateUpdate-97336389456297988-127.0.0.1:58434_-n_0000000000) 
[n:127.0.0.1:58434_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 788637 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.0.0
   [junit4]   2> 788654 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 788765 WARN  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 788767 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 788808 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 788817 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-2-001/cores/collection1],
 
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-2-001/cores/collection1/data/]
   [junit4]   2> 788819 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@3823f261
   [junit4]   2> 788829 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: 
[TieredMergePolicy: maxMergeAtOnce=41, maxMergeAtOnceExplicit=50, 
maxMergedSegmentMB=34.193359375, floorSegmentMB=2.1611328125, 
forceMergeDeletesPctAllowed=19.81489000216662, segmentsPerTier=22.0, 
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.6613021345456811
   [junit4]   2> 788837 WARN  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 788857 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 788857 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 788858 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 788858 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 788859 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: 
[LogDocMergePolicy: minMergeSize=1000, mergeFactor=49, 
maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.21984337681645602]
   [junit4]   2> 788861 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@77619165[collection1] main]
   [junit4]   2> 788863 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 788865 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 788866 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 788870 INFO  
(searcherExecutor-3066-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
c:collection1) [n:127.0.0.1:58443_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@77619165[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 788877 INFO  
(coreLoadExecutor-3065-thread-1-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1557382237396140032
   [junit4]   2> 788884 INFO  
(coreZkRegister-3060-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
c:collection1) [n:127.0.0.1:58443_ c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 788885 INFO  
(updateExecutor-1638-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 788885 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. 
recoveringAfterStartup=true
   [junit4]   2> 788885 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 788885 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. 
core=[collection1]
   [junit4]   2> 788885 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. 
FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 788885 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core 
[collection1] as recovering, leader is [http://127.0.0.1:58439/collection1/] 
and I am [http://127.0.0.1:58443/collection1/]
   [junit4]   2> 788888 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery 
command to [http://127.0.0.1:58439]; [WaitForState: 
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:58443_&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 788891 INFO  (qtp1278625262-8341) [n:127.0.0.1:58439_    ] 
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state: 
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 788892 INFO  (qtp1278625262-8341) [n:127.0.0.1:58439_    ] 
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 
(shard1 of collection1) have state: recovering
   [junit4]   2> 788892 INFO  (qtp1278625262-8341) [n:127.0.0.1:58439_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=down, localState=active, 
nodeName=127.0.0.1:58443_, coreNodeName=core_node2, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node2:{"core":"collection1","base_url":"http://127.0.0.1:58443","node_name":"127.0.0.1:58443_","state":"down"}
   [junit4]   2> 789424 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-3-001/cores/collection1
   [junit4]   2> 789426 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-3-001
   [junit4]   2> 789426 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 789439 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@25d37380{/,null,AVAILABLE}
   [junit4]   2> 789440 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@e187978{HTTP/1.1,[http/1.1]}{127.0.0.1:58448}
   [junit4]   2> 789440 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.Server Started @794735ms
   [junit4]   2> 789440 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/tempDir-001/jetty3,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=58448, 
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-3-001/cores}
   [junit4]   2> 789440 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 789441 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 
7.0.0
   [junit4]   2> 789441 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 789441 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 789441 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-24T05:24:01.196Z
   [junit4]   2> 789445 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 789445 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-3-001/solr.xml
   [junit4]   2> 789456 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:58431/solr
   [junit4]   2> 789474 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58448_ 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 789483 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58448_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:58448_
   [junit4]   2> 789485 INFO  (zkCallback-1630-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 789486 INFO  
(zkCallback-1626-thread-2-processing-n:127.0.0.1:58434_) [n:127.0.0.1:58434_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 789486 INFO  
(zkCallback-1635-thread-1-processing-n:127.0.0.1:58439_) [n:127.0.0.1:58439_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 789487 INFO  
(zkCallback-1648-thread-1-processing-n:127.0.0.1:58448_) [n:127.0.0.1:58448_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 789487 INFO  
(zkCallback-1641-thread-1-processing-n:127.0.0.1:58443_) [n:127.0.0.1:58443_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 789623 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58448_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-3-001/cores
   [junit4]   2> 789623 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [n:127.0.0.1:58448_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 789626 INFO  
(OverseerStateUpdate-97336389456297988-127.0.0.1:58434_-n_0000000000) 
[n:127.0.0.1:58434_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 789899 INFO  (qtp1278625262-8341) [n:127.0.0.1:58439_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=recovering, localState=active, 
nodeName=127.0.0.1:58443_, coreNodeName=core_node2, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node2:{"core":"collection1","base_url":"http://127.0.0.1:58443","node_name":"127.0.0.1:58443_","state":"recovering"}
   [junit4]   2> 789899 INFO  (qtp1278625262-8341) [n:127.0.0.1:58439_    ] 
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering, 
checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 789899 INFO  (qtp1278625262-8341) [n:127.0.0.1:58439_    ] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={nodeName=127.0.0.1:58443_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=0 QTime=1008
   [junit4]   2> 790645 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.0.0
   [junit4]   2> 790660 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 790775 WARN  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 790777 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 790802 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 790803 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-3-001/cores/collection1],
 
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001/shard-3-001/cores/collection1/data/]
   [junit4]   2> 790803 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@3823f261
   [junit4]   2> 790806 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: 
[TieredMergePolicy: maxMergeAtOnce=41, maxMergeAtOnceExplicit=50, 
maxMergedSegmentMB=34.193359375, floorSegmentMB=2.1611328125, 
forceMergeDeletesPctAllowed=19.81489000216662, segmentsPerTier=22.0, 
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.6613021345456811
   [junit4]   2> 790823 WARN  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 790844 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 790844 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 790845 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 790845 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 790846 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: 
[LogDocMergePolicy: minMergeSize=1000, mergeFactor=49, 
maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.21984337681645602]
   [junit4]   2> 790847 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@be4a3bd[collection1] main]
   [junit4]   2> 790848 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 790849 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 790849 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 790850 INFO  
(searcherExecutor-3077-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
c:collection1) [n:127.0.0.1:58448_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@be4a3bd[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 790851 INFO  
(coreLoadExecutor-3076-thread-1-processing-n:127.0.0.1:58448_) 
[n:127.0.0.1:58448_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1557382239466029056
   [junit4]   2> 790857 INFO  
(coreZkRegister-3071-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
c:collection1) [n:127.0.0.1:58448_ c:collection1 s:shard1 r:core_node3 
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 790857 INFO  
(updateExecutor-1645-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 790867 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. 
recoveringAfterStartup=true
   [junit4]   2> 790867 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 790867 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. 
core=[collection1]
   [junit4]   2> 790867 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. 
FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 790867 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core 
[collection1] as recovering, leader is [http://127.0.0.1:58439/collection1/] 
and I am [http://127.0.0.1:58448/collection1/]
   [junit4]   2> 790870 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery 
command to [http://127.0.0.1:58439]; [WaitForState: 
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:58448_&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 790872 INFO  (qtp1278625262-8345) [n:127.0.0.1:58439_    ] 
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state: 
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 790873 INFO  (qtp1278625262-8345) [n:127.0.0.1:58439_    ] 
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 
(shard1 of collection1) have state: recovering
   [junit4]   2> 790873 INFO  (qtp1278625262-8345) [n:127.0.0.1:58439_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=down, localState=active, 
nodeName=127.0.0.1:58448_, coreNodeName=core_node3, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node3:{"core":"collection1","base_url":"http://127.0.0.1:58448","node_name":"127.0.0.1:58448_","state":"down"}
   [junit4]   2> 791109 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 791109 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30 
for each attempt
   [junit4]   2> 791109 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: 
collection1 failOnTimeout:true timeout (sec):30
   [junit4]   2> 791874 INFO  (qtp1278625262-8345) [n:127.0.0.1:58439_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=recovering, localState=active, 
nodeName=127.0.0.1:58448_, coreNodeName=core_node3, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node3:{"core":"collection1","base_url":"http://127.0.0.1:58448","node_name":"127.0.0.1:58448_","state":"recovering"}
   [junit4]   2> 791874 INFO  (qtp1278625262-8345) [n:127.0.0.1:58439_    ] 
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering, 
checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 791874 INFO  (qtp1278625262-8345) [n:127.0.0.1:58439_    ] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={nodeName=127.0.0.1:58448_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=0 QTime=1002
   [junit4]   2> 796900 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync 
from [http://127.0.0.1:58439/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 796900 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 
url=http://127.0.0.1:58443 START replicas=[http://127.0.0.1:58439/collection1/] 
nUpdates=1000
   [junit4]   2> 796903 INFO  (qtp1278625262-8339) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 796903 INFO  (qtp1278625262-8339) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/get 
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
 status=0 QTime=1
   [junit4]   2> 796904 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint 
millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 796904 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to 
do a PeerSync 
   [junit4]   2> 796904 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 796904 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted 
changes. Skipping IW.commit.
   [junit4]   2> 796904 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 796904 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery 
was successful.
   [junit4]   2> 796904 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered 
during PeerSync.
   [junit4]   2> 796904 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 796904 INFO  
(recoveryExecutor-1639-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active 
after recovery.
   [junit4]   2> 798876 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync 
from [http://127.0.0.1:58439/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 798876 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 
url=http://127.0.0.1:58448 START replicas=[http://127.0.0.1:58439/collection1/] 
nUpdates=1000
   [junit4]   2> 798878 INFO  (qtp1278625262-8343) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 798878 INFO  (qtp1278625262-8343) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/get 
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
 status=0 QTime=0
   [junit4]   2> 798879 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint 
millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 798880 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to 
do a PeerSync 
   [junit4]   2> 798880 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 798880 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted 
changes. Skipping IW.commit.
   [junit4]   2> 798880 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 798880 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery 
was successful.
   [junit4]   2> 798880 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered 
during PeerSync.
   [junit4]   2> 798880 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 798880 INFO  
(recoveryExecutor-1646-thread-1-processing-n:127.0.0.1:58448_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:58448_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active 
after recovery.
   [junit4]   2> 799118 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 799121 INFO  (qtp1425114124-8304) [n:127.0.0.1:58434_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 799121 INFO  (qtp1425114124-8304) [n:127.0.0.1:58434_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 799122 INFO  (qtp1425114124-8304) [n:127.0.0.1:58434_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 799122 INFO  (qtp1425114124-8304) [n:127.0.0.1:58434_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
 0 1
   [junit4]   2> 799126 INFO  (qtp1278625262-8341) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 799126 INFO  (qtp1278625262-8341) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 799129 INFO  (qtp1279876433-8404) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 799129 INFO  (qtp970779132-8369) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 799129 INFO  (qtp1279876433-8404) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 799129 INFO  (qtp970779132-8369) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 799129 INFO  (qtp1278625262-8341) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 799129 INFO  (qtp1279876433-8404) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 799129 INFO  (qtp970779132-8369) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 799129 INFO  (qtp1279876433-8404) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:58439/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 0
   [junit4]   2> 799129 INFO  (qtp970779132-8369) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:58439/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 0
   [junit4]   2> 799129 INFO  (qtp1278625262-8341) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:58439/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 2
   [junit4]   2> 799130 INFO  (qtp1278625262-8346) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
 0 6
   [junit4]   2> 799132 INFO  (qtp1278625262-8342) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 799134 INFO  (qtp970779132-8370) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 799135 INFO  (qtp1279876433-8405) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 801138 INFO  (qtp1425114124-8305) [n:127.0.0.1:58434_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{deleteByQuery=*:* (-1557382250251681792)} 0 2
   [junit4]   2> 801144 INFO  (qtp970779132-8370) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&_version_=-1557382250253778944&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
 (-1557382250253778944)} 0 2
   [junit4]   2> 801144 INFO  (qtp1279876433-8406) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&_version_=-1557382250253778944&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
 (-1557382250253778944)} 0 2
   [junit4]   2> 801144 INFO  (qtp1278625262-8339) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{deleteByQuery=*:* (-1557382250253778944)} 0 4
   [junit4]   2> 801153 INFO  (qtp970779132-8372) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[0
 (1557382250264264704)]} 0 1
   [junit4]   2> 801154 INFO  (qtp1279876433-8407) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[0
 (1557382250264264704)]} 0 1
   [junit4]   2> 801154 INFO  (qtp1278625262-8344) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[0 (1557382250264264704)]} 0 6
   [junit4]   2> 801157 INFO  (qtp970779132-8372) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[1
 (1557382250270556160)]} 0 0
   [junit4]   2> 801157 INFO  (qtp1279876433-8407) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[1
 (1557382250270556160)]} 0 0
   [junit4]   2> 801157 INFO  (qtp1278625262-8343) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[1 (1557382250270556160)]} 0 2
   [junit4]   2> 801159 INFO  (qtp970779132-8374) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[2
 (1557382250273701888)]} 0 0
   [junit4]   2> 801160 INFO  (qtp1279876433-8409) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[2
 (1557382250273701888)]} 0 0
   [junit4]   2> 801161 INFO  (qtp1278625262-8345) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[2 (1557382250273701888)]} 0 2
   [junit4]   2> 801162 INFO  (qtp970779132-8374) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[3
 (1557382250276847616)]} 0 0
   [junit4]   2> 801163 INFO  (qtp1279876433-8409) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[3
 (1557382250276847616)]} 0 0
   [junit4]   2> 801164 INFO  (qtp1278625262-8341) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[3 (1557382250276847616)]} 0 2
   [junit4]   2> 801165 INFO  (qtp970779132-8374) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[4
 (1557382250279993344)]} 0 0
   [junit4]   2> 801166 INFO  (qtp1279876433-8404) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[4
 (1557382250279993344)]} 0 0
   [junit4]   2> 801166 INFO  (qtp1278625262-8346) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[4 (1557382250279993344)]} 0 2
   [junit4]   2> 801168 INFO  (qtp970779132-8374) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[5
 (1557382250283139072)]} 0 0
   [junit4]   2> 801168 INFO  (qtp1279876433-8405) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[5
 (1557382250283139072)]} 0 0
   [junit4]   2> 801169 INFO  (qtp1278625262-8342) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[5 (1557382250283139072)]} 0 2
   [junit4]   2> 801171 INFO  (qtp970779132-8374) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[6
 (1557382250286284800)]} 0 0
   [junit4]   2> 801171 INFO  (qtp1279876433-8405) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[6
 (1557382250286284800)]} 0 0
   [junit4]   2> 801172 INFO  (qtp1278625262-8339) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[6 (1557382250286284800)]} 0 2
   [junit4]   2> 801174 INFO  (qtp970779132-8374) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[7
 (1557382250289430528)]} 0 0
   [junit4]   2> 801174 INFO  (qtp1279876433-8405) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[7
 (1557382250289430528)]} 0 0
   [junit4]   2> 801174 INFO  (qtp1278625262-8344) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[7 (1557382250289430528)]} 0 1
   [junit4]   2> 801176 INFO  (qtp970779132-8374) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[8
 (1557382250291527680)]} 0 0
   [junit4]   2> 801176 INFO  (qtp1279876433-8405) [n:127.0.0.1:58448_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:58439/collection1/&wt=javabin&version=2}{add=[8
 (1557382250291527680)]} 0 0
   [junit4]   2> 801177 INFO  (qtp1278625262-8343) [n:127.0.0.1:58439_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProc

[...truncated too long message...]

junit4]   2>    at 
org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:589)
   [junit4]   2>        at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 
   [junit4]   2> 987367 INFO  (qtp1278625262-8521) [n:127.0.0.1:58439_    ] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={nodeName=127.0.0.1:58443_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=400 QTime=33116
   [junit4]   2> 987375 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.ChaosMonkey monkey: stop shard! 58443
   [junit4]   2> 987375 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.CoreContainer Shutting down CoreContainer instance=136519551
   [junit4]   2> 987375 WARN  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.RecoveryStrategy Stopping recovery for core=[collection1] 
coreNodeName=[core_node2]
   [junit4]   2> 987379 WARN  
(updateExecutor-1652-thread-2-processing-n:127.0.0.1:58443_) 
[n:127.0.0.1:58443_ c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.DefaultSolrCoreState Skipping recovery because Solr is shutdown
   [junit4]   2> 991707 INFO  
(recoveryExecutor-1653-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy RecoveryStrategy has been 
closed
   [junit4]   2> 991707 INFO  
(recoveryExecutor-1653-thread-1-processing-n:127.0.0.1:58443_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:58443_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Finished recovery process, 
successful=[false]
   [junit4]   2> 991708 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.Overseer Overseer (id=97336389456298001-127.0.0.1:58443_-n_0000000004) 
closing
   [junit4]   2> 991708 INFO  
(OverseerStateUpdate-97336389456298001-127.0.0.1:58443_-n_0000000004) 
[n:127.0.0.1:58443_    ] o.a.s.c.Overseer Overseer Loop exiting : 
127.0.0.1:58443_
   [junit4]   2> 991715 WARN  
(zkCallback-1655-thread-4-processing-n:127.0.0.1:58443_) [n:127.0.0.1:58443_    
] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to 
ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 993221 INFO  
(zkCallback-1655-thread-3-processing-n:127.0.0.1:58443_) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.SolrCore 
[collection1]  CLOSING SolrCore org.apache.solr.core.SolrCore@4beb459
   [junit4]   2> 993221 WARN  
(zkCallback-1655-thread-3-processing-n:127.0.0.1:58443_) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy 
Stopping recovery for core=[collection1] coreNodeName=[core_node2]
   [junit4]   2> 993260 INFO  
(zkCallback-1655-thread-3-processing-n:127.0.0.1:58443_) [n:127.0.0.1:58443_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.m.SolrMetricManager 
Closing metric reporters for: solr.core.collection1
   [junit4]   2> 993261 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 993262 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.AbstractConnector Stopped 
ServerConnector@19618344{HTTP/1.1,[http/1.1]}{127.0.0.1:58443}
   [junit4]   2> 993262 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.e.j.s.h.ContextHandler Stopped 
o.e.j.s.ServletContextHandler@60aa5f1{/,null,UNAVAILABLE}
   [junit4]   2> 993264 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.ChaosMonkey monkey: stop shard! 58448
   [junit4]   2> 993265 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[C63C9A546CBCAEC5]) [    ] 
o.a.s.c.ZkTestServer connecting to 127.0.0.1:58431 58431
   [junit4]   2> 993323 INFO  (Thread-1255) [    ] o.a.s.c.ZkTestServer 
connecting to 127.0.0.1:58431 58431
   [junit4]   2> 994819 WARN  (Thread-1255) [    ] o.a.s.c.ZkTestServer Watch 
limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2>        6       /solr/aliases.json
   [junit4]   2>        6       /solr/clusterprops.json
   [junit4]   2>        5       /solr/security.json
   [junit4]   2>        5       /solr/configs/conf1
   [junit4]   2>        4       /solr/collections/collection1/state.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2>        6       /solr/clusterstate.json
   [junit4]   2>        2       
/solr/overseer_elect/election/97336389456297992-127.0.0.1:58439_-n_0000000001
   [junit4]   2>        2       
/solr/collections/collection1/leader_elect/shard1/election/97336389456297992-core_node1-n_0000000000
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2>        207     /solr/overseer/collection-queue-work
   [junit4]   2>        41      /solr/overseer/queue
   [junit4]   2>        6       /solr/collections
   [junit4]   2>        6       /solr/overseer/queue-work
   [junit4]   2>        5       /solr/live_nodes
   [junit4]   2> 
   [junit4]   2> NOTE: reproduce with: ant test  
-Dtestcase=PeerSyncReplicationTest -Dtests.method=test 
-Dtests.seed=C63C9A546CBCAEC5 -Dtests.slow=true -Dtests.locale=zh-TW 
-Dtests.timezone=Asia/Urumqi -Dtests.asserts=true 
-Dtests.file.encoding=ISO-8859-1
   [junit4] FAILURE  212s J0 | PeerSyncReplicationTest.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: timeout waiting to see 
all nodes active
   [junit4]    >        at 
__randomizedtesting.SeedInfo.seed([C63C9A546CBCAEC5:4E68A58EC240C33D]:0)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
   [junit4]    >        at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 994826 INFO  
(SUITE-PeerSyncReplicationTest-seed#[C63C9A546CBCAEC5]-worker) [    ] 
o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> NOTE: leaving temporary files on disk at: 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_C63C9A546CBCAEC5-001
   [junit4]   2> NOTE: test params are: codec=Lucene70, 
sim=RandomSimilarity(queryNorm=true): {}, locale=zh-TW, timezone=Asia/Urumqi
   [junit4]   2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_121 
(64-bit)/cpus=3,threads=1,free=57061032,total=245276672
   [junit4]   2> NOTE: All tests run in this JVM: [SolrCoreTest, 
MergeStrategyTest, BlockJoinFacetDistribTest, FastVectorHighlighterTest, 
TestJsonFacetRefinement, CdcrUpdateLogTest, TestSurroundQueryParser, 
SolrMetricReporterTest, TestRequestForwarding, CoreSorterTest, 
TestMergePolicyConfig, TestObjectReleaseTracker, 
CollectionsAPIAsyncDistributedZkTest, PolyFieldTest, SolrPluginUtilsTest, 
TestQuerySenderNoQuery, CopyFieldTest, TermsComponentTest, 
AnalysisAfterCoreReloadTest, ZkNodePropsTest, TestManagedSchemaAPI, 
SortSpecParsingTest, DeleteReplicaTest, PrimitiveFieldTypeTest, 
DocValuesNotIndexedTest, TestDistributedStatsComponentCardinality, 
TestPivotHelperCode, CustomCollectionTest, SparseHLLTest, 
CollectionStateFormat2Test, TestBinaryResponseWriter, 
DistributedQueryElevationComponentTest, SynonymTokenizerTest, 
TestNumericTerms64, DistributedQueueTest, TestDeleteCollectionOnDownNodes, 
ResponseHeaderTest, SuggestComponentContextFilterQueryTest, 
TestSolrDeletionPolicy1, DistributedFacetPivotSmallAdvancedTest, 
BasicAuthStandaloneTest, TestStressLiveNodes, TestConfigSetProperties, 
TestSolrDynamicMBean, TestXmlQParser, HdfsChaosMonkeyNothingIsSafeTest, 
TestLRUCache, UtilsToolTest, DateFieldTest, TestDocumentBuilder, 
TestUniqueKeyFieldResource, TestManagedSynonymFilterFactory, PluginInfoTest, 
SchemaVersionSpecificBehaviorTest, TestConfigReload, SimpleMLTQParserTest, 
UnloadDistributedZkTest, TestSolrJ, TestStressVersions, TestStressReorder, 
BooleanFieldTest, IndexSchemaRuntimeFieldTest, TestSchemaVersionResource, 
CloudExitableDirectoryReaderTest, ParsingFieldUpdateProcessorsTest, 
SystemInfoHandlerTest, TestHdfsCloudBackupRestore, FileUtilsTest, 
BinaryUpdateRequestHandlerTest, TestBinaryField, TestExportWriter, 
OverseerStatusTest, ConcurrentDeleteAndCreateCollectionTest, HttpPartitionTest, 
FacetPivotSmallTest, TestSortByMinMaxFunction, HLLSerializationTest, 
RequiredFieldsTest, TestGroupingSearch, TestCorePropertiesReload, 
TestRemoteStreaming, TestRawTransformer, RecoveryZkTest, 
TestComplexPhraseLeadingWildcard, TestExactSharedStatsCache, 
StatsReloadRaceTest, TestAnalyzeInfixSuggestions, 
SimpleCollectionCreateDeleteTest, JSONWriterTest, DisMaxRequestHandlerTest, 
TestDelegationWithHadoopAuth, MigrateRouteKeyTest, TestExtendedDismaxParser, 
TestJmxIntegration, TestFieldCacheReopen, 
DistribDocExpirationUpdateProcessorTest, ExplicitHLLTest, 
TestSolrFieldCacheMBean, TestJettySolrRunner, SoftAutoCommitTest, 
XsltUpdateRequestHandlerTest, TestRandomFlRTGCloud, TestFastOutputStream, 
DistributedFacetPivotWhiteBoxTest, TestSolrConfigHandlerConcurrent, 
TestFileDictionaryLookup, TestCloudPseudoReturnFields, 
TestDocBasedVersionConstraints, PeerSyncReplicationTest]
   [junit4] Completed [230/680 (1!)] on J0 in 211.80s, 1 test, 1 failure <<< 
FAILURES!

[...truncated 63908 lines...]

---------------------------------------------------------------------
To unsubscribe, e-mail: dev-unsubscribe@lucene.apache.org
For additional commands, e-mail: dev-help@lucene.apache.org

Reply via email to