Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-Linux/18787/
Java: 32bit/jdk1.8.0_112 -server -XX:+UseSerialGC

1 tests failed.
FAILED:  org.apache.solr.cloud.PeerSyncReplicationTest.test

Error Message:
timeout waiting to see all nodes active

Stack Trace:
java.lang.AssertionError: timeout waiting to see all nodes active
        at 
__randomizedtesting.SeedInfo.seed([2A532E009DD49072:A20711DA3328FD8A]:0)
        at org.junit.Assert.fail(Assert.java:93)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:811)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:462)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
        at java.lang.Thread.run(Thread.java:745)




Build Log:
[...truncated 12292 lines...]
   [junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
   [junit4]   2> Creating dataDir: 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/init-core-data-001
   [junit4]   2> 1414751 INFO  
(SUITE-PeerSyncReplicationTest-seed#[2A532E009DD49072]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (true) via: 
@org.apache.solr.util.RandomizeSSL(reason=, ssl=NaN, value=NaN, clientAuth=NaN)
   [junit4]   2> 1414752 INFO  
(SUITE-PeerSyncReplicationTest-seed#[2A532E009DD49072]-worker) [    ] 
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   2> 1414754 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 1414754 INFO  (Thread-1995) [    ] o.a.s.c.ZkTestServer client 
port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 1414754 INFO  (Thread-1995) [    ] o.a.s.c.ZkTestServer 
Starting server
   [junit4]   2> 1414854 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.ZkTestServer start zk server on port:45365
   [junit4]   2> 1414860 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
 to /configs/conf1/solrconfig.xml
   [junit4]   2> 1414861 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/schema.xml
 to /configs/conf1/schema.xml
   [junit4]   2> 1414861 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
 to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 1414862 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
 to /configs/conf1/stopwords.txt
   [junit4]   2> 1414863 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/protwords.txt
 to /configs/conf1/protwords.txt
   [junit4]   2> 1414864 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/currency.xml
 to /configs/conf1/currency.xml
   [junit4]   2> 1414864 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
 to /configs/conf1/enumsConfig.xml
   [junit4]   2> 1414865 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
 to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 1414865 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
 to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 1414866 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
 to /configs/conf1/old_synonyms.txt
   [junit4]   2> 1414867 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
 to /configs/conf1/synonyms.txt
   [junit4]   2> 1414936 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/control-001/cores/collection1
   [junit4]   2> 1414938 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1414938 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@1df5b6c{/,null,AVAILABLE}
   [junit4]   2> 1414941 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@1da02ba{SSL,[ssl, 
http/1.1]}{127.0.0.1:34267}
   [junit4]   2> 1414941 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.Server Started @1416885ms
   [junit4]   2> 1414941 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/tempDir-001/control/data,
 hostContext=/, hostPort=34267, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/control-001/cores}
   [junit4]   2> 1414942 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 1414942 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 1414942 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1414942 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1414942 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-16T18:22:48.599Z
   [junit4]   2> 1414944 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 1414944 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/control-001/solr.xml
   [junit4]   2> 1414950 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:45365/solr
   [junit4]   2> 1414960 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:34267_ 
   ] o.a.s.c.OverseerElectionContext I am going to be the leader 
127.0.0.1:34267_
   [junit4]   2> 1414960 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:34267_ 
   ] o.a.s.c.Overseer Overseer 
(id=97294153705848837-127.0.0.1:34267_-n_0000000000) starting
   [junit4]   2> 1414963 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:34267_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:34267_
   [junit4]   2> 1414964 INFO  
(zkCallback-1448-thread-1-processing-n:127.0.0.1:34267_) [n:127.0.0.1:34267_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1415125 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:34267_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/control-001/cores
   [junit4]   2> 1415125 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:34267_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 1415127 INFO  
(OverseerStateUpdate-97294153705848837-127.0.0.1:34267_-n_0000000000) 
[n:127.0.0.1:34267_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 1416134 WARN  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.c.Config 
Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> 
instead.
   [junit4]   2> 1416135 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.c.SolrConfig 
Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 1416166 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 1416241 WARN  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 1416243 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1416249 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection 
control_collection
   [junit4]   2> 1416249 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/control-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/control-001/cores/collection1/data/]
   [junit4]   2> 1416249 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@33f929
   [junit4]   2> 1416251 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=26, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 1416351 WARN  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 1416360 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.u.UpdateHandler 
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 1416360 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1416361 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.u.CommitTracker 
Hard AutoCommit: disabled
   [junit4]   2> 1416361 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.u.CommitTracker 
Soft AutoCommit: disabled
   [junit4]   2> 1416362 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: 
maxMergeAtOnce=13, maxMergeAtOnceExplicit=36, maxMergedSegmentMB=36.087890625, 
floorSegmentMB=0.6708984375, forceMergeDeletesPctAllowed=23.20441957831426, 
segmentsPerTier=50.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0
   [junit4]   2> 1416362 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@13ff454[collection1] main]
   [junit4]   2> 1416363 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 1416363 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1416363 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 1416364 INFO  
(searcherExecutor-4083-thread-1-processing-n:127.0.0.1:34267_ x:collection1 
c:control_collection) [n:127.0.0.1:34267_ c:control_collection   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@13ff454[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1416365 INFO  
(coreLoadExecutor-4082-thread-1-processing-n:127.0.0.1:34267_) 
[n:127.0.0.1:34267_ c:control_collection   x:collection1] o.a.s.u.UpdateLog 
Could not find max version in index or recent updates, using new clock 
1556706460981788672
   [junit4]   2> 1416369 INFO  
(coreZkRegister-4075-thread-1-processing-n:127.0.0.1:34267_ x:collection1 
c:control_collection) [n:127.0.0.1:34267_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas 
found to continue.
   [junit4]   2> 1416369 INFO  
(coreZkRegister-4075-thread-1-processing-n:127.0.0.1:34267_ x:collection1 
c:control_collection) [n:127.0.0.1:34267_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new 
leader - try and sync
   [junit4]   2> 1416369 INFO  
(coreZkRegister-4075-thread-1-processing-n:127.0.0.1:34267_ x:collection1 
c:control_collection) [n:127.0.0.1:34267_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
https://127.0.0.1:34267/collection1/
   [junit4]   2> 1416369 INFO  
(coreZkRegister-4075-thread-1-processing-n:127.0.0.1:34267_ x:collection1 
c:control_collection) [n:127.0.0.1:34267_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync 
replicas to me
   [junit4]   2> 1416369 INFO  
(coreZkRegister-4075-thread-1-processing-n:127.0.0.1:34267_ x:collection1 
c:control_collection) [n:127.0.0.1:34267_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
https://127.0.0.1:34267/collection1/ has no replicas
   [junit4]   2> 1416370 INFO  
(coreZkRegister-4075-thread-1-processing-n:127.0.0.1:34267_ x:collection1 
c:control_collection) [n:127.0.0.1:34267_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new 
leader: https://127.0.0.1:34267/collection1/ shard1
   [junit4]   2> 1416458 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1416459 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:45365/solr ready
   [junit4]   2> 1416459 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection 
loss:false
   [junit4]   2> 1416521 INFO  
(coreZkRegister-4075-thread-1-processing-n:127.0.0.1:34267_ x:collection1 
c:control_collection) [n:127.0.0.1:34267_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery 
necessary
   [junit4]   2> 1416529 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-1-001/cores/collection1
   [junit4]   2> 1416530 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-1-001
   [junit4]   2> 1416531 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1416532 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@19187a4{/,null,AVAILABLE}
   [junit4]   2> 1416532 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@9a10e8{SSL,[ssl, 
http/1.1]}{127.0.0.1:36334}
   [junit4]   2> 1416533 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.Server Started @1418476ms
   [junit4]   2> 1416533 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/tempDir-001/jetty1,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=36334, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-1-001/cores}
   [junit4]   2> 1416533 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 1416533 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 1416533 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1416533 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1416533 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-16T18:22:50.190Z
   [junit4]   2> 1416535 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 1416535 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-1-001/solr.xml
   [junit4]   2> 1416542 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:45365/solr
   [junit4]   2> 1416547 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:36334_ 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1416549 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:36334_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:36334_
   [junit4]   2> 1416549 INFO  
(zkCallback-1448-thread-2-processing-n:127.0.0.1:34267_) [n:127.0.0.1:34267_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1416550 INFO  (zkCallback-1452-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1416550 INFO  
(zkCallback-1457-thread-1-processing-n:127.0.0.1:36334_) [n:127.0.0.1:36334_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1416615 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:36334_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-1-001/cores
   [junit4]   2> 1416615 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:36334_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 1416616 INFO  
(OverseerStateUpdate-97294153705848837-127.0.0.1:34267_-n_0000000000) 
[n:127.0.0.1:34267_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 1417624 WARN  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 1417625 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.0.0
   [junit4]   2> 1417635 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 1417726 WARN  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 1417728 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 1417734 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 1417735 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-1-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-1-001/cores/collection1/data/]
   [junit4]   2> 1417735 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@33f929
   [junit4]   2> 1417736 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=26, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 1417768 WARN  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 1417780 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 1417780 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1417780 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 1417780 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 1417781 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: 
[TieredMergePolicy: maxMergeAtOnce=13, maxMergeAtOnceExplicit=36, 
maxMergedSegmentMB=36.087890625, floorSegmentMB=0.6708984375, 
forceMergeDeletesPctAllowed=23.20441957831426, segmentsPerTier=50.0, 
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0
   [junit4]   2> 1417781 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@75c7da[collection1] main]
   [junit4]   2> 1417782 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 1417782 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1417782 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 1417783 INFO  
(searcherExecutor-4094-thread-1-processing-n:127.0.0.1:36334_ x:collection1 
c:collection1) [n:127.0.0.1:36334_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@75c7da[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1417784 INFO  
(coreLoadExecutor-4093-thread-1-processing-n:127.0.0.1:36334_) 
[n:127.0.0.1:36334_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1556706462469718016
   [junit4]   2> 1417787 INFO  
(coreZkRegister-4088-thread-1-processing-n:127.0.0.1:36334_ x:collection1 
c:collection1) [n:127.0.0.1:36334_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to 
continue.
   [junit4]   2> 1417787 INFO  
(coreZkRegister-4088-thread-1-processing-n:127.0.0.1:36334_ x:collection1 
c:collection1) [n:127.0.0.1:36334_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try 
and sync
   [junit4]   2> 1417788 INFO  
(coreZkRegister-4088-thread-1-processing-n:127.0.0.1:36334_ x:collection1 
c:collection1) [n:127.0.0.1:36334_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
https://127.0.0.1:36334/collection1/
   [junit4]   2> 1417788 INFO  
(coreZkRegister-4088-thread-1-processing-n:127.0.0.1:36334_ x:collection1 
c:collection1) [n:127.0.0.1:36334_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 1417788 INFO  
(coreZkRegister-4088-thread-1-processing-n:127.0.0.1:36334_ x:collection1 
c:collection1) [n:127.0.0.1:36334_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy https://127.0.0.1:36334/collection1/ has no 
replicas
   [junit4]   2> 1417789 INFO  
(coreZkRegister-4088-thread-1-processing-n:127.0.0.1:36334_ x:collection1 
c:collection1) [n:127.0.0.1:36334_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: 
https://127.0.0.1:36334/collection1/ shard1
   [junit4]   2> 1417940 INFO  
(coreZkRegister-4088-thread-1-processing-n:127.0.0.1:36334_ x:collection1 
c:collection1) [n:127.0.0.1:36334_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 1418038 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-2-001/cores/collection1
   [junit4]   2> 1418039 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-2-001
   [junit4]   2> 1418040 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1418040 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@d06e77{/,null,AVAILABLE}
   [junit4]   2> 1418041 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@13b7c89{SSL,[ssl, 
http/1.1]}{127.0.0.1:43473}
   [junit4]   2> 1418041 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.Server Started @1419985ms
   [junit4]   2> 1418041 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/tempDir-001/jetty2,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=43473, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-2-001/cores}
   [junit4]   2> 1418041 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 1418042 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 
7.0.0
   [junit4]   2> 1418042 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1418042 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1418042 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-16T18:22:51.699Z
   [junit4]   2> 1418044 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 1418044 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-2-001/solr.xml
   [junit4]   2> 1418050 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:45365/solr
   [junit4]   2> 1418055 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:43473_ 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1418057 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:43473_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:43473_
   [junit4]   2> 1418058 INFO  
(zkCallback-1463-thread-1-processing-n:127.0.0.1:43473_) [n:127.0.0.1:43473_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1418058 INFO  (zkCallback-1452-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1418058 INFO  
(zkCallback-1448-thread-2-processing-n:127.0.0.1:34267_) [n:127.0.0.1:34267_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1418058 INFO  
(zkCallback-1457-thread-1-processing-n:127.0.0.1:36334_) [n:127.0.0.1:36334_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1418121 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:43473_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-2-001/cores
   [junit4]   2> 1418121 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:43473_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 1418123 INFO  
(OverseerStateUpdate-97294153705848837-127.0.0.1:34267_-n_0000000000) 
[n:127.0.0.1:34267_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 1419130 WARN  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 1419130 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.0.0
   [junit4]   2> 1419141 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 1419214 WARN  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 1419231 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 1419237 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 1419238 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-2-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-2-001/cores/collection1/data/]
   [junit4]   2> 1419238 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@33f929
   [junit4]   2> 1419239 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=26, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 1419261 WARN  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 1419272 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 1419272 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1419272 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 1419272 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 1419273 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: 
[TieredMergePolicy: maxMergeAtOnce=13, maxMergeAtOnceExplicit=36, 
maxMergedSegmentMB=36.087890625, floorSegmentMB=0.6708984375, 
forceMergeDeletesPctAllowed=23.20441957831426, segmentsPerTier=50.0, 
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0
   [junit4]   2> 1419273 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@a3bd5b[collection1] main]
   [junit4]   2> 1419274 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 1419274 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1419274 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 1419275 INFO  
(searcherExecutor-4105-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
c:collection1) [n:127.0.0.1:43473_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@a3bd5b[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1419275 INFO  
(coreLoadExecutor-4104-thread-1-processing-n:127.0.0.1:43473_) 
[n:127.0.0.1:43473_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1556706464033144832
   [junit4]   2> 1419279 INFO  
(coreZkRegister-4099-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
c:collection1) [n:127.0.0.1:43473_ c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 1419279 INFO  
(updateExecutor-1460-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 1419279 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. 
recoveringAfterStartup=true
   [junit4]   2> 1419279 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 1419279 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. 
core=[collection1]
   [junit4]   2> 1419279 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. 
FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 1419279 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core 
[collection1] as recovering, leader is [https://127.0.0.1:36334/collection1/] 
and I am [https://127.0.0.1:43473/collection1/]
   [junit4]   2> 1419280 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery 
command to [https://127.0.0.1:36334]; [WaitForState: 
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:43473_&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 1419340 INFO  (qtp31796715-8730) [n:127.0.0.1:36334_    ] 
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state: 
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 1419340 INFO  (qtp31796715-8730) [n:127.0.0.1:36334_    ] 
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 
(shard1 of collection1) have state: recovering
   [junit4]   2> 1419340 INFO  (qtp31796715-8730) [n:127.0.0.1:36334_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=down, localState=active, 
nodeName=127.0.0.1:43473_, coreNodeName=core_node2, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node2:{"core":"collection1","base_url":"https://127.0.0.1:43473","node_name":"127.0.0.1:43473_","state":"down"}
   [junit4]   2> 1419523 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-3-001/cores/collection1
   [junit4]   2> 1419523 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-3-001
   [junit4]   2> 1419524 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1419525 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@1387dfb{/,null,AVAILABLE}
   [junit4]   2> 1419526 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@12f10f8{SSL,[ssl, 
http/1.1]}{127.0.0.1:33631}
   [junit4]   2> 1419526 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.Server Started @1421469ms
   [junit4]   2> 1419526 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/tempDir-001/jetty3,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=33631, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-3-001/cores}
   [junit4]   2> 1419526 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 1419526 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 
7.0.0
   [junit4]   2> 1419526 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1419526 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1419526 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-16T18:22:53.183Z
   [junit4]   2> 1419528 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 1419528 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-3-001/solr.xml
   [junit4]   2> 1419534 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:45365/solr
   [junit4]   2> 1419539 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:33631_ 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 1419540 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:33631_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:33631_
   [junit4]   2> 1419541 INFO  
(zkCallback-1463-thread-1-processing-n:127.0.0.1:43473_) [n:127.0.0.1:43473_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1419541 INFO  
(zkCallback-1448-thread-1-processing-n:127.0.0.1:34267_) [n:127.0.0.1:34267_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1419541 INFO  (zkCallback-1452-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1419541 INFO  
(zkCallback-1457-thread-1-processing-n:127.0.0.1:36334_) [n:127.0.0.1:36334_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1419542 INFO  
(zkCallback-1470-thread-1-processing-n:127.0.0.1:33631_) [n:127.0.0.1:33631_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1419649 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:33631_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-3-001/cores
   [junit4]   2> 1419649 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [n:127.0.0.1:33631_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 1419650 INFO  
(OverseerStateUpdate-97294153705848837-127.0.0.1:34267_-n_0000000000) 
[n:127.0.0.1:34267_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 1420340 INFO  (qtp31796715-8730) [n:127.0.0.1:36334_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=recovering, localState=active, 
nodeName=127.0.0.1:43473_, coreNodeName=core_node2, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node2:{"core":"collection1","base_url":"https://127.0.0.1:43473","node_name":"127.0.0.1:43473_","state":"recovering"}
   [junit4]   2> 1420340 INFO  (qtp31796715-8730) [n:127.0.0.1:36334_    ] 
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering, 
checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 1420340 INFO  (qtp31796715-8730) [n:127.0.0.1:36334_    ] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={nodeName=127.0.0.1:43473_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=0 QTime=1000
   [junit4]   2> 1420657 WARN  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 1420658 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.0.0
   [junit4]   2> 1420668 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 1420749 WARN  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 1420751 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 1420758 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 1420758 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-3-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001/shard-3-001/cores/collection1/data/]
   [junit4]   2> 1420758 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@33f929
   [junit4]   2> 1420760 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=26, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 1420799 WARN  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 1420826 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 1420826 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1420827 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 1420827 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 1420827 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: 
[TieredMergePolicy: maxMergeAtOnce=13, maxMergeAtOnceExplicit=36, 
maxMergedSegmentMB=36.087890625, floorSegmentMB=0.6708984375, 
forceMergeDeletesPctAllowed=23.20441957831426, segmentsPerTier=50.0, 
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0
   [junit4]   2> 1420828 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@40e7f1[collection1] main]
   [junit4]   2> 1420828 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 1420829 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1420829 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 1420830 INFO  
(searcherExecutor-4116-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
c:collection1) [n:127.0.0.1:33631_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@40e7f1[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1420830 INFO  
(coreLoadExecutor-4115-thread-1-processing-n:127.0.0.1:33631_) 
[n:127.0.0.1:33631_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1556706465663680512
   [junit4]   2> 1420833 INFO  
(coreZkRegister-4110-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
c:collection1) [n:127.0.0.1:33631_ c:collection1 s:shard1 r:core_node3 
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 1420833 INFO  
(updateExecutor-1467-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 1420834 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. 
recoveringAfterStartup=true
   [junit4]   2> 1420834 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 1420834 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. 
core=[collection1]
   [junit4]   2> 1420834 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. 
FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 1420834 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core 
[collection1] as recovering, leader is [https://127.0.0.1:36334/collection1/] 
and I am [https://127.0.0.1:33631/collection1/]
   [junit4]   2> 1420835 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery 
command to [https://127.0.0.1:36334]; [WaitForState: 
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:33631_&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 1420837 INFO  (qtp31796715-8735) [n:127.0.0.1:36334_    ] 
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state: 
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 1420838 INFO  (qtp31796715-8735) [n:127.0.0.1:36334_    ] 
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 
(shard1 of collection1) have state: recovering
   [junit4]   2> 1420838 INFO  (qtp31796715-8735) [n:127.0.0.1:36334_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=down, localState=active, 
nodeName=127.0.0.1:33631_, coreNodeName=core_node3, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node3:{"core":"collection1","base_url":"https://127.0.0.1:33631","node_name":"127.0.0.1:33631_","state":"down"}
   [junit4]   2> 1420986 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 1420986 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30 
for each attempt
   [junit4]   2> 1420986 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: 
collection1 failOnTimeout:true timeout (sec):30
   [junit4]   2> 1421838 INFO  (qtp31796715-8735) [n:127.0.0.1:36334_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=recovering, localState=active, 
nodeName=127.0.0.1:33631_, coreNodeName=core_node3, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node3:{"core":"collection1","base_url":"https://127.0.0.1:33631","node_name":"127.0.0.1:33631_","state":"recovering"}
   [junit4]   2> 1421838 INFO  (qtp31796715-8735) [n:127.0.0.1:36334_    ] 
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering, 
checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 1421838 INFO  (qtp31796715-8735) [n:127.0.0.1:36334_    ] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={nodeName=127.0.0.1:33631_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=0 QTime=1000
   [junit4]   2> 1427341 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync 
from [https://127.0.0.1:36334/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 1427341 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 
url=https://127.0.0.1:43473 START 
replicas=[https://127.0.0.1:36334/collection1/] nUpdates=1000
   [junit4]   2> 1427344 INFO  (qtp31796715-8730) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 1427344 INFO  (qtp31796715-8730) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/get 
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
 status=0 QTime=0
   [junit4]   2> 1427344 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint 
millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 1427344 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to 
do a PeerSync 
   [junit4]   2> 1427344 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1427344 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted 
changes. Skipping IW.commit.
   [junit4]   2> 1427345 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 1427345 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery 
was successful.
   [junit4]   2> 1427345 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered 
during PeerSync.
   [junit4]   2> 1427345 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 1427345 INFO  
(recoveryExecutor-1461-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active 
after recovery.
   [junit4]   2> 1428839 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync 
from [https://127.0.0.1:36334/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 1428839 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 
url=https://127.0.0.1:33631 START 
replicas=[https://127.0.0.1:36334/collection1/] nUpdates=1000
   [junit4]   2> 1428841 INFO  (qtp31796715-8737) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 1428841 INFO  (qtp31796715-8737) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/get 
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
 status=0 QTime=0
   [junit4]   2> 1428842 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint 
millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 1428842 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to 
do a PeerSync 
   [junit4]   2> 1428842 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1428843 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted 
changes. Skipping IW.commit.
   [junit4]   2> 1428843 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 1428843 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery 
was successful.
   [junit4]   2> 1428843 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered 
during PeerSync.
   [junit4]   2> 1428843 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 1428843 INFO  
(recoveryExecutor-1468-thread-1-processing-n:127.0.0.1:33631_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:33631_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active 
after recovery.
   [junit4]   2> 1428987 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 1429043 INFO  (qtp5471852-8694) [n:127.0.0.1:34267_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1429043 INFO  (qtp5471852-8694) [n:127.0.0.1:34267_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1429044 INFO  (qtp5471852-8694) [n:127.0.0.1:34267_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 1429044 INFO  (qtp5471852-8694) [n:127.0.0.1:34267_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
 0 1
   [junit4]   2> 1429049 INFO  (qtp31796715-8731) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1429049 INFO  (qtp31796715-8731) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1429050 INFO  (qtp31796715-8731) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 1429050 INFO  (qtp31796715-8731) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:36334/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 0
   [junit4]   2> 1429119 INFO  (qtp25191344-8793) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1429119 INFO  (qtp25191344-8793) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1429120 INFO  (qtp25191344-8793) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 1429120 INFO  (qtp25191344-8793) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:36334/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 1
   [junit4]   2> 1429123 INFO  (qtp18423979-8758) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1429123 INFO  (qtp18423979-8758) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1429123 INFO  (qtp18423979-8758) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 1429123 INFO  (qtp18423979-8758) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:36334/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 0
   [junit4]   2> 1429124 INFO  (qtp31796715-8735) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
 0 78
   [junit4]   2> 1429126 INFO  (qtp31796715-8731) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 1429128 INFO  (qtp18423979-8758) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 1429130 INFO  (qtp25191344-8793) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 1431132 INFO  (qtp5471852-8699) [n:127.0.0.1:34267_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{deleteByQuery=*:* (-1556706476465061888)} 0 1
   [junit4]   2> 1431136 INFO  (qtp18423979-8763) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&_version_=-1556706476467159040&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
 (-1556706476467159040)} 0 0
   [junit4]   2> 1431136 INFO  (qtp25191344-8798) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&_version_=-1556706476467159040&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
 (-1556706476467159040)} 0 0
   [junit4]   2> 1431136 INFO  (qtp31796715-8737) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{deleteByQuery=*:* (-1556706476467159040)} 0 3
   [junit4]   2> 1431139 INFO  (qtp25191344-8799) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[0
 (1556706476471353344)]} 0 0
   [junit4]   2> 1431139 INFO  (qtp18423979-8764) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[0
 (1556706476471353344)]} 0 0
   [junit4]   2> 1431140 INFO  (qtp31796715-8736) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[0 (1556706476471353344)]} 0 3
   [junit4]   2> 1431141 INFO  (qtp25191344-8800) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[1
 (1556706476474499072)]} 0 0
   [junit4]   2> 1431141 INFO  (qtp18423979-8765) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[1
 (1556706476474499072)]} 0 0
   [junit4]   2> 1431141 INFO  (qtp31796715-8733) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[1 (1556706476474499072)]} 0 0
   [junit4]   2> 1431142 INFO  (qtp18423979-8758) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[2
 (1556706476476596224)]} 0 0
   [junit4]   2> 1431142 INFO  (qtp25191344-8793) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[2
 (1556706476476596224)]} 0 0
   [junit4]   2> 1431143 INFO  (qtp31796715-8731) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[2 (1556706476476596224)]} 0 0
   [junit4]   2> 1431144 INFO  (qtp18423979-8763) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[3
 (1556706476477644800)]} 0 0
   [junit4]   2> 1431144 INFO  (qtp25191344-8798) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[3
 (1556706476477644800)]} 0 0
   [junit4]   2> 1431144 INFO  (qtp31796715-8737) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[3 (1556706476477644800)]} 0 1
   [junit4]   2> 1431145 INFO  (qtp18423979-8764) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[4
 (1556706476478693376)]} 0 0
   [junit4]   2> 1431145 INFO  (qtp25191344-8799) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[4
 (1556706476478693376)]} 0 0
   [junit4]   2> 1431145 INFO  (qtp31796715-8736) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[4 (1556706476478693376)]} 0 1
   [junit4]   2> 1431147 INFO  (qtp18423979-8765) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[5
 (1556706476480790528)]} 0 1
   [junit4]   2> 1431147 INFO  (qtp25191344-8800) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[5
 (1556706476480790528)]} 0 1
   [junit4]   2> 1431148 INFO  (qtp31796715-8733) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[5 (1556706476480790528)]} 0 1
   [junit4]   2> 1431149 INFO  (qtp25191344-8793) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[6
 (1556706476482887680)]} 0 0
   [junit4]   2> 1431149 INFO  (qtp18423979-8758) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[6
 (1556706476482887680)]} 0 0
   [junit4]   2> 1431149 INFO  (qtp31796715-8731) [n:127.0.0.1:36334_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[6 (1556706476482887680)]} 0 0
   [junit4]   2> 1431150 INFO  (qtp25191344-8798) [n:127.0.0.1:33631_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt=javabin&version=2}{add=[7
 (1556706476483936256)]} 0 0
   [junit4]   2> 1431150 INFO  (qtp18423979-8763) [n:127.0.0.1:43473_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:36334/collection1/&wt

[...truncated too long message...]

lection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 
s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Finished recovery 
process, successful=[false]
   [junit4]   2> 1621558 INFO  
(recoveryExecutor-1475-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore 
org.apache.solr.core.SolrCore@1daca9f
   [junit4]   2> 1621558 WARN  
(recoveryExecutor-1475-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Stopping recovery for 
core=[collection1] coreNodeName=[core_node2]
   [junit4]   2> 1621591 INFO  
(recoveryExecutor-1475-thread-1-processing-n:127.0.0.1:43473_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:43473_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters 
for: solr.core.collection1
   [junit4]   2> 1621591 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.Overseer Overseer (id=97294153705848850-127.0.0.1:43473_-n_0000000004) 
closing
   [junit4]   2> 1621591 INFO  
(OverseerStateUpdate-97294153705848850-127.0.0.1:43473_-n_0000000004) 
[n:127.0.0.1:43473_    ] o.a.s.c.Overseer Overseer Loop exiting : 
127.0.0.1:43473_
   [junit4]   2> 1621592 WARN  
(zkCallback-1477-thread-2-processing-n:127.0.0.1:43473_) [n:127.0.0.1:43473_    
] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to 
ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 1621592 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 1621593 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.AbstractConnector Stopped ServerConnector@1f3dabe{SSL,[ssl, 
http/1.1]}{127.0.0.1:43473}
   [junit4]   2> 1621593 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.e.j.s.h.ContextHandler Stopped 
o.e.j.s.ServletContextHandler@18dc2f4{/,null,UNAVAILABLE}
   [junit4]   2> 1621593 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.ChaosMonkey monkey: stop shard! 33631
   [junit4]   2> 1621593 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[2A532E009DD49072]) [    ] 
o.a.s.c.ZkTestServer connecting to 127.0.0.1:45365 45365
   [junit4]   2> 1621646 INFO  (Thread-1995) [    ] o.a.s.c.ZkTestServer 
connecting to 127.0.0.1:45365 45365
   [junit4]   2> 1621647 WARN  (Thread-1995) [    ] o.a.s.c.ZkTestServer Watch 
limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2>        6       /solr/aliases.json
   [junit4]   2>        5       /solr/security.json
   [junit4]   2>        5       /solr/configs/conf1
   [junit4]   2>        4       /solr/collections/collection1/state.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2>        6       /solr/clusterstate.json
   [junit4]   2>        6       /solr/clusterprops.json
   [junit4]   2>        2       
/solr/collections/collection1/leader_elect/shard1/election/97294153705848841-core_node1-n_0000000000
   [junit4]   2>        2       
/solr/overseer_elect/election/97294153705848841-127.0.0.1:36334_-n_0000000001
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2>        207     /solr/overseer/collection-queue-work
   [junit4]   2>        24      /solr/overseer/queue
   [junit4]   2>        6       /solr/collections
   [junit4]   2>        6       /solr/overseer/queue-work
   [junit4]   2>        5       /solr/live_nodes
   [junit4]   2> 
   [junit4]   2> NOTE: reproduce with: ant test  
-Dtestcase=PeerSyncReplicationTest -Dtests.method=test 
-Dtests.seed=2A532E009DD49072 -Dtests.multiplier=3 -Dtests.slow=true 
-Dtests.locale=lv-LV -Dtests.timezone=Pacific/Noumea -Dtests.asserts=true 
-Dtests.file.encoding=US-ASCII
   [junit4] FAILURE  207s J0 | PeerSyncReplicationTest.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: timeout waiting to see 
all nodes active
   [junit4]    >        at 
__randomizedtesting.SeedInfo.seed([2A532E009DD49072:A20711DA3328FD8A]:0)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
   [junit4]    >        at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 1621650 INFO  
(SUITE-PeerSyncReplicationTest-seed#[2A532E009DD49072]-worker) [    ] 
o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> NOTE: leaving temporary files on disk at: 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.PeerSyncReplicationTest_2A532E009DD49072-001
   [junit4]   2> Jan 16, 2017 6:26:15 PM 
com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
   [junit4]   2> WARNING: Will linger awaiting termination of 2 leaked 
thread(s).
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene70): 
{other_tl1=Lucene50(blocksize=128), 
range_facet_l_dv=PostingsFormat(name=LuceneFixedGap), rnd_s=FSTOrd50, 
multiDefault=FSTOrd50, intDefault=Lucene50(blocksize=128), 
a_i1=Lucene50(blocksize=128), range_facet_l=Lucene50(blocksize=128), 
_version_=Lucene50(blocksize=128), a_t=Lucene50(blocksize=128), 
id=PostingsFormat(name=LuceneFixedGap), 
range_facet_i_dv=Lucene50(blocksize=128), 
text=PostingsFormat(name=LuceneVarGapDocFreqInterval), 
timestamp=Lucene50(blocksize=128)}, 
docValues:{range_facet_l_dv=DocValuesFormat(name=Asserting), 
range_facet_i_dv=DocValuesFormat(name=Memory), 
timestamp=DocValuesFormat(name=Memory)}, maxPointsInLeafNode=1716, 
maxMBSortInHeap=5.6936166999247355, sim=RandomSimilarity(queryNorm=false): {}, 
locale=lv-LV, timezone=Pacific/Noumea
   [junit4]   2> NOTE: Linux 4.4.0-53-generic i386/Oracle Corporation 1.8.0_112 
(32-bit)/cpus=12,threads=1,free=195885312,total=375824384
   [junit4]   2> NOTE: All tests run in this JVM: [SolrSlf4jReporterTest, 
UniqFieldsUpdateProcessorFactoryTest, TestXmlQParser, TestLRUCache, 
DistributedDebugComponentTest, PingRequestHandlerTest, BasicZkTest, 
SuggestComponentContextFilterQueryTest, 
OverriddenZkACLAndCredentialsProvidersTest, MinimalSchemaTest, TestInitQParser, 
TestJavabinTupleStreamParser, SolrMetricManagerTest, TestSolr4Spatial2, 
TestDefaultSearchFieldResource, NumericFieldsTest, QueryResultKeyTest, 
CacheHeaderTest, TestManagedStopFilterFactory, TestFieldCollectionResource, 
ZkStateReaderTest, TestSQLHandler, TestSolrCloudWithSecureImpersonation, 
StandardRequestHandlerTest, TestConfigSetImmutable, ScriptEngineTest, 
TestExportWriter, ConcurrentDeleteAndCreateCollectionTest, 
TestSolrCoreProperties, LeaderFailureAfterFreshStartTest, PeerSyncTest, 
TestFunctionQuery, NotRequiredUniqueKeyTest, TestFieldCacheWithThreads, 
SecurityConfHandlerTest, TestValueSourceCache, TestSizeLimitedDistributedMap, 
TestBadConfig, AnalysisAfterCoreReloadTest, PolyFieldTest, 
DistribDocExpirationUpdateProcessorTest, CdcrVersionReplicationTest, 
TestStandardQParsers, JavabinLoaderTest, RankQueryTest, 
DocExpirationUpdateProcessorFactoryTest, TestFaceting, 
TestDFRSimilarityFactory, TestCloudManagedSchema, TestJmxIntegration, 
TestImplicitCoreProperties, HdfsDirectoryTest, 
BigEndianAscendingWordDeserializerTest, TestSolrDeletionPolicy2, 
PreAnalyzedUpdateProcessorTest, DocValuesMissingTest, 
ClassificationUpdateProcessorIntegrationTest, TestUseDocValuesAsStored, 
PeerSyncWithIndexFingerprintCachingTest, TestLazyCores, 
DistributedQueryComponentOptimizationTest, TestOmitPositions, TestFastWriter, 
TestStressLucene, TestSSLRandomization, TestSortByMinMaxFunction, 
TestCrossCoreJoin, MigrateRouteKeyTest, ResponseLogComponentTest, 
TestFileDictionaryLookup, TestCloudInspectUtil, TestQueryWrapperFilter, 
TestFieldTypeResource, TestCryptoKeys, TestComplexPhraseQParserPlugin, 
XsltUpdateRequestHandlerTest, DistributedQueryElevationComponentTest, 
DateMathParserTest, TestExceedMaxTermLength, UUIDFieldTest, BooleanFieldTest, 
FileBasedSpellCheckerTest, HdfsRecoveryZkTest, BlockCacheTest, 
SystemInfoHandlerTest, CdcrReplicationDistributedZkTest, SolrRequestParserTest, 
TestSlowCompositeReaderWrapper, ClusterStateTest, TestCodecSupport, 
BlockJoinFacetRandomTest, TestConfigSetsAPI, 
AddSchemaFieldsUpdateProcessorFactoryTest, TestSolrCloudSnapshots, 
TestSubQueryTransformer, TestSha256AuthenticationProvider, DeleteShardTest, 
TestRealTimeGet, TestCSVLoader, TestLocalFSCloudBackupRestore, 
TestSearcherReuse, DocumentAnalysisRequestHandlerTest, 
AnalysisErrorHandlingTest, TestDistributedSearch, ActionThrottleTest, 
DefaultValueUpdateProcessorTest, RollingRestartTest, 
DistributedVersionInfoTest, TestLFUCache, SmileWriterTest, 
BasicDistributedZkTest, LeaderElectionIntegrationTest, TestRandomFaceting, 
TestRandomDVFaceting, DistributedSpellCheckComponentTest, 
TermVectorComponentDistributedTest, TestRangeQuery, AssignTest, 
AsyncCallRequestStatusResponseTest, CdcrReplicationHandlerTest, 
CdcrRequestHandlerTest, CloudExitableDirectoryReaderTest, 
CollectionTooManyReplicasTest, CollectionsAPISolrJTest, 
DocValuesNotIndexedTest, ForceLeaderTest, PeerSyncReplicationTest]
   [junit4] Completed [507/678 (1!)] on J0 in 207.67s, 1 test, 1 failure <<< 
FAILURES!

[...truncated 55230 lines...]

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to