Build: https://jenkins.thetaphi.de/job/Lucene-Solr-6.4-Linux/145/
Java: 64bit/jdk1.8.0_121 -XX:-UseCompressedOops -XX:+UseSerialGC

1 test failed.
FAILED:  org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.test

Error Message:
Expected 2 of 3 replicas to be active but only found 1;
[core_node3:{"core":"c8n_1x3_lf_shard1_replica3","base_url":"http://127.0.0.1:39228","node_name":"127.0.0.1:39228_","state":"active","leader":"true"}];
 clusterState: DocCollection(c8n_1x3_lf//clusterstate.json/33)={
  "replicationFactor":"3",
  "shards":{"shard1":{
      "range":"80000000-7fffffff",
      "state":"active",
      "replicas":{
        "core_node1":{
          "state":"down",
          "base_url":"http://127.0.0.1:44840",
          "core":"c8n_1x3_lf_shard1_replica2",
          "node_name":"127.0.0.1:44840_"},
        "core_node2":{
          "core":"c8n_1x3_lf_shard1_replica1",
          "base_url":"http://127.0.0.1:45475",
          "node_name":"127.0.0.1:45475_",
          "state":"down"},
        "core_node3":{
          "core":"c8n_1x3_lf_shard1_replica3",
          "base_url":"http://127.0.0.1:39228",
          "node_name":"127.0.0.1:39228_",
          "state":"active",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"1",
  "autoAddReplicas":"false"}

Stack Trace:
java.lang.AssertionError: Expected 2 of 3 replicas to be active but only found 1;
[core_node3:{"core":"c8n_1x3_lf_shard1_replica3","base_url":"http://127.0.0.1:39228","node_name":"127.0.0.1:39228_","state":"active","leader":"true"}];
 clusterState: DocCollection(c8n_1x3_lf//clusterstate.json/33)={
  "replicationFactor":"3",
  "shards":{"shard1":{
      "range":"80000000-7fffffff",
      "state":"active",
      "replicas":{
        "core_node1":{
          "state":"down",
          "base_url":"http://127.0.0.1:44840";,
          "core":"c8n_1x3_lf_shard1_replica2",
          "node_name":"127.0.0.1:44840_"},
        "core_node2":{
          "core":"c8n_1x3_lf_shard1_replica1",
          "base_url":"http://127.0.0.1:45475";,
          "node_name":"127.0.0.1:45475_",
          "state":"down"},
        "core_node3":{
          "core":"c8n_1x3_lf_shard1_replica3",
          "base_url":"http://127.0.0.1:39228";,
          "node_name":"127.0.0.1:39228_",
          "state":"active",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"1",
  "autoAddReplicas":"false"}
        at __randomizedtesting.SeedInfo.seed([AFDCF4D88650EC2C:2788CB0228AC81D4]:0)
        at org.junit.Assert.fail(Assert.java:93)
        at org.junit.Assert.assertTrue(Assert.java:43)
        at org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.testRf3WithLeaderFailover(LeaderFailoverAfterPartitionTest.java:168)
        at org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.test(LeaderFailoverAfterPartitionTest.java:55)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
        at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:992)
        at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:967)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:811)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:462)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
        at java.lang.Thread.run(Thread.java:745)
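
For reference, the assertion message above reports how many replicas of collection c8n_1x3_lf were active. Below is a minimal SolrJ sketch of that kind of check; it is not the test's actual code. The ZooKeeper address and collection name are taken from the log below, and the live-node filter is an added assumption.

    import java.util.Set;

    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.common.cloud.ClusterState;
    import org.apache.solr.common.cloud.DocCollection;
    import org.apache.solr.common.cloud.Replica;

    public class ActiveReplicaCheck {
        public static void main(String[] args) throws Exception {
            // ZK address of the test cluster as logged below; any SolrCloud ZK host would do.
            String zkHost = "127.0.0.1:37923/solr";
            try (CloudSolrClient client = new CloudSolrClient.Builder().withZkHost(zkHost).build()) {
                client.connect();
                ClusterState state = client.getZkStateReader().getClusterState();
                Set<String> liveNodes = state.getLiveNodes();
                DocCollection coll = state.getCollection("c8n_1x3_lf");

                // Count replicas that report ACTIVE and sit on a live node
                // (the live-node condition is an assumption, not taken from the test).
                long active = coll.getReplicas().stream()
                        .filter(r -> r.getState() == Replica.State.ACTIVE
                                && liveNodes.contains(r.getNodeName()))
                        .count();
                System.out.println("active replicas: " + active + " of " + coll.getReplicas().size());
            }
        }
    }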




Build Log:
[...truncated 10928 lines...]
   [junit4] Suite: org.apache.solr.cloud.LeaderFailoverAfterPartitionTest
   [junit4]   2> Creating dataDir: 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/init-core-data-001
   [junit4]   2> 10645 INFO  
(SUITE-LeaderFailoverAfterPartitionTest-seed#[AFDCF4D88650EC2C]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: 
@org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl=https://issues.apache.org/jira/browse/SOLR-5776)
   [junit4]   2> 10645 INFO  
(SUITE-LeaderFailoverAfterPartitionTest-seed#[AFDCF4D88650EC2C]-worker) [    ] 
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   2> 10655 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 10656 INFO  (Thread-35) [    ] o.a.s.c.ZkTestServer client 
port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 10656 INFO  (Thread-35) [    ] o.a.s.c.ZkTestServer Starting 
server
   [junit4]   2> 10756 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.ZkTestServer start zk server on port:37923
   [junit4]   2> 10873 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
 to /configs/conf1/solrconfig.xml
   [junit4]   2> 10876 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/core/src/test-files/solr/collection1/conf/schema.xml
 to /configs/conf1/schema.xml
   [junit4]   2> 10883 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
 to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 10886 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
 to /configs/conf1/stopwords.txt
   [junit4]   2> 10887 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/core/src/test-files/solr/collection1/conf/protwords.txt
 to /configs/conf1/protwords.txt
   [junit4]   2> 10889 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/core/src/test-files/solr/collection1/conf/currency.xml
 to /configs/conf1/currency.xml
   [junit4]   2> 10891 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
 to /configs/conf1/enumsConfig.xml
   [junit4]   2> 10897 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
 to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 10899 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
 to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 10901 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
 to /configs/conf1/old_synonyms.txt
   [junit4]   2> 10903 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
 to /configs/conf1/synonyms.txt
   [junit4]   2> 10983 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/control-001/cores/collection1
   [junit4]   2> 10985 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 10986 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@4ca026b8{/,null,AVAILABLE}
   [junit4]   2> 10988 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@67904708{HTTP/1.1,[http/1.1]}{127.0.0.1:44007}
   [junit4]   2> 10988 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.Server Started @13048ms
   [junit4]   2> 10988 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/tempDir-001/control/data,
 hostContext=/, hostPort=39228, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/control-001/cores}
   [junit4]   2> 10988 ERROR 
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 10988 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
6.4.2
   [junit4]   2> 10989 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 10989 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 10989 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-03-06T18:29:32.548Z
   [junit4]   2> 10989 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.SolrResourceLoader solr home defaulted to 'solr/' (could not find 
system property or JNDI)
   [junit4]   2> 10993 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 10993 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/control-001/solr.xml
   [junit4]   2> 11000 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: 
socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 11002 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37923/solr
   [junit4]   2> 11092 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:39228_    ] o.a.s.c.OverseerElectionContext I am going to be the 
leader 127.0.0.1:39228_
   [junit4]   2> 11093 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:39228_    ] o.a.s.c.Overseer Overseer 
(id=97571633381965828-127.0.0.1:39228_-n_0000000000) starting
   [junit4]   2> 11164 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:39228_    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:39228_
   [junit4]   2> 11167 INFO  
(OverseerStateUpdate-97571633381965828-127.0.0.1:39228_-n_0000000000) 
[n:127.0.0.1:39228_    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (1)
   [junit4]   2> 11297 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:39228_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions 
underneath 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/control-001/cores
   [junit4]   2> 11298 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:39228_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 11339 INFO  
(OverseerStateUpdate-97571633381965828-127.0.0.1:39228_-n_0000000000) 
[n:127.0.0.1:39228_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 12351 WARN  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.c.Config 
Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> 
instead.
   [junit4]   2> 12352 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.c.SolrConfig 
Using Lucene MatchVersion: 6.4.2
   [junit4]   2> 12389 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 12563 WARN  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 12566 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 12599 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection 
control_collection
   [junit4]   2> 12599 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/control-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/control-001/cores/collection1/data/]
   [junit4]   2> 12600 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@2d3b71f8
   [junit4]   2> 12601 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: 
minMergeSize=1000, mergeFactor=6, maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.4091786280366779]
   [junit4]   2> 12614 WARN  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 12627 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.u.UpdateHandler 
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 12627 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 12628 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.u.CommitTracker 
Hard AutoCommit: disabled
   [junit4]   2> 12628 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.u.CommitTracker 
Soft AutoCommit: disabled
   [junit4]   2> 12629 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=31, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 12629 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@1925a5d6[collection1] main]
   [junit4]   2> 12632 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 12633 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 12633 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 12636 INFO  
(searcherExecutor-71-thread-1-processing-n:127.0.0.1:39228_ x:collection1 
c:control_collection) [n:127.0.0.1:39228_ c:control_collection   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@1925a5d6[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 12636 INFO  
(coreLoadExecutor-70-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_ c:control_collection   x:collection1] o.a.s.u.UpdateLog 
Could not find max version in index or recent updates, using new clock 
1561146136141496320
   [junit4]   2> 12651 INFO  
(coreZkRegister-63-thread-1-processing-n:127.0.0.1:39228_ x:collection1 
c:control_collection) [n:127.0.0.1:39228_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas 
found to continue.
   [junit4]   2> 12651 INFO  
(coreZkRegister-63-thread-1-processing-n:127.0.0.1:39228_ x:collection1 
c:control_collection) [n:127.0.0.1:39228_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new 
leader - try and sync
   [junit4]   2> 12651 INFO  
(coreZkRegister-63-thread-1-processing-n:127.0.0.1:39228_ x:collection1 
c:control_collection) [n:127.0.0.1:39228_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
http://127.0.0.1:39228/collection1/
   [junit4]   2> 12652 INFO  
(coreZkRegister-63-thread-1-processing-n:127.0.0.1:39228_ x:collection1 
c:control_collection) [n:127.0.0.1:39228_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync 
replicas to me
   [junit4]   2> 12652 INFO  
(coreZkRegister-63-thread-1-processing-n:127.0.0.1:39228_ x:collection1 
c:control_collection) [n:127.0.0.1:39228_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
http://127.0.0.1:39228/collection1/ has no replicas
   [junit4]   2> 12673 INFO  
(coreZkRegister-63-thread-1-processing-n:127.0.0.1:39228_ x:collection1 
c:control_collection) [n:127.0.0.1:39228_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new 
leader: http://127.0.0.1:39228/collection1/ shard1
   [junit4]   2> 12824 INFO  
(coreZkRegister-63-thread-1-processing-n:127.0.0.1:39228_ x:collection1 
c:control_collection) [n:127.0.0.1:39228_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery 
necessary
   [junit4]   2> 12855 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 12856 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37923/solr ready
   [junit4]   2> 12858 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection 
loss:false
   [junit4]   2> 12925 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-1-001/cores/collection1
   [junit4]   2> 12925 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-1-001
   [junit4]   2> 12926 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 12927 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@2d0573fc{/,null,AVAILABLE}
   [junit4]   2> 12929 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@6a296877{HTTP/1.1,[http/1.1]}{127.0.0.1:40154}
   [junit4]   2> 12929 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.Server Started @14989ms
   [junit4]   2> 12929 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/tempDir-001/jetty1,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=45475, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-1-001/cores}
   [junit4]   2> 12929 ERROR 
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 12929 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
6.4.2
   [junit4]   2> 12929 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 12930 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 12930 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-03-06T18:29:34.489Z
   [junit4]   2> 12934 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 12934 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-1-001/solr.xml
   [junit4]   2> 12935 WARN  (NIOServerCxn.Factory:0.0.0.0/0.0.0.0:0) [    ] 
o.a.z.s.NIOServerCnxn caught end of stream exception
   [junit4]   2> EndOfStreamException: Unable to read additional data from 
client sessionid 0x15aa4e233e30006, likely client has closed socket
   [junit4]   2>        at 
org.apache.zookeeper.server.NIOServerCnxn.doIO(NIOServerCnxn.java:228)
   [junit4]   2>        at 
org.apache.zookeeper.server.NIOServerCnxnFactory.run(NIOServerCnxnFactory.java:208)
   [junit4]   2>        at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 12942 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: 
socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 12943 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37923/solr
   [junit4]   2> 12965 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:45475_    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (1)
   [junit4]   2> 12977 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:45475_    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:45475_
   [junit4]   2> 12978 INFO  (zkCallback-21-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 12979 INFO  
(zkCallback-17-thread-2-processing-n:127.0.0.1:39228_) [n:127.0.0.1:39228_    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 12983 INFO  
(zkCallback-26-thread-1-processing-n:127.0.0.1:45475_) [n:127.0.0.1:45475_    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 13165 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:45475_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions 
underneath 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-1-001/cores
   [junit4]   2> 13165 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:45475_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 13168 INFO  
(OverseerStateUpdate-97571633381965828-127.0.0.1:39228_-n_0000000000) 
[n:127.0.0.1:39228_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard2
   [junit4]   2> 14180 WARN  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 14181 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 6.4.2
   [junit4]   2> 14197 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 14297 WARN  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 14300 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 14312 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 14313 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-1-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-1-001/cores/collection1/data/]
   [junit4]   2> 14313 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@2d3b71f8
   [junit4]   2> 14314 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: 
[LogDocMergePolicy: minMergeSize=1000, mergeFactor=6, 
maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.4091786280366779]
   [junit4]   2> 14320 WARN  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 14333 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 14333 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 14334 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 14334 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 14334 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=31, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 14335 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@4586d812[collection1] main]
   [junit4]   2> 14336 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 14336 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 14336 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 14338 INFO  
(searcherExecutor-82-thread-1-processing-n:127.0.0.1:45475_ x:collection1 
c:collection1) [n:127.0.0.1:45475_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@4586d812[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 14339 INFO  
(coreLoadExecutor-81-thread-1-processing-n:127.0.0.1:45475_) 
[n:127.0.0.1:45475_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1561146137927221248
   [junit4]   2> 14346 INFO  
(coreZkRegister-76-thread-1-processing-n:127.0.0.1:45475_ x:collection1 
c:collection1) [n:127.0.0.1:45475_ c:collection1 s:shard2 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to 
continue.
   [junit4]   2> 14346 INFO  
(coreZkRegister-76-thread-1-processing-n:127.0.0.1:45475_ x:collection1 
c:collection1) [n:127.0.0.1:45475_ c:collection1 s:shard2 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try 
and sync
   [junit4]   2> 14346 INFO  
(coreZkRegister-76-thread-1-processing-n:127.0.0.1:45475_ x:collection1 
c:collection1) [n:127.0.0.1:45475_ c:collection1 s:shard2 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
http://127.0.0.1:45475/collection1/
   [junit4]   2> 14346 INFO  
(coreZkRegister-76-thread-1-processing-n:127.0.0.1:45475_ x:collection1 
c:collection1) [n:127.0.0.1:45475_ c:collection1 s:shard2 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 14346 INFO  
(coreZkRegister-76-thread-1-processing-n:127.0.0.1:45475_ x:collection1 
c:collection1) [n:127.0.0.1:45475_ c:collection1 s:shard2 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:45475/collection1/ has no 
replicas
   [junit4]   2> 14365 INFO  
(coreZkRegister-76-thread-1-processing-n:127.0.0.1:45475_ x:collection1 
c:collection1) [n:127.0.0.1:45475_ c:collection1 s:shard2 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: 
http://127.0.0.1:45475/collection1/ shard2
   [junit4]   2> 14517 INFO  
(coreZkRegister-76-thread-1-processing-n:127.0.0.1:45475_ x:collection1 
c:collection1) [n:127.0.0.1:45475_ c:collection1 s:shard2 r:core_node1 
x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 14746 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-2-001/cores/collection1
   [junit4]   2> 14747 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-2-001
   [junit4]   2> 14748 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 14749 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@1b5d4670{/,null,AVAILABLE}
   [junit4]   2> 14750 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@80fc1e{HTTP/1.1,[http/1.1]}{127.0.0.1:41342}
   [junit4]   2> 14750 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.Server Started @16810ms
   [junit4]   2> 14750 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/tempDir-001/jetty2,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=46191, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-2-001/cores}
   [junit4]   2> 14750 ERROR 
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 14750 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
6.4.2
   [junit4]   2> 14750 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 14750 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 14750 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-03-06T18:29:36.309Z
   [junit4]   2> 14754 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 14754 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-2-001/solr.xml
   [junit4]   2> 14762 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: 
socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 14763 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37923/solr
   [junit4]   2> 14806 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:46191_    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (2)
   [junit4]   2> 14810 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:46191_    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:46191_
   [junit4]   2> 14811 INFO  
(zkCallback-26-thread-1-processing-n:127.0.0.1:45475_) [n:127.0.0.1:45475_    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 14811 INFO  
(zkCallback-17-thread-1-processing-n:127.0.0.1:39228_) [n:127.0.0.1:39228_    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 14812 INFO  
(zkCallback-32-thread-1-processing-n:127.0.0.1:46191_) [n:127.0.0.1:46191_    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 14811 INFO  (zkCallback-21-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 14859 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:46191_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions 
underneath 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-2-001/cores
   [junit4]   2> 14859 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:46191_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 14862 INFO  
(OverseerStateUpdate-97571633381965828-127.0.0.1:39228_-n_0000000000) 
[n:127.0.0.1:39228_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 15873 WARN  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 15874 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 6.4.2
   [junit4]   2> 15903 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 15992 WARN  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 15995 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 16006 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 16006 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-2-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-2-001/cores/collection1/data/]
   [junit4]   2> 16006 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@2d3b71f8
   [junit4]   2> 16009 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: 
[LogDocMergePolicy: minMergeSize=1000, mergeFactor=6, 
maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.4091786280366779]
   [junit4]   2> 16014 WARN  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 16026 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 16026 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 16027 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 16027 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 16027 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=31, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 16028 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@5b1b7c41[collection1] main]
   [junit4]   2> 16029 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 16029 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 16030 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 16031 INFO  
(searcherExecutor-93-thread-1-processing-n:127.0.0.1:46191_ x:collection1 
c:collection1) [n:127.0.0.1:46191_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@5b1b7c41[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 16032 INFO  
(coreLoadExecutor-92-thread-1-processing-n:127.0.0.1:46191_) 
[n:127.0.0.1:46191_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1561146139702460416
   [junit4]   2> 16037 INFO  
(coreZkRegister-87-thread-1-processing-n:127.0.0.1:46191_ x:collection1 
c:collection1) [n:127.0.0.1:46191_ c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to 
continue.
   [junit4]   2> 16037 INFO  
(coreZkRegister-87-thread-1-processing-n:127.0.0.1:46191_ x:collection1 
c:collection1) [n:127.0.0.1:46191_ c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try 
and sync
   [junit4]   2> 16037 INFO  
(coreZkRegister-87-thread-1-processing-n:127.0.0.1:46191_ x:collection1 
c:collection1) [n:127.0.0.1:46191_ c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
http://127.0.0.1:46191/collection1/
   [junit4]   2> 16037 INFO  
(coreZkRegister-87-thread-1-processing-n:127.0.0.1:46191_ x:collection1 
c:collection1) [n:127.0.0.1:46191_ c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 16037 INFO  
(coreZkRegister-87-thread-1-processing-n:127.0.0.1:46191_ x:collection1 
c:collection1) [n:127.0.0.1:46191_ c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:46191/collection1/ has no 
replicas
   [junit4]   2> 16042 INFO  
(coreZkRegister-87-thread-1-processing-n:127.0.0.1:46191_ x:collection1 
c:collection1) [n:127.0.0.1:46191_ c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: 
http://127.0.0.1:46191/collection1/ shard1
   [junit4]   2> 16192 INFO  
(coreZkRegister-87-thread-1-processing-n:127.0.0.1:46191_ x:collection1 
c:collection1) [n:127.0.0.1:46191_ c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 16504 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-3-001/cores/collection1
   [junit4]   2> 16505 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-3-001
   [junit4]   2> 16506 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 16507 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@39935198{/,null,AVAILABLE}
   [junit4]   2> 16508 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@3b192c7c{HTTP/1.1,[http/1.1]}{127.0.0.1:38819}
   [junit4]   2> 16508 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.Server Started @18568ms
   [junit4]   2> 16508 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/tempDir-001/jetty3,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=44840, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-3-001/cores}
   [junit4]   2> 16508 ERROR 
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 16508 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
6.4.2
   [junit4]   2> 16508 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 16508 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 16509 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-03-06T18:29:38.067Z
   [junit4]   2> 16512 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 16512 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-3-001/solr.xml
   [junit4]   2> 16518 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: 
socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 16519 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37923/solr
   [junit4]   2> 16545 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:44840_    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (3)
   [junit4]   2> 16549 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:44840_    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:44840_
   [junit4]   2> 16550 INFO  
(zkCallback-26-thread-1-processing-n:127.0.0.1:45475_) [n:127.0.0.1:45475_    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 16550 INFO  
(zkCallback-17-thread-2-processing-n:127.0.0.1:39228_) [n:127.0.0.1:39228_    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 16551 INFO  
(zkCallback-38-thread-1-processing-n:127.0.0.1:44840_) [n:127.0.0.1:44840_    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 16551 INFO  (zkCallback-21-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 16553 INFO  
(zkCallback-32-thread-1-processing-n:127.0.0.1:46191_) [n:127.0.0.1:46191_    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 16622 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:44840_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions 
underneath 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-3-001/cores
   [junit4]   2> 16622 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) 
[n:127.0.0.1:44840_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 16625 INFO  
(OverseerStateUpdate-97571633381965828-127.0.0.1:39228_-n_0000000000) 
[n:127.0.0.1:39228_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard2
   [junit4]   2> 17634 WARN  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 17634 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 6.4.2
   [junit4]   2> 17649 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 17749 WARN  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 17752 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 17764 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 17764 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-3-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-3-001/cores/collection1/data/]
   [junit4]   2> 17764 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@2d3b71f8
   [junit4]   2> 17765 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: 
[LogDocMergePolicy: minMergeSize=1000, mergeFactor=6, 
maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.4091786280366779]
   [junit4]   2> 17771 WARN  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 17784 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 17785 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 17785 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 17785 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 17786 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=31, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 17786 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@11a76387[collection1] main]
   [junit4]   2> 17787 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 17788 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 17788 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 17789 INFO  
(searcherExecutor-104-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
c:collection1) [n:127.0.0.1:44840_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@11a76387[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 17790 INFO  
(coreLoadExecutor-103-thread-1-processing-n:127.0.0.1:44840_) 
[n:127.0.0.1:44840_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1561146141545857024
   [junit4]   2> 17794 INFO  
(coreZkRegister-98-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
c:collection1) [n:127.0.0.1:44840_ c:collection1 s:shard2 r:core_node3 
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 17794 INFO  
(updateExecutor-35-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 17798 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. 
recoveringAfterStartup=true
   [junit4]   2> 17798 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 17799 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. 
core=[collection1]
   [junit4]   2> 17799 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. 
FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 17799 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core 
[collection1] as recovering, leader is [http://127.0.0.1:45475/collection1/] 
and I am [http://127.0.0.1:44840/collection1/]
   [junit4]   2> 17803 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery 
command to [http://127.0.0.1:45475]; [WaitForState: 
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:44840_&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 17804 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=51448,localport=45475], receiveBufferSize:531000
   [junit4]   2> 17807 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=40154,localport=55774], receiveBufferSize=530904
   [junit4]   2> 17809 INFO  (qtp634215519-155) [n:127.0.0.1:45475_    ] 
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state: 
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 17810 INFO  (qtp634215519-155) [n:127.0.0.1:45475_    ] 
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 
(shard2 of collection1) have state: recovering
   [junit4]   2> 17810 INFO  (qtp634215519-155) [n:127.0.0.1:45475_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard2, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=down, localState=active, 
nodeName=127.0.0.1:44840_, coreNodeName=core_node3, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node3:{"core":"collection1","base_url":"http://127.0.0.1:44840","node_name":"127.0.0.1:44840_","state":"down"}
   [junit4]   2> 18137 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 18138 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 
30000 for each attempt
   [junit4]   2> 18138 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: 
collection1 failOnTimeout:true timeout (sec):30000
   [junit4]   2> 18810 INFO  (qtp634215519-155) [n:127.0.0.1:45475_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard2, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=recovering, localState=active, 
nodeName=127.0.0.1:44840_, coreNodeName=core_node3, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node3:{"core":"collection1","base_url":"http://127.0.0.1:44840","node_name":"127.0.0.1:44840_","state":"recovering"}
   [junit4]   2> 18811 INFO  (qtp634215519-155) [n:127.0.0.1:45475_    ] 
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering, 
checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 18811 INFO  (qtp634215519-155) [n:127.0.0.1:45475_    ] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={nodeName=127.0.0.1:44840_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=0 QTime=1001
   [junit4]   2> 25812 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync 
from [http://127.0.0.1:45475/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 25816 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 
url=http://127.0.0.1:44840 START replicas=[http://127.0.0.1:45475/collection1/] 
nUpdates=100
   [junit4]   2> 25820 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=51502,localport=45475], receiveBufferSize:531000
   [junit4]   2> 25821 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=40154,localport=55828], receiveBufferSize=530904
   [junit4]   2> 25826 INFO  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:2.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 25826 INFO  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/get 
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
 status=0 QTime=4
   [junit4]   2> 25828 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint 
millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 25828 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to 
do a PeerSync 
   [junit4]   2> 25828 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 25829 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted 
changes. Skipping IW.commit.
   [junit4]   2> 25829 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 25829 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery 
was successful.
   [junit4]   2> 25829 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered 
during PeerSync.
   [junit4]   2> 25829 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 25829 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ x:collection1 
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:44840_ c:collection1 s:shard2 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active 
after recovery.
   [junit4]   2> 26139 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 26140 INFO  (SocketProxy-Acceptor-39228) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=51152,localport=39228], receiveBufferSize:531000
   [junit4]   2> 26141 INFO  (SocketProxy-Acceptor-39228) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=44007,localport=44766], receiveBufferSize=530904
   [junit4]   2> 26147 INFO  (qtp661315876-118) [n:127.0.0.1:39228_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 26147 INFO  (qtp661315876-118) [n:127.0.0.1:39228_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 26147 INFO  (qtp661315876-118) [n:127.0.0.1:39228_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 26147 INFO  (qtp661315876-118) [n:127.0.0.1:39228_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
 0 5
   [junit4]   2> 26151 INFO  (SocketProxy-Acceptor-46191) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=59254,localport=46191], receiveBufferSize:531000
   [junit4]   2> 26152 INFO  (SocketProxy-Acceptor-46191) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=41342,localport=52742], receiveBufferSize=530904
   [junit4]   2> 26158 INFO  (SocketProxy-Acceptor-46191) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=59258,localport=46191], receiveBufferSize:531000
   [junit4]   2> 26158 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=51516,localport=45475], receiveBufferSize:531000
   [junit4]   2> 26158 INFO  (SocketProxy-Acceptor-44840) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=46114,localport=44840], receiveBufferSize:531000
   [junit4]   2> 26160 INFO  (SocketProxy-Acceptor-46191) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=41342,localport=52750], receiveBufferSize=530904
   [junit4]   2> 26161 INFO  (SocketProxy-Acceptor-44840) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=38819,localport=34384], receiveBufferSize=530904
   [junit4]   2> 26161 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=40154,localport=55846], receiveBufferSize=530904
   [junit4]   2> 26162 INFO  (qtp634215519-150) [n:127.0.0.1:45475_ 
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 26162 INFO  (qtp634215519-150) [n:127.0.0.1:45475_ 
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 26162 INFO  (qtp1606558187-213) [n:127.0.0.1:44840_ 
c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 26163 INFO  (qtp1606558187-213) [n:127.0.0.1:44840_ 
c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 26164 INFO  (qtp1606558187-213) [n:127.0.0.1:44840_ 
c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 26164 INFO  (qtp634215519-150) [n:127.0.0.1:45475_ 
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 26164 INFO  (qtp1606558187-213) [n:127.0.0.1:44840_ 
c:collection1 s:shard2 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:46191/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 1
   [junit4]   2> 26164 INFO  (qtp634215519-150) [n:127.0.0.1:45475_ 
c:collection1 s:shard2 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:46191/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 1
   [junit4]   2> 26164 INFO  (qtp1337522218-184) [n:127.0.0.1:46191_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 26164 INFO  (qtp1337522218-184) [n:127.0.0.1:46191_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 26165 INFO  (qtp1337522218-184) [n:127.0.0.1:46191_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 26165 INFO  (qtp1337522218-184) [n:127.0.0.1:46191_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:46191/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 0
   [junit4]   2> 26166 INFO  (qtp1337522218-186) [n:127.0.0.1:46191_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
 0 11
   [junit4]   2> 26167 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=51526,localport=45475], receiveBufferSize:531000
   [junit4]   2> 26170 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=40154,localport=55852], receiveBufferSize=530904
   [junit4]   2> 26173 INFO  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=1
   [junit4]   2> 26174 INFO  (SocketProxy-Acceptor-44840) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=46126,localport=44840], receiveBufferSize:531000
   [junit4]   2> 26175 INFO  (SocketProxy-Acceptor-44840) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=38819,localport=34392], receiveBufferSize=530904
   [junit4]   2> 26177 INFO  (qtp1606558187-215) [n:127.0.0.1:44840_ 
c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 26178 INFO  (SocketProxy-Acceptor-46191) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=59278,localport=46191], receiveBufferSize:531000
   [junit4]   2> 26178 INFO  (SocketProxy-Acceptor-46191) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=41342,localport=52766], receiveBufferSize=530904
   [junit4]   2> 26183 INFO  (qtp1337522218-180) [n:127.0.0.1:46191_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 28184 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase Creating collection with stateFormat=1: 
c8n_1x3_lf
   [junit4]   2> 28186 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=51540,localport=45475], receiveBufferSize:531000
   [junit4]   2> 28186 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=40154,localport=55866], receiveBufferSize=530904
   [junit4]   2> 28201 INFO  (qtp634215519-150) [n:127.0.0.1:45475_    ] 
o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params 
replicationFactor=3&maxShardsPerNode=1&name=c8n_1x3_lf&action=CREATE&numShards=1&stateFormat=1&wt=javabin&version=2
 and sendToOCPQueue=true
   [junit4]   2> 28216 INFO  
(OverseerThreadFactory-68-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_    ] o.a.s.c.CreateCollectionCmd Create collection 
c8n_1x3_lf
   [junit4]   2> 28216 INFO  
(OverseerThreadFactory-68-thread-1-processing-n:127.0.0.1:39228_) 
[n:127.0.0.1:39228_    ] o.a.s.c.CreateCollectionCmd Only one config set found 
in zk - using it:conf1
   [junit4]   2> 28322 INFO  (SocketProxy-Acceptor-44840) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=46142,localport=44840], receiveBufferSize:531000
   [junit4]   2> 28322 INFO  (SocketProxy-Acceptor-39228) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=51194,localport=39228], receiveBufferSize:531000
   [junit4]   2> 28322 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy accepted 
Socket[addr=/127.0.0.1,port=51544,localport=45475], receiveBufferSize:531000
   [junit4]   2> 28323 INFO  (SocketProxy-Acceptor-44840) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=38819,localport=34410], receiveBufferSize=530904
   [junit4]   2> 28324 INFO  (SocketProxy-Acceptor-45475) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=40154,localport=55878], receiveBufferSize=530904
   [junit4]   2> 28324 INFO  (SocketProxy-Acceptor-39228) [    ] 
o.a.s.c.SocketProxy proxy connection 
Socket[addr=/127.0.0.1,port=44007,localport=44810], receiveBufferSize=530904
   [junit4]   2> 28326 INFO  (qtp1606558187-213) [n:127.0.0.1:44840_    ] 
o.a.s.h.a.CoreAdminOperation core create command 
qt=/admin/cores&collection.configName=conf1&newCollection=true&name=c8n_1x3_lf_shard1_replica2&action=CREATE&numShards=1&collection=c8n_1x3_lf&shard=shard1&wt=javabin&version=2
   [junit4]   2> 28326 INFO  (qtp634215519-155) [n:127.0.0.1:45475_    ] 
o.a.s.h.a.CoreAdminOperation core create command 
qt=/admin/cores&collection.configName=conf1&newCollection=true&name=c8n_1x3_lf_shard1_replica1&action=CREATE&numShards=1&collection=c8n_1x3_lf&shard=shard1&wt=javabin&version=2
   [junit4]   2> 28328 INFO  (qtp661315876-119) [n:127.0.0.1:39228_    ] 
o.a.s.h.a.CoreAdminOperation core create command 
qt=/admin/cores&collection.configName=conf1&newCollection=true&name=c8n_1x3_lf_shard1_replica3&action=CREATE&numShards=1&collection=c8n_1x3_lf&shard=shard1&wt=javabin&version=2
   [junit4]   2> 29353 WARN  (qtp661315876-119) [n:127.0.0.1:39228_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 29353 WARN  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 29354 INFO  (qtp661315876-119) [n:127.0.0.1:39228_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 6.4.2
   [junit4]   2> 29354 INFO  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 6.4.2
   [junit4]   2> 29390 WARN  (qtp1606558187-213) [n:127.0.0.1:44840_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica2] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 29391 INFO  (qtp1606558187-213) [n:127.0.0.1:44840_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica2] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 6.4.2
   [junit4]   2> 29393 INFO  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.s.IndexSchema 
[c8n_1x3_lf_shard1_replica1] Schema name=test
   [junit4]   2> 29393 INFO  (qtp661315876-119) [n:127.0.0.1:39228_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.s.IndexSchema 
[c8n_1x3_lf_shard1_replica3] Schema name=test
   [junit4]   2> 29431 INFO  (qtp1606558187-213) [n:127.0.0.1:44840_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica2] o.a.s.s.IndexSchema 
[c8n_1x3_lf_shard1_replica2] Schema name=test
   [junit4]   2> 29599 WARN  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.s.IndexSchema 
[c8n_1x3_lf_shard1_replica1] default search field in schema is text. WARNING: 
Deprecated, please use 'df' on request instead.
   [junit4]   2> 29599 WARN  (qtp661315876-119) [n:127.0.0.1:39228_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.s.IndexSchema 
[c8n_1x3_lf_shard1_replica3] default search field in schema is text. WARNING: 
Deprecated, please use 'df' on request instead.
   [junit4]   2> 29603 INFO  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 29617 INFO  (qtp661315876-119) [n:127.0.0.1:39228_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 29619 WARN  (qtp1606558187-213) [n:127.0.0.1:44840_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica2] o.a.s.s.IndexSchema 
[c8n_1x3_lf_shard1_replica2] default search field in schema is text. WARNING: 
Deprecated, please use 'df' on request instead.
   [junit4]   2> 29622 INFO  (qtp1606558187-213) [n:127.0.0.1:44840_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica2] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 29634 INFO  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.c.CoreContainer 
Creating SolrCore 'c8n_1x3_lf_shard1_replica1' using configuration from 
collection c8n_1x3_lf
   [junit4]   2> 29634 INFO  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.c.SolrCore 
[[c8n_1x3_lf_shard1_replica1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-1-001/cores/c8n_1x3_lf_shard1_replica1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001/shard-1-001/cores/c8n_1x3_lf_shard1_replica1/data/]
   [junit4]   2> 29635 INFO  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.c.JmxMonitoredMap 
JMX monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@2d3b71f8
   [junit4]   2> 29638 INFO  (qtp634215519-155) [n:127.0.0.1:45475_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: 
[LogDocMergePolicy: minMergeSize=1000, mergeFactor=6, 
maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.4091786280366779]
   [junit4]   2> 29643 INFO  (qtp1606558187-213) [n:127.0.0.1:44840_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica2] o.a.s.c.CoreContainer 
Creating SolrCore 'c8n_1x3_lf_shard1_replica2' using configuration from 
collection c8n_1x3_lf
   [junit4]   2> 29643 INFO  (qtp661315876-119) [n:127.0.0.1:39228_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.c.CoreContainer 
Creating SolrCore 'c8n_1x3_lf_shard1_replica3' using configuration from 
collection c8n_1x3_lf
   [junit4]   2> 29644 INFO  (qtp661315876-119) [n:127.0.0.1:39228_ 
c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.c.SolrCore 
[[c8n_1x3_lf_shard1_replica3] ] Opening new SolrCore at [/home/jenkins/

[...truncated too long message...]

-n:127.0.0.1:44840_ x:c8n_1x3_lf_shard1_replica2 s:shard1 c:c8n_1x3_lf 
r:core_node1) [n:127.0.0.1:44840_ c:c8n_1x3_lf s:shard1 r:core_node1 
x:c8n_1x3_lf_shard1_replica2] o.a.s.c.RecoveryStrategy RecoveryStrategy has 
been closed
   [junit4]   2> 155878 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ 
x:c8n_1x3_lf_shard1_replica2 s:shard1 c:c8n_1x3_lf r:core_node1) 
[n:127.0.0.1:44840_ c:c8n_1x3_lf s:shard1 r:core_node1 
x:c8n_1x3_lf_shard1_replica2] o.a.s.c.RecoveryStrategy Finished recovery 
process, successful=[false]
   [junit4]   2> 155878 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ 
x:c8n_1x3_lf_shard1_replica2 s:shard1 c:c8n_1x3_lf r:core_node1) 
[n:127.0.0.1:44840_ c:c8n_1x3_lf s:shard1 r:core_node1 
x:c8n_1x3_lf_shard1_replica2] o.a.s.c.SolrCore [c8n_1x3_lf_shard1_replica2]  
CLOSING SolrCore org.apache.solr.core.SolrCore@5bef9aa
   [junit4]   2> 155879 WARN  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ 
x:c8n_1x3_lf_shard1_replica2 s:shard1 c:c8n_1x3_lf r:core_node1) 
[n:127.0.0.1:44840_ c:c8n_1x3_lf s:shard1 r:core_node1 
x:c8n_1x3_lf_shard1_replica2] o.a.s.c.RecoveryStrategy Stopping recovery for 
core=[c8n_1x3_lf_shard1_replica2] coreNodeName=[core_node1]
   [junit4]   2> 155914 INFO  
(recoveryExecutor-36-thread-1-processing-n:127.0.0.1:44840_ 
x:c8n_1x3_lf_shard1_replica2 s:shard1 c:c8n_1x3_lf r:core_node1) 
[n:127.0.0.1:44840_ c:c8n_1x3_lf s:shard1 r:core_node1 
x:c8n_1x3_lf_shard1_replica2] o.a.s.m.SolrMetricManager Closing metric 
reporters for: solr.core.c8n_1x3_lf.shard1.replica2
   [junit4]   2> 155915 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.Overseer Overseer (id=97571633381965838-127.0.0.1:44840_-n_0000000003) 
closing
   [junit4]   2> 155915 INFO  
(OverseerStateUpdate-97571633381965838-127.0.0.1:44840_-n_0000000003) 
[n:127.0.0.1:44840_    ] o.a.s.c.Overseer Overseer Loop exiting : 
127.0.0.1:44840_
   [junit4]   2> 155918 WARN  (NIOServerCxn.Factory:0.0.0.0/0.0.0.0:0) [    ] 
o.a.z.s.NIOServerCnxn caught end of stream exception
   [junit4]   2> EndOfStreamException: Unable to read additional data from 
client sessionid 0x15aa4e233e3000e, likely client has closed socket
   [junit4]   2>        at 
org.apache.zookeeper.server.NIOServerCnxn.doIO(NIOServerCnxn.java:228)
   [junit4]   2>        at 
org.apache.zookeeper.server.NIOServerCnxnFactory.run(NIOServerCnxnFactory.java:208)
   [junit4]   2>        at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 155918 WARN  
(zkCallback-38-thread-5-processing-n:127.0.0.1:44840_) [n:127.0.0.1:44840_    ] 
o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: 
[KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 155919 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 155921 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.AbstractConnector Stopped 
ServerConnector@3b192c7c{HTTP/1.1,[http/1.1]}{127.0.0.1:0}
   [junit4]   2> 155921 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.e.j.s.h.ContextHandler Stopped 
o.e.j.s.ServletContextHandler@39935198{/,null,UNAVAILABLE}
   [junit4]   2> 155922 INFO  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.ZkTestServer connecting to 127.0.0.1:37923 37923
   [junit4]   2> 155941 INFO  (Thread-35) [    ] o.a.s.c.ZkTestServer 
connecting to 127.0.0.1:37923 37923
   [junit4]   2> 155943 WARN  (Thread-35) [    ] o.a.s.c.ZkTestServer Watch 
limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2>        5       /solr/aliases.json
   [junit4]   2>        5       /solr/clusterprops.json
   [junit4]   2>        4       /solr/security.json
   [junit4]   2>        4       /solr/configs/conf1
   [junit4]   2>        3       /solr/collections/c8n_1x3_lf/state.json
   [junit4]   2>        3       /solr/collections/collection1/state.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2>        5       /solr/clusterstate.json
   [junit4]   2>        2       
/solr/overseer_elect/election/97571633381965828-127.0.0.1:39228_-n_0000000000
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2>        142     /solr/overseer/collection-queue-work
   [junit4]   2>        52      /solr/overseer/queue
   [junit4]   2>        12      /solr/overseer/queue-work
   [junit4]   2>        5       /solr/live_nodes
   [junit4]   2>        5       /solr/collections
   [junit4]   2> 
   [junit4]   2> 155943 WARN  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.SocketProxy Closing 3 connections to: http://127.0.0.1:46191/, target: 
http://127.0.0.1:41342/
   [junit4]   2> 155943 WARN  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.SocketProxy Closing 16 connections to: http://127.0.0.1:39228/, target: 
http://127.0.0.1:44007/
   [junit4]   2> 155943 WARN  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.SocketProxy Closing 0 connections to: http://127.0.0.1:45475/, target: 
http://127.0.0.1:40154/
   [junit4]   2> 155943 WARN  
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[AFDCF4D88650EC2C]) [    ] 
o.a.s.c.SocketProxy Closing 1 connections to: http://127.0.0.1:44840/, target: 
http://127.0.0.1:38819/
   [junit4]   2> NOTE: reproduce with: ant test  
-Dtestcase=LeaderFailoverAfterPartitionTest -Dtests.method=test 
-Dtests.seed=AFDCF4D88650EC2C -Dtests.multiplier=3 -Dtests.slow=true 
-Dtests.locale=ru -Dtests.timezone=America/Panama -Dtests.asserts=true 
-Dtests.file.encoding=UTF-8
   [junit4] FAILURE  145s J2 | LeaderFailoverAfterPartitionTest.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: Expected 2 of 3 
replicas to be active but only found 1; 
[core_node3:{"core":"c8n_1x3_lf_shard1_replica3","base_url":"http://127.0.0.1:39228","node_name":"127.0.0.1:39228_","state":"active","leader":"true"}];
 clusterState: DocCollection(c8n_1x3_lf//clusterstate.json/33)={
   [junit4]    >   "replicationFactor":"3",
   [junit4]    >   "shards":{"shard1":{
   [junit4]    >       "range":"80000000-7fffffff",
   [junit4]    >       "state":"active",
   [junit4]    >       "replicas":{
   [junit4]    >         "core_node1":{
   [junit4]    >           "state":"down",
   [junit4]    >           "base_url":"http://127.0.0.1:44840",
   [junit4]    >           "core":"c8n_1x3_lf_shard1_replica2",
   [junit4]    >           "node_name":"127.0.0.1:44840_"},
   [junit4]    >         "core_node2":{
   [junit4]    >           "core":"c8n_1x3_lf_shard1_replica1",
   [junit4]    >           "base_url":"http://127.0.0.1:45475",
   [junit4]    >           "node_name":"127.0.0.1:45475_",
   [junit4]    >           "state":"down"},
   [junit4]    >         "core_node3":{
   [junit4]    >           "core":"c8n_1x3_lf_shard1_replica3",
   [junit4]    >           "base_url":"http://127.0.0.1:39228",
   [junit4]    >           "node_name":"127.0.0.1:39228_",
   [junit4]    >           "state":"active",
   [junit4]    >           "leader":"true"}}}},
   [junit4]    >   "router":{"name":"compositeId"},
   [junit4]    >   "maxShardsPerNode":"1",
   [junit4]    >   "autoAddReplicas":"false"}
   [junit4]    >        at 
__randomizedtesting.SeedInfo.seed([AFDCF4D88650EC2C:2788CB0228AC81D4]:0)
   [junit4]    >        at 
org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.testRf3WithLeaderFailover(LeaderFailoverAfterPartitionTest.java:168)
   [junit4]    >        at 
org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.test(LeaderFailoverAfterPartitionTest.java:55)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:992)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:967)
   [junit4]    >        at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 155945 INFO  
(SUITE-LeaderFailoverAfterPartitionTest-seed#[AFDCF4D88650EC2C]-worker) [    ] 
o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> NOTE: leaving temporary files on disk at: 
/home/jenkins/workspace/Lucene-Solr-6.4-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.LeaderFailoverAfterPartitionTest_AFDCF4D88650EC2C-001
   [junit4]   2> Mar 06, 2017 6:31:57 PM 
com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
   [junit4]   2> WARNING: Will linger awaiting termination of 2 leaked 
thread(s).
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene62): 
{range_facet_l_dv=PostingsFormat(name=LuceneVarGapFixedInterval), 
_version_=PostingsFormat(name=LuceneFixedGap), 
multiDefault=PostingsFormat(name=MockRandom), 
a_t=PostingsFormat(name=MockRandom), 
intDefault=PostingsFormat(name=LuceneFixedGap), 
id=PostingsFormat(name=LuceneVarGapFixedInterval), 
range_facet_i_dv=PostingsFormat(name=MockRandom), 
text=PostingsFormat(name=Memory doPackFST= false), 
range_facet_l=PostingsFormat(name=MockRandom), 
timestamp=PostingsFormat(name=MockRandom)}, 
docValues:{range_facet_l_dv=DocValuesFormat(name=Lucene54), 
range_facet_i_dv=DocValuesFormat(name=Asserting), 
timestamp=DocValuesFormat(name=Asserting)}, maxPointsInLeafNode=1991, 
maxMBSortInHeap=6.767218816617811, 
sim=RandomSimilarity(queryNorm=false,coord=yes): {}, locale=ru, 
timezone=America/Panama
   [junit4]   2> NOTE: Linux 4.4.0-53-generic amd64/Oracle Corporation 
1.8.0_121 (64-bit)/cpus=12,threads=1,free=383704104,total=509018112
   [junit4]   2> NOTE: All tests run in this JVM: [TestManagedResourceStorage, 
TestPostingsSolrHighlighter, TestDynamicFieldCollectionResource, 
DirectUpdateHandlerOptimizeTest, HdfsUnloadDistributedZkTest, TestBinaryField, 
TestSchemaVersionResource, LeaderFailoverAfterPartitionTest]
   [junit4] Completed [48/680 (1!)] on J2 in 145.83s, 1 test, 1 failure <<< 
FAILURES!

[...truncated 64507 lines...]
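For anyone picking this up: the assertion that trips at LeaderFailoverAfterPartitionTest.java:168 is checking the published cluster state after the partitions are healed, and in this run only core_node3 ever comes back as "active" while core_node1 and core_node2 stay "down". As a rough aid when poking at a reproduced run, the SolrJ sketch below counts the replicas of shard1 that are both ACTIVE and sitting on a live node, which is the same kind of check the test asserts (it expects at least 2 of the 3). The ZooKeeper address, the class name, and the standalone main() wrapper are placeholders for illustration, not part of the test itself.

    import java.util.Set;

    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.common.cloud.ClusterState;
    import org.apache.solr.common.cloud.DocCollection;
    import org.apache.solr.common.cloud.Replica;
    import org.apache.solr.common.cloud.Slice;
    import org.apache.solr.common.cloud.ZkStateReader;

    public class ActiveReplicaCheck {
      public static void main(String[] args) throws Exception {
        // zkHost is an assumption; this run used an embedded ZkTestServer at 127.0.0.1:37923 with chroot /solr.
        String zkHost = args.length > 0 ? args[0] : "127.0.0.1:37923/solr";
        String collection = "c8n_1x3_lf";
        String shard = "shard1";

        try (CloudSolrClient client = new CloudSolrClient.Builder().withZkHost(zkHost).build()) {
          client.connect();
          ZkStateReader reader = client.getZkStateReader();
          ClusterState clusterState = reader.getClusterState();
          Set<String> liveNodes = clusterState.getLiveNodes();

          DocCollection coll = clusterState.getCollection(collection);
          Slice slice = coll.getSlice(shard);

          int active = 0;
          for (Replica replica : slice.getReplicas()) {
            // A replica only counts if its published state is ACTIVE and its node is still live.
            boolean live = liveNodes.contains(replica.getNodeName());
            boolean isActive = replica.getState() == Replica.State.ACTIVE && live;
            System.out.printf("%s on %s -> %s (live=%s)%n",
                replica.getName(), replica.getNodeName(), replica.getState(), live);
            if (isActive) active++;
          }
          System.out.println("Active replicas in " + shard + ": " + active
              + " of " + slice.getReplicas().size());
        }
      }
    }

Run against the zkHost printed earlier in the log while the test JVMs are still up (the test leaves temp dirs on disk per the NOTE above); a result below 2 matches the failure reported here.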

