Build: https://jenkins.thetaphi.de/job/Lucene-Solr-6.x-MacOSX/817/
Java: 64bit/jdk1.8.0 -XX:+UseCompressedOops -XX:+UseSerialGC
1 tests failed.
FAILED: org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.test
Error Message:
Expected 2 of 3 replicas to be active but only found 1;
[core_node3:{"core":"c8n_1x3_lf_shard1_replica2","base_url":"http://127.0.0.1:60156/px","node_name":"127.0.0.1:60156_px","state":"active","leader":"true"}];
clusterState: DocCollection(c8n_1x3_lf//clusterstate.json/30)={
"replicationFactor":"3", "shards":{"shard1":{
"range":"80000000-7fffffff", "state":"active", "replicas":{
"core_node1":{ "core":"c8n_1x3_lf_shard1_replica3",
"base_url":"http://127.0.0.1:60161/px",
"node_name":"127.0.0.1:60161_px", "state":"down"},
"core_node2":{ "state":"down",
"base_url":"http://127.0.0.1:60138/px",
"core":"c8n_1x3_lf_shard1_replica1",
"node_name":"127.0.0.1:60138_px"}, "core_node3":{
"core":"c8n_1x3_lf_shard1_replica2",
"base_url":"http://127.0.0.1:60156/px",
"node_name":"127.0.0.1:60156_px", "state":"active",
"leader":"true"}}}}, "router":{"name":"compositeId"},
"maxShardsPerNode":"1", "autoAddReplicas":"false"}
Stack Trace:
java.lang.AssertionError: Expected 2 of 3 replicas to be active but only found
1;
[core_node3:{"core":"c8n_1x3_lf_shard1_replica2","base_url":"http://127.0.0.1:60156/px","node_name":"127.0.0.1:60156_px","state":"active","leader":"true"}];
clusterState: DocCollection(c8n_1x3_lf//clusterstate.json/30)={
"replicationFactor":"3",
"shards":{"shard1":{
"range":"80000000-7fffffff",
"state":"active",
"replicas":{
"core_node1":{
"core":"c8n_1x3_lf_shard1_replica3",
"base_url":"http://127.0.0.1:60161/px",
"node_name":"127.0.0.1:60161_px",
"state":"down"},
"core_node2":{
"state":"down",
"base_url":"http://127.0.0.1:60138/px",
"core":"c8n_1x3_lf_shard1_replica1",
"node_name":"127.0.0.1:60138_px"},
"core_node3":{
"core":"c8n_1x3_lf_shard1_replica2",
"base_url":"http://127.0.0.1:60156/px",
"node_name":"127.0.0.1:60156_px",
"state":"active",
"leader":"true"}}}},
"router":{"name":"compositeId"},
"maxShardsPerNode":"1",
"autoAddReplicas":"false"}
at
__randomizedtesting.SeedInfo.seed([4C69E9CF74932785:C43DD615DA6F4A7D]:0)
at org.junit.Assert.fail(Assert.java:93)
at org.junit.Assert.assertTrue(Assert.java:43)
at
org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.testRf3WithLeaderFailover(LeaderFailoverAfterPartitionTest.java:168)
at
org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.test(LeaderFailoverAfterPartitionTest.java:55)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:992)
at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:967)
at
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
at
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at java.lang.Thread.run(Thread.java:745)
Build Log:
[...truncated 11247 lines...]
[junit4] Suite: org.apache.solr.cloud.LeaderFailoverAfterPartitionTest
[junit4] 2> Creating dataDir:
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/init-core-data-001
[junit4] 2> 619457 WARN
(SUITE-LeaderFailoverAfterPartitionTest-seed#[4C69E9CF74932785]-worker) [ ]
o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=7 numCloses=7
[junit4] 2> 619457 INFO
(SUITE-LeaderFailoverAfterPartitionTest-seed#[4C69E9CF74932785]-worker) [ ]
o.a.s.SolrTestCaseJ4 Using TrieFields
[junit4] 2> 619462 INFO
(SUITE-LeaderFailoverAfterPartitionTest-seed#[4C69E9CF74932785]-worker) [ ]
o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via:
@org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl=https://issues.apache.org/jira/browse/SOLR-5776)
w/ MAC_OS_X suppressed clientAuth
[junit4] 2> 619462 INFO
(SUITE-LeaderFailoverAfterPartitionTest-seed#[4C69E9CF74932785]-worker) [ ]
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /px/
[junit4] 2> 619463 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 619463 INFO (Thread-747) [ ] o.a.s.c.ZkTestServer client
port:0.0.0.0/0.0.0.0:0
[junit4] 2> 619464 INFO (Thread-747) [ ] o.a.s.c.ZkTestServer Starting
server
[junit4] 2> 619571 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.ZkTestServer start zk server on port:60100
[junit4] 2> 619607 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
to /configs/conf1/solrconfig.xml
[junit4] 2> 619611 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/core/src/test-files/solr/collection1/conf/schema.xml
to /configs/conf1/schema.xml
[junit4] 2> 619614 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
[junit4] 2> 619617 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
to /configs/conf1/stopwords.txt
[junit4] 2> 619620 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/core/src/test-files/solr/collection1/conf/protwords.txt
to /configs/conf1/protwords.txt
[junit4] 2> 619623 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/core/src/test-files/solr/collection1/conf/currency.xml
to /configs/conf1/currency.xml
[junit4] 2> 619626 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
to /configs/conf1/enumsConfig.xml
[junit4] 2> 619628 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
to /configs/conf1/open-exchange-rates.json
[junit4] 2> 619631 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
to /configs/conf1/mapping-ISOLatin1Accent.txt
[junit4] 2> 619634 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
to /configs/conf1/old_synonyms.txt
[junit4] 2> 619637 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
to /configs/conf1/synonyms.txt
[junit4] 2> 620028 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/control-001/cores/collection1
[junit4] 2> 620032 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 620035 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@4a8cad7d{/px,null,AVAILABLE}
[junit4] 2> 620053 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@3fe2fdcf{HTTP/1.1,[http/1.1]}{127.0.0.1:60123}
[junit4] 2> 620053 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.Server Started @625755ms
[junit4] 2> 620057 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/tempDir-001/control/data,
hostContext=/px, hostPort=60122,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/control-001/cores}
[junit4] 2> 620057 ERROR
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 620058 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
6.6.0
[junit4] 2> 620058 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 620058 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 620058 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-04-11T14:08:43.292Z
[junit4] 2> 620062 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 620063 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/control-001/solr.xml
[junit4] 2> 620077 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params:
socketTimeout=340000&connTimeout=45000&retry=true
[junit4] 2> 620079 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:60100/solr
[junit4] 2> 620134 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60122_px ] o.a.s.c.Overseer Overseer (id=null) closing
[junit4] 2> 620140 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60122_px ] o.a.s.c.OverseerElectionContext I am going to be the
leader 127.0.0.1:60122_px
[junit4] 2> 620141 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60122_px ] o.a.s.c.Overseer Overseer
(id=97774450946867204-127.0.0.1:60122_px-n_0000000000) starting
[junit4] 2> 620164 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60122_px ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:60122_px
[junit4] 2> 620167 INFO
(zkCallback-511-thread-1-processing-n:127.0.0.1:60122_px) [n:127.0.0.1:60122_px
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 620337 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60122_px ] o.a.s.c.CorePropertiesLocator Found 1 core
definitions underneath
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/control-001/cores
[junit4] 2> 620337 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60122_px ] o.a.s.c.CorePropertiesLocator Cores are:
[collection1]
[junit4] 2> 620338 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4
transient cores
[junit4] 2> 620342 INFO
(OverseerStateUpdate-97774450946867204-127.0.0.1:60122_px-n_0000000000)
[n:127.0.0.1:60122_px ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard1
[junit4] 2> 621372 WARN
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1] o.a.s.c.Config
Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory>
instead.
[junit4] 2> 621373 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1] o.a.s.c.SolrConfig
Using Lucene MatchVersion: 6.6.0
[junit4] 2> 621393 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 621503 WARN
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 621507 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1] o.a.s.s.IndexSchema
Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 621535 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from
collection control_collection, trusted=true
[junit4] 2> 621535 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1] o.a.s.c.SolrCore
solr.RecoveryStrategy.Builder
[junit4] 2> 621535 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/control-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/control-001/cores/collection1/data/]
[junit4] 2> 621535 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@4fb7b494
[junit4] 2> 621537 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy:
maxMergeAtOnce=21, maxMergeAtOnceExplicit=24, maxMergedSegmentMB=87.2119140625,
floorSegmentMB=1.4560546875, forceMergeDeletesPctAllowed=15.56625461044676,
segmentsPerTier=37.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
[junit4] 2> 621543 WARN
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type =
requestHandler,name = /dump,class = DumpRequestHandler,attributes =
{initParams=a, name=/dump, class=DumpRequestHandler},args =
{defaults={a=A,b=B}}}
[junit4] 2> 621622 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.u.UpdateHandler Using UpdateLog implementation:
org.apache.solr.update.UpdateLog
[junit4] 2> 621623 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 621624 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 621624 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 621625 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy:
maxMergeAtOnce=46, maxMergeAtOnceExplicit=42, maxMergedSegmentMB=97.7275390625,
floorSegmentMB=2.0087890625, forceMergeDeletesPctAllowed=5.3783988875795306,
segmentsPerTier=32.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
[junit4] 2> 621625 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.s.SolrIndexSearcher Opening [Searcher@3212a59c[collection1] main]
[junit4] 2> 621629 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 621629 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 621629 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1]
o.a.s.h.ReplicationHandler Commits will be reserved for 10000
[junit4] 2> 621631 INFO
(searcherExecutor-1699-thread-1-processing-n:127.0.0.1:60122_px x:collection1
c:control_collection) [n:127.0.0.1:60122_px c:control_collection
x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@3212a59c[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 621631 INFO
(coreLoadExecutor-1698-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px c:control_collection x:collection1] o.a.s.u.UpdateLog
Could not find max version in index or recent updates, using new clock
1564391217420042240
[junit4] 2> 621657 INFO
(coreZkRegister-1691-thread-1-processing-n:127.0.0.1:60122_px x:collection1
c:control_collection) [n:127.0.0.1:60122_px c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas
found to continue.
[junit4] 2> 621657 INFO
(coreZkRegister-1691-thread-1-processing-n:127.0.0.1:60122_px x:collection1
c:control_collection) [n:127.0.0.1:60122_px c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new
leader - try and sync
[junit4] 2> 621657 INFO
(coreZkRegister-1691-thread-1-processing-n:127.0.0.1:60122_px x:collection1
c:control_collection) [n:127.0.0.1:60122_px c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to
http://127.0.0.1:60122/px/collection1/
[junit4] 2> 621657 INFO
(coreZkRegister-1691-thread-1-processing-n:127.0.0.1:60122_px x:collection1
c:control_collection) [n:127.0.0.1:60122_px c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync
replicas to me
[junit4] 2> 621657 INFO
(coreZkRegister-1691-thread-1-processing-n:127.0.0.1:60122_px x:collection1
c:control_collection) [n:127.0.0.1:60122_px c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy
http://127.0.0.1:60122/px/collection1/ has no replicas
[junit4] 2> 621657 INFO
(coreZkRegister-1691-thread-1-processing-n:127.0.0.1:60122_px x:collection1
c:control_collection) [n:127.0.0.1:60122_px c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Found all
replicas participating in election, clear LIR
[junit4] 2> 621667 INFO
(coreZkRegister-1691-thread-1-processing-n:127.0.0.1:60122_px x:collection1
c:control_collection) [n:127.0.0.1:60122_px c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new
leader: http://127.0.0.1:60122/px/collection1/ shard1
[junit4] 2> 621837 INFO
(coreZkRegister-1691-thread-1-processing-n:127.0.0.1:60122_px x:collection1
c:control_collection) [n:127.0.0.1:60122_px c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery
necessary
[junit4] 2> 622106 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 622108 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:60100/solr ready
[junit4] 2> 622109 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection
loss:false
[junit4] 2> 622405 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-1-001/cores/collection1
[junit4] 2> 622407 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-1-001
[junit4] 2> 622407 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 622409 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@46a46883{/px,null,AVAILABLE}
[junit4] 2> 622410 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@1dc0da89{HTTP/1.1,[http/1.1]}{127.0.0.1:60139}
[junit4] 2> 622410 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.Server Started @628112ms
[junit4] 2> 622410 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/tempDir-001/jetty1,
solrconfig=solrconfig.xml, hostContext=/px, hostPort=60138,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-1-001/cores}
[junit4] 2> 622411 ERROR
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 622411 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
6.6.0
[junit4] 2> 622411 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 622411 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 622411 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-04-11T14:08:45.645Z
[junit4] 2> 622416 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 622416 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-1-001/solr.xml
[junit4] 2> 622427 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params:
socketTimeout=340000&connTimeout=45000&retry=true
[junit4] 2> 622428 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:60100/solr
[junit4] 2> 622449 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60138_px ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (0) -> (1)
[junit4] 2> 622454 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60138_px ] o.a.s.c.Overseer Overseer (id=null) closing
[junit4] 2> 622459 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60138_px ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:60138_px
[junit4] 2> 622463 INFO
(zkCallback-520-thread-1-processing-n:127.0.0.1:60138_px) [n:127.0.0.1:60138_px
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 622463 INFO
(zkCallback-511-thread-2-processing-n:127.0.0.1:60122_px) [n:127.0.0.1:60122_px
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 622463 INFO (zkCallback-515-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 622544 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60138_px ] o.a.s.c.CorePropertiesLocator Found 1 core
definitions underneath
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-1-001/cores
[junit4] 2> 622544 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60138_px ] o.a.s.c.CorePropertiesLocator Cores are:
[collection1]
[junit4] 2> 622545 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1]
o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4
transient cores
[junit4] 2> 622547 INFO
(OverseerStateUpdate-97774450946867204-127.0.0.1:60122_px-n_0000000000)
[n:127.0.0.1:60122_px ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard2
[junit4] 2> 623584 WARN
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.c.Config Beginning
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
[junit4] 2> 623584 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 6.6.0
[junit4] 2> 623596 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 623682 WARN
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 623685 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.s.IndexSchema Loaded
schema test/1.0 with uniqueid field id
[junit4] 2> 623713 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection
collection1, trusted=true
[junit4] 2> 623713 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.c.SolrCore
solr.RecoveryStrategy.Builder
[junit4] 2> 623713 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-1-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-1-001/cores/collection1/data/]
[junit4] 2> 623714 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.c.JmxMonitoredMap
JMX monitoring is enabled. Adding Solr mbeans to JMX Server:
com.sun.jmx.mbeanserver.JmxMBeanServer@4fb7b494
[junit4] 2> 623715 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=21, maxMergeAtOnceExplicit=24,
maxMergedSegmentMB=87.2119140625, floorSegmentMB=1.4560546875,
forceMergeDeletesPctAllowed=15.56625461044676, segmentsPerTier=37.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
[junit4] 2> 623720 WARN
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.c.RequestHandlers
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class
= DumpRequestHandler,attributes = {initParams=a, name=/dump,
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 623787 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.u.UpdateHandler
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 623787 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 623788 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 623788 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 623789 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=46, maxMergeAtOnceExplicit=42,
maxMergedSegmentMB=97.7275390625, floorSegmentMB=2.0087890625,
forceMergeDeletesPctAllowed=5.3783988875795306, segmentsPerTier=32.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
[junit4] 2> 623789 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.s.SolrIndexSearcher
Opening [Searcher@1c61b83c[collection1] main]
[junit4] 2> 623791 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 623792 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 623792 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000
[junit4] 2> 623794 INFO
(searcherExecutor-1710-thread-1-processing-n:127.0.0.1:60138_px x:collection1
c:collection1) [n:127.0.0.1:60138_px c:collection1 x:collection1]
o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@1c61b83c[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 623794 INFO
(coreLoadExecutor-1709-thread-1-processing-n:127.0.0.1:60138_px)
[n:127.0.0.1:60138_px c:collection1 x:collection1] o.a.s.u.UpdateLog Could
not find max version in index or recent updates, using new clock
1564391219688112128
[junit4] 2> 623806 INFO
(coreZkRegister-1704-thread-1-processing-n:127.0.0.1:60138_px x:collection1
c:collection1) [n:127.0.0.1:60138_px c:collection1 s:shard2 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to
continue.
[junit4] 2> 623806 INFO
(coreZkRegister-1704-thread-1-processing-n:127.0.0.1:60138_px x:collection1
c:collection1) [n:127.0.0.1:60138_px c:collection1 s:shard2 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try
and sync
[junit4] 2> 623806 INFO
(coreZkRegister-1704-thread-1-processing-n:127.0.0.1:60138_px x:collection1
c:collection1) [n:127.0.0.1:60138_px c:collection1 s:shard2 r:core_node1
x:collection1] o.a.s.c.SyncStrategy Sync replicas to
http://127.0.0.1:60138/px/collection1/
[junit4] 2> 623806 INFO
(coreZkRegister-1704-thread-1-processing-n:127.0.0.1:60138_px x:collection1
c:collection1) [n:127.0.0.1:60138_px c:collection1 s:shard2 r:core_node1
x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
[junit4] 2> 623806 INFO
(coreZkRegister-1704-thread-1-processing-n:127.0.0.1:60138_px x:collection1
c:collection1) [n:127.0.0.1:60138_px c:collection1 s:shard2 r:core_node1
x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:60138/px/collection1/ has
no replicas
[junit4] 2> 623806 INFO
(coreZkRegister-1704-thread-1-processing-n:127.0.0.1:60138_px x:collection1
c:collection1) [n:127.0.0.1:60138_px c:collection1 s:shard2 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext Found all replicas
participating in election, clear LIR
[junit4] 2> 623814 INFO
(coreZkRegister-1704-thread-1-processing-n:127.0.0.1:60138_px x:collection1
c:collection1) [n:127.0.0.1:60138_px c:collection1 s:shard2 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader:
http://127.0.0.1:60138/px/collection1/ shard2
[junit4] 2> 623984 INFO
(coreZkRegister-1704-thread-1-processing-n:127.0.0.1:60138_px x:collection1
c:collection1) [n:127.0.0.1:60138_px c:collection1 s:shard2 r:core_node1
x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
[junit4] 2> 624546 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-2-001/cores/collection1
[junit4] 2> 624548 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-2-001
[junit4] 2> 624549 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 624551 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@55b2e83f{/px,null,AVAILABLE}
[junit4] 2> 624552 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@3557408b{HTTP/1.1,[http/1.1]}{127.0.0.1:60157}
[junit4] 2> 624552 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.Server Started @630254ms
[junit4] 2> 624552 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/tempDir-001/jetty2,
solrconfig=solrconfig.xml, hostContext=/px, hostPort=60156,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-2-001/cores}
[junit4] 2> 624552 ERROR
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 624553 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version
6.6.0
[junit4] 2> 624553 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 624553 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 624553 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-04-11T14:08:47.787Z
[junit4] 2> 624558 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 624558 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-2-001/solr.xml
[junit4] 2> 624570 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params:
socketTimeout=340000&connTimeout=45000&retry=true
[junit4] 2> 624571 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:60100/solr
[junit4] 2> 624589 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60156_px ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (0) -> (2)
[junit4] 2> 624594 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60156_px ] o.a.s.c.Overseer Overseer (id=null) closing
[junit4] 2> 624598 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60156_px ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:60156_px
[junit4] 2> 624601 INFO
(zkCallback-511-thread-1-processing-n:127.0.0.1:60122_px) [n:127.0.0.1:60122_px
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 624603 INFO
(zkCallback-526-thread-1-processing-n:127.0.0.1:60156_px) [n:127.0.0.1:60156_px
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 624602 INFO (zkCallback-515-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 624602 INFO
(zkCallback-520-thread-1-processing-n:127.0.0.1:60138_px) [n:127.0.0.1:60138_px
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 624692 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60156_px ] o.a.s.c.CorePropertiesLocator Found 1 core
definitions underneath
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-2-001/cores
[junit4] 2> 624692 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60156_px ] o.a.s.c.CorePropertiesLocator Cores are:
[collection1]
[junit4] 2> 624693 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1]
o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4
transient cores
[junit4] 2> 624695 INFO
(OverseerStateUpdate-97774450946867204-127.0.0.1:60122_px-n_0000000000)
[n:127.0.0.1:60122_px ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard1
[junit4] 2> 625717 WARN
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.c.Config Beginning
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
[junit4] 2> 625717 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 6.6.0
[junit4] 2> 625732 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 625821 WARN
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 625823 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.s.IndexSchema Loaded
schema test/1.0 with uniqueid field id
[junit4] 2> 625845 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection
collection1, trusted=true
[junit4] 2> 625846 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.c.SolrCore
solr.RecoveryStrategy.Builder
[junit4] 2> 625846 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-2-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-2-001/cores/collection1/data/]
[junit4] 2> 625846 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.c.JmxMonitoredMap
JMX monitoring is enabled. Adding Solr mbeans to JMX Server:
com.sun.jmx.mbeanserver.JmxMBeanServer@4fb7b494
[junit4] 2> 625848 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=21, maxMergeAtOnceExplicit=24,
maxMergedSegmentMB=87.2119140625, floorSegmentMB=1.4560546875,
forceMergeDeletesPctAllowed=15.56625461044676, segmentsPerTier=37.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
[junit4] 2> 625853 WARN
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.c.RequestHandlers
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class
= DumpRequestHandler,attributes = {initParams=a, name=/dump,
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 625915 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.u.UpdateHandler
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 625915 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 625923 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 625923 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 625924 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=46, maxMergeAtOnceExplicit=42,
maxMergedSegmentMB=97.7275390625, floorSegmentMB=2.0087890625,
forceMergeDeletesPctAllowed=5.3783988875795306, segmentsPerTier=32.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
[junit4] 2> 625924 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.s.SolrIndexSearcher
Opening [Searcher@29ab87c2[collection1] main]
[junit4] 2> 625926 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 625927 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 625927 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000
[junit4] 2> 625928 INFO
(searcherExecutor-1721-thread-1-processing-n:127.0.0.1:60156_px x:collection1
c:collection1) [n:127.0.0.1:60156_px c:collection1 x:collection1]
o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@29ab87c2[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 625928 INFO
(coreLoadExecutor-1720-thread-1-processing-n:127.0.0.1:60156_px)
[n:127.0.0.1:60156_px c:collection1 x:collection1] o.a.s.u.UpdateLog Could
not find max version in index or recent updates, using new clock
1564391221925773312
[junit4] 2> 625939 INFO
(coreZkRegister-1715-thread-1-processing-n:127.0.0.1:60156_px x:collection1
c:collection1) [n:127.0.0.1:60156_px c:collection1 s:shard1 r:core_node2
x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to
continue.
[junit4] 2> 625939 INFO
(coreZkRegister-1715-thread-1-processing-n:127.0.0.1:60156_px x:collection1
c:collection1) [n:127.0.0.1:60156_px c:collection1 s:shard1 r:core_node2
x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try
and sync
[junit4] 2> 625939 INFO
(coreZkRegister-1715-thread-1-processing-n:127.0.0.1:60156_px x:collection1
c:collection1) [n:127.0.0.1:60156_px c:collection1 s:shard1 r:core_node2
x:collection1] o.a.s.c.SyncStrategy Sync replicas to
http://127.0.0.1:60156/px/collection1/
[junit4] 2> 625939 INFO
(coreZkRegister-1715-thread-1-processing-n:127.0.0.1:60156_px x:collection1
c:collection1) [n:127.0.0.1:60156_px c:collection1 s:shard1 r:core_node2
x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
[junit4] 2> 625939 INFO
(coreZkRegister-1715-thread-1-processing-n:127.0.0.1:60156_px x:collection1
c:collection1) [n:127.0.0.1:60156_px c:collection1 s:shard1 r:core_node2
x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:60156/px/collection1/ has
no replicas
[junit4] 2> 625939 INFO
(coreZkRegister-1715-thread-1-processing-n:127.0.0.1:60156_px x:collection1
c:collection1) [n:127.0.0.1:60156_px c:collection1 s:shard1 r:core_node2
x:collection1] o.a.s.c.ShardLeaderElectionContext Found all replicas
participating in election, clear LIR
[junit4] 2> 625946 INFO
(coreZkRegister-1715-thread-1-processing-n:127.0.0.1:60156_px x:collection1
c:collection1) [n:127.0.0.1:60156_px c:collection1 s:shard1 r:core_node2
x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader:
http://127.0.0.1:60156/px/collection1/ shard1
[junit4] 2> 626115 INFO
(coreZkRegister-1715-thread-1-processing-n:127.0.0.1:60156_px x:collection1
c:collection1) [n:127.0.0.1:60156_px c:collection1 s:shard1 r:core_node2
x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
[junit4] 2> 626987 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-3-001/cores/collection1
[junit4] 2> 626997 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-3-001
[junit4] 2> 626997 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 627000 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@fa016fc{/px,null,AVAILABLE}
[junit4] 2> 627000 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@433f9e2f{HTTP/1.1,[http/1.1]}{127.0.0.1:60162}
[junit4] 2> 627000 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.e.j.s.Server Started @632702ms
[junit4] 2> 627000 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/tempDir-001/jetty3,
solrconfig=solrconfig.xml, hostContext=/px, hostPort=60161,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-3-001/cores}
[junit4] 2> 627001 ERROR
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 627002 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version
6.6.0
[junit4] 2> 627002 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 627002 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 627002 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-04-11T14:08:50.236Z
[junit4] 2> 627008 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 627008 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-3-001/solr.xml
[junit4] 2> 627072 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params:
socketTimeout=340000&connTimeout=45000&retry=true
[junit4] 2> 627073 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:60100/solr
[junit4] 2> 627093 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60161_px ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (0) -> (3)
[junit4] 2> 627099 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60161_px ] o.a.s.c.Overseer Overseer (id=null) closing
[junit4] 2> 627103 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60161_px ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:60161_px
[junit4] 2> 627106 INFO (zkCallback-515-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 627107 INFO
(zkCallback-532-thread-1-processing-n:127.0.0.1:60161_px) [n:127.0.0.1:60161_px
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 627107 INFO
(zkCallback-511-thread-2-processing-n:127.0.0.1:60122_px) [n:127.0.0.1:60122_px
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 627106 INFO
(zkCallback-520-thread-1-processing-n:127.0.0.1:60138_px) [n:127.0.0.1:60138_px
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 627106 INFO
(zkCallback-526-thread-1-processing-n:127.0.0.1:60156_px) [n:127.0.0.1:60156_px
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 627221 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60161_px ] o.a.s.c.CorePropertiesLocator Found 1 core
definitions underneath
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-3-001/cores
[junit4] 2> 627221 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785])
[n:127.0.0.1:60161_px ] o.a.s.c.CorePropertiesLocator Cores are:
[collection1]
[junit4] 2> 627221 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1]
o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4
transient cores
[junit4] 2> 627224 INFO
(OverseerStateUpdate-97774450946867204-127.0.0.1:60122_px-n_0000000000)
[n:127.0.0.1:60122_px ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard2
[junit4] 2> 628244 WARN
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.c.Config Beginning
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
[junit4] 2> 628244 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 6.6.0
[junit4] 2> 628256 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 628356 WARN
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 628358 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.s.IndexSchema Loaded
schema test/1.0 with uniqueid field id
[junit4] 2> 628380 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection
collection1, trusted=true
[junit4] 2> 628381 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.c.SolrCore
solr.RecoveryStrategy.Builder
[junit4] 2> 628381 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-3-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001/shard-3-001/cores/collection1/data/]
[junit4] 2> 628381 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.c.JmxMonitoredMap
JMX monitoring is enabled. Adding Solr mbeans to JMX Server:
com.sun.jmx.mbeanserver.JmxMBeanServer@4fb7b494
[junit4] 2> 628382 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=21, maxMergeAtOnceExplicit=24,
maxMergedSegmentMB=87.2119140625, floorSegmentMB=1.4560546875,
forceMergeDeletesPctAllowed=15.56625461044676, segmentsPerTier=37.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
[junit4] 2> 628388 WARN
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.c.RequestHandlers
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class
= DumpRequestHandler,attributes = {initParams=a, name=/dump,
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 628452 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.u.UpdateHandler
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 628452 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 628453 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 628453 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 628454 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=46, maxMergeAtOnceExplicit=42,
maxMergedSegmentMB=97.7275390625, floorSegmentMB=2.0087890625,
forceMergeDeletesPctAllowed=5.3783988875795306, segmentsPerTier=32.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
[junit4] 2> 628454 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.s.SolrIndexSearcher
Opening [Searcher@2b6f082f[collection1] main]
[junit4] 2> 628456 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 628457 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 628457 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000
[junit4] 2> 628459 INFO
(searcherExecutor-1732-thread-1-processing-n:127.0.0.1:60161_px x:collection1
c:collection1) [n:127.0.0.1:60161_px c:collection1 x:collection1]
o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@2b6f082f[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 628459 INFO
(coreLoadExecutor-1731-thread-1-processing-n:127.0.0.1:60161_px)
[n:127.0.0.1:60161_px c:collection1 x:collection1] o.a.s.u.UpdateLog Could
not find max version in index or recent updates, using new clock
1564391224579719168
[junit4] 2> 628465 INFO
(coreZkRegister-1726-thread-1-processing-n:127.0.0.1:60161_px x:collection1
c:collection1) [n:127.0.0.1:60161_px c:collection1 s:shard2 r:core_node3
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
[junit4] 2> 628465 INFO
(updateExecutor-529-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running
recovery
[junit4] 2> 628466 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery
process. recoveringAfterStartup=true
[junit4] 2> 628467 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ######
startupVersions=[[]]
[junit4] 2> 628467 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering
updates. core=[collection1]
[junit4] 2> 628467 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer
updates. FSUpdateLog{state=ACTIVE, tlog=null}
[junit4] 2> 628468 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state
of core [collection1] as recovering, leader is
[http://127.0.0.1:60138/px/collection1/] and I am
[http://127.0.0.1:60161/px/collection1/]
[junit4] 2> 628473 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep
recovery command to [http://127.0.0.1:60138/px]; [WaitForState:
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:60161_px&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
[junit4] 2> 628476 INFO (SocketProxy-Acceptor-60138) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60166,localport=60138], receiveBufferSize:408300
[junit4] 2> 628485 INFO (SocketProxy-Acceptor-60138) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60139,localport=60167], receiveBufferSize=408300
[junit4] 2> 628486 INFO (qtp1451388136-3228) [n:127.0.0.1:60138_px ]
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state:
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
[junit4] 2> 628487 INFO (qtp1451388136-3228) [n:127.0.0.1:60138_px ]
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1
(shard2 of collection1) have state: recovering
[junit4] 2> 628487 INFO (qtp1451388136-3228) [n:127.0.0.1:60138_px ]
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1,
shard=shard2, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader?
true, live=true, checkLive=true, currentState=down, localState=active,
nodeName=127.0.0.1:60161_px, coreNodeName=core_node3,
onlyIfActiveCheckResult=false, nodeProps:
core_node3:{"core":"collection1","base_url":"http://127.0.0.1:60161/px","node_name":"127.0.0.1:60161_px","state":"down"}
[junit4] 2> 628943 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.SolrTestCaseJ4 ###Starting test
[junit4] 2> 628943 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait
30000 for each attempt
[junit4] 2> 628943 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection:
collection1 failOnTimeout:true timeout (sec):30000
[junit4] 2> 629487 INFO (qtp1451388136-3228) [n:127.0.0.1:60138_px ]
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1,
shard=shard2, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader?
true, live=true, checkLive=true, currentState=recovering, localState=active,
nodeName=127.0.0.1:60161_px, coreNodeName=core_node3,
onlyIfActiveCheckResult=false, nodeProps:
core_node3:{"core":"collection1","base_url":"http://127.0.0.1:60161/px","node_name":"127.0.0.1:60161_px","state":"recovering"}
[junit4] 2> 629488 INFO (qtp1451388136-3228) [n:127.0.0.1:60138_px ]
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering,
checkLive: true, onlyIfLeader: true for: 1 seconds.
[junit4] 2> 629488 INFO (qtp1451388136-3228) [n:127.0.0.1:60138_px ]
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores
params={nodeName=127.0.0.1:60161_px&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
status=0 QTime=1001
[junit4] 2> 629989 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to
PeerSync from [http://127.0.0.1:60138/px/collection1/] -
recoveringAfterStartup=[true]
[junit4] 2> 629991 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync:
core=collection1 url=http://127.0.0.1:60161/px START
replicas=[http://127.0.0.1:60138/px/collection1/] nUpdates=100
[junit4] 2> 629993 INFO (SocketProxy-Acceptor-60138) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60168,localport=60138], receiveBufferSize:408300
[junit4] 2> 629994 INFO (SocketProxy-Acceptor-60138) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60139,localport=60169], receiveBufferSize=408300
[junit4] 2> 629996 INFO (qtp1451388136-3230) [n:127.0.0.1:60138_px
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 629996 INFO (qtp1451388136-3230) [n:127.0.0.1:60138_px
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.S.Request
[collection1] webapp=/px path=/get
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
status=0 QTime=0
[junit4] 2> 629997 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint
millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 629997 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync.
No need to do a PeerSync
[junit4] 2> 629997 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 629998 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No
uncommitted changes. Skipping IW.commit.
[junit4] 2> 629998 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 629998 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of
recovery was successful.
[junit4] 2> 629998 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates
buffered during PeerSync.
[junit4] 2> 629998 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
[junit4] 2> 629998 INFO
(recoveryExecutor-530-thread-1-processing-n:127.0.0.1:60161_px x:collection1
s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:60161_px c:collection1
s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as
Active after recovery.
[junit4] 2> 630944 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
[junit4] 2> 630945 INFO (SocketProxy-Acceptor-60122) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60170,localport=60122], receiveBufferSize:408300
[junit4] 2> 630948 INFO (SocketProxy-Acceptor-60122) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60123,localport=60171], receiveBufferSize=408300
[junit4] 2> 630951 INFO (qtp1494775286-3190) [n:127.0.0.1:60122_px
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 630952 INFO (qtp1494775286-3190) [n:127.0.0.1:60122_px
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
[junit4] 2> 630953 INFO (qtp1494775286-3190) [n:127.0.0.1:60122_px
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 end_commit_flush
[junit4] 2> 630953 INFO (qtp1494775286-3190) [n:127.0.0.1:60122_px
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/px path=/update
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
0 2
[junit4] 2> 630954 INFO (SocketProxy-Acceptor-60156) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60172,localport=60156], receiveBufferSize:408300
[junit4] 2> 630955 INFO (SocketProxy-Acceptor-60156) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60157,localport=60173], receiveBufferSize=408300
[junit4] 2> 630961 INFO (SocketProxy-Acceptor-60161) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60175,localport=60161], receiveBufferSize:408300
[junit4] 2> 630961 INFO (SocketProxy-Acceptor-60138) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60174,localport=60138], receiveBufferSize:408300
[junit4] 2> 630962 INFO (SocketProxy-Acceptor-60156) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60176,localport=60156], receiveBufferSize:408300
[junit4] 2> 630964 INFO (SocketProxy-Acceptor-60156) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60157,localport=60179], receiveBufferSize=408300
[junit4] 2> 630965 INFO (SocketProxy-Acceptor-60138) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60139,localport=60178], receiveBufferSize=408300
[junit4] 2> 630966 INFO (SocketProxy-Acceptor-60161) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60162,localport=60177], receiveBufferSize=408300
[junit4] 2> 630968 INFO (qtp2013245539-3257) [n:127.0.0.1:60156_px
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 630969 INFO (qtp1451388136-3232) [n:127.0.0.1:60138_px
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 630969 INFO (qtp1451388136-3232) [n:127.0.0.1:60138_px
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 630969 INFO (qtp2013245539-3257) [n:127.0.0.1:60156_px
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 630970 INFO (qtp1820604187-3285) [n:127.0.0.1:60161_px
c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 630971 INFO (qtp2013245539-3257) [n:127.0.0.1:60156_px
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 630972 INFO (qtp1451388136-3232) [n:127.0.0.1:60138_px
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 630973 INFO (qtp1820604187-3285) [n:127.0.0.1:60161_px
c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 630973 INFO (qtp1451388136-3232) [n:127.0.0.1:60138_px
c:collection1 s:shard2 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/px path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:60156/px/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 3
[junit4] 2> 630973 INFO (qtp1820604187-3285) [n:127.0.0.1:60161_px
c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 630973 INFO (qtp2013245539-3257) [n:127.0.0.1:60156_px
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/px path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:60156/px/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 5
[junit4] 2> 630973 INFO (qtp1820604187-3285) [n:127.0.0.1:60161_px
c:collection1 s:shard2 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/px path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:60156/px/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 3
[junit4] 2> 630977 INFO (qtp2013245539-3256) [n:127.0.0.1:60156_px
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/px path=/update
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
0 18
[junit4] 2> 630979 INFO (SocketProxy-Acceptor-60138) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60180,localport=60138], receiveBufferSize:408300
[junit4] 2> 630981 INFO (SocketProxy-Acceptor-60138) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60139,localport=60181], receiveBufferSize=408300
[junit4] 2> 630983 INFO (qtp1451388136-3228) [n:127.0.0.1:60138_px
c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.S.Request
[collection1] webapp=/px path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=1
[junit4] 2> 630985 INFO (SocketProxy-Acceptor-60161) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60182,localport=60161], receiveBufferSize:408300
[junit4] 2> 630986 INFO (SocketProxy-Acceptor-60161) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60162,localport=60183], receiveBufferSize=408300
[junit4] 2> 630989 INFO (qtp1820604187-3286) [n:127.0.0.1:60161_px
c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.S.Request
[collection1] webapp=/px path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=0
[junit4] 2> 630990 INFO (SocketProxy-Acceptor-60156) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60184,localport=60156], receiveBufferSize:408300
[junit4] 2> 630992 INFO (SocketProxy-Acceptor-60156) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60157,localport=60185], receiveBufferSize=408300
[junit4] 2> 630994 INFO (qtp2013245539-3259) [n:127.0.0.1:60156_px
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request
[collection1] webapp=/px path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=0
[junit4] 2> 632998 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase Creating collection with stateFormat=1:
c8n_1x3_lf
[junit4] 2> 633000 INFO (SocketProxy-Acceptor-60156) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60186,localport=60156], receiveBufferSize:408300
[junit4] 2> 633000 INFO (SocketProxy-Acceptor-60156) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60157,localport=60187], receiveBufferSize=408300
[junit4] 2> 633002 INFO (qtp2013245539-3254) [n:127.0.0.1:60156_px ]
o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params
replicationFactor=3&maxShardsPerNode=1&name=c8n_1x3_lf&action=CREATE&numShards=1&stateFormat=1&wt=javabin&version=2
and sendToOCPQueue=true
[junit4] 2> 633008 INFO
(OverseerThreadFactory-1696-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px ] o.a.s.c.CreateCollectionCmd Create collection
c8n_1x3_lf
[junit4] 2> 633009 INFO
(OverseerThreadFactory-1696-thread-1-processing-n:127.0.0.1:60122_px)
[n:127.0.0.1:60122_px ] o.a.s.c.CreateCollectionCmd Only one config set
found in zk - using it:conf1
[junit4] 2> 633121 INFO (SocketProxy-Acceptor-60156) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60190,localport=60156], receiveBufferSize:408300
[junit4] 2> 633121 INFO (SocketProxy-Acceptor-60138) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60188,localport=60138], receiveBufferSize:408300
[junit4] 2> 633121 INFO (SocketProxy-Acceptor-60161) [ ]
o.a.s.c.SocketProxy accepted
Socket[addr=/127.0.0.1,port=60189,localport=60161], receiveBufferSize:408300
[junit4] 2> 633123 INFO (SocketProxy-Acceptor-60161) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60162,localport=60193], receiveBufferSize=408300
[junit4] 2> 633125 INFO (qtp1820604187-3283) [n:127.0.0.1:60161_px ]
o.a.s.h.a.CoreAdminOperation core create command
qt=/admin/cores&collection.configName=conf1&newCollection=true&name=c8n_1x3_lf_shard1_replica3&action=CREATE&numShards=1&collection=c8n_1x3_lf&shard=shard1&wt=javabin&version=2
[junit4] 2> 633125 INFO (SocketProxy-Acceptor-60138) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60139,localport=60192], receiveBufferSize=408300
[junit4] 2> 633125 INFO (SocketProxy-Acceptor-60156) [ ]
o.a.s.c.SocketProxy proxy connection
Socket[addr=/127.0.0.1,port=60157,localport=60191], receiveBufferSize=408300
[junit4] 2> 633128 INFO (qtp2013245539-3258) [n:127.0.0.1:60156_px ]
o.a.s.h.a.CoreAdminOperation core create command
qt=/admin/cores&collection.configName=conf1&newCollection=true&name=c8n_1x3_lf_shard1_replica2&action=CREATE&numShards=1&collection=c8n_1x3_lf&shard=shard1&wt=javabin&version=2
[junit4] 2> 633128 INFO (qtp1451388136-3229) [n:127.0.0.1:60138_px ]
o.a.s.h.a.CoreAdminOperation core create command
qt=/admin/cores&collection.configName=conf1&newCollection=true&name=c8n_1x3_lf_shard1_replica1&action=CREATE&numShards=1&collection=c8n_1x3_lf&shard=shard1&wt=javabin&version=2
[junit4] 2> 634170 WARN (qtp1820604187-3283) [n:127.0.0.1:60161_px
c:c8n_1x3_lf s:shard1 x:c8n_1x3_lf_shard1_replica3] o.a.s.c.Config Beginning
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
[junit4] 2> 634170 WARN (qtp1451388136-3229) [n:127.0.0.1:60138_px
c:c8n_1x3_lf s:shard1 x:c8n_1x3_lf_shard1_replica1] o.a.s.c.Config Beginning
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
[junit4] 2> 634170 WARN (qtp2013245539-3258) [n:127.0.0.1:60156_px
c:c8n_1x3_lf s:shard1 x:c8n_1x3_lf_shard1_replica2] o.a.s.c.Config Beginning
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
[junit4] 2> 634171 INFO (qtp1820604187-3283) [n:127.0.0.1:60161_px
c:c8n_1x3_lf s:shard1 x:c8n_1x3_lf_shard1_replica3] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 6.6.0
[junit4] 2> 634171 INFO (qtp1451388136-3229) [n:127.0.0.1:60138_px c:c
[...truncated too long message...]
ndler.java:462)
[junit4] 2> at
org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:134)
[junit4] 2> at
org.eclipse.jetty.server.Server.handle(Server.java:534)
[junit4] 2> at
org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:320)
[junit4] 2> at
org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:251)
[junit4] 2> at
org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:273)
[junit4] 2> at
org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:95)
[junit4] 2> at
org.eclipse.jetty.io.SelectChannelEndPoint$2.run(SelectChannelEndPoint.java:93)
[junit4] 2> at
org.eclipse.jetty.util.thread.strategy.ExecuteProduceConsume.executeProduceConsume(ExecuteProduceConsume.java:303)
[junit4] 2> at
org.eclipse.jetty.util.thread.strategy.ExecuteProduceConsume.produceConsume(ExecuteProduceConsume.java:148)
[junit4] 2> at
org.eclipse.jetty.util.thread.strategy.ExecuteProduceConsume.run(ExecuteProduceConsume.java:136)
[junit4] 2> at
org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:671)
[junit4] 2> at
org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:589)
[junit4] 2> at java.lang.Thread.run(Thread.java:745)
[junit4] 2>
[junit4] 2> 751124 INFO (qtp2013245539-3259) [n:127.0.0.1:60156_px ]
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores
params={nodeName=127.0.0.1:60138_px&onlyIfLeaderActive=true&core=c8n_1x3_lf_shard1_replica2&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
status=400 QTime=86573
[junit4] 2> 751124 INFO (qtp2013245539-3254) [n:127.0.0.1:60156_px ]
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores
params={nodeName=127.0.0.1:60138_px&onlyIfLeaderActive=true&core=c8n_1x3_lf_shard1_replica2&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
status=400 QTime=96583
[junit4] 2> 751124 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.ChaosMonkey monkey: stop shard! 60161
[junit4] 2> 751126 INFO
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.ZkTestServer connecting to 127.0.0.1:60100 60100
[junit4] 2> 752286 INFO (Thread-747) [ ] o.a.s.c.ZkTestServer
connecting to 127.0.0.1:60100 60100
[junit4] 2> 761275 WARN (Thread-747) [ ] o.a.s.c.ZkTestServer Watch
limit violations:
[junit4] 2> Maximum concurrent create/delete watches above limit:
[junit4] 2>
[junit4] 2> 5 /solr/aliases.json
[junit4] 2> 5 /solr/clusterprops.json
[junit4] 2> 4 /solr/security.json
[junit4] 2> 4 /solr/configs/conf1
[junit4] 2> 3 /solr/collections/c8n_1x3_lf/state.json
[junit4] 2> 3 /solr/collections/collection1/state.json
[junit4] 2>
[junit4] 2> Maximum concurrent data watches above limit:
[junit4] 2>
[junit4] 2> 5 /solr/clusterstate.json
[junit4] 2>
[junit4] 2> Maximum concurrent children watches above limit:
[junit4] 2>
[junit4] 2> 127 /solr/overseer/collection-queue-work
[junit4] 2> 56 /solr/overseer/queue
[junit4] 2> 7 /solr/overseer/queue-work
[junit4] 2> 5 /solr/live_nodes
[junit4] 2> 5 /solr/collections
[junit4] 2>
[junit4] 2> 761275 WARN
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.SocketProxy Closing 1 connections to: http://127.0.0.1:60122/px,
target: http://127.0.0.1:60123/px
[junit4] 2> 761276 WARN
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.SocketProxy Closing 0 connections to: http://127.0.0.1:60138/px,
target: http://127.0.0.1:60139/px
[junit4] 2> 761276 WARN
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.SocketProxy Closing 0 connections to: http://127.0.0.1:60161/px,
target: http://127.0.0.1:60162/px
[junit4] 2> 761276 WARN
(TEST-LeaderFailoverAfterPartitionTest.test-seed#[4C69E9CF74932785]) [ ]
o.a.s.c.SocketProxy Closing 16 connections to: http://127.0.0.1:60156/px,
target: http://127.0.0.1:60157/px
[junit4] 2> NOTE: reproduce with: ant test
-Dtestcase=LeaderFailoverAfterPartitionTest -Dtests.method=test
-Dtests.seed=4C69E9CF74932785 -Dtests.slow=true -Dtests.locale=ga-IE
-Dtests.timezone=Asia/Krasnoyarsk -Dtests.asserts=true
-Dtests.file.encoding=US-ASCII
[junit4] FAILURE 142s J0 | LeaderFailoverAfterPartitionTest.test <<<
[junit4] > Throwable #1: java.lang.AssertionError: Expected 2 of 3
replicas to be active but only found 1;
[core_node3:{"core":"c8n_1x3_lf_shard1_replica2","base_url":"http://127.0.0.1:60156/px","node_name":"127.0.0.1:60156_px","state":"active","leader":"true"}];
clusterState: DocCollection(c8n_1x3_lf//clusterstate.json/30)={
[junit4] > "replicationFactor":"3",
[junit4] > "shards":{"shard1":{
[junit4] > "range":"80000000-7fffffff",
[junit4] > "state":"active",
[junit4] > "replicas":{
[junit4] > "core_node1":{
[junit4] > "core":"c8n_1x3_lf_shard1_replica3",
[junit4] > "base_url":"http://127.0.0.1:60161/px",
[junit4] > "node_name":"127.0.0.1:60161_px",
[junit4] > "state":"down"},
[junit4] > "core_node2":{
[junit4] > "state":"down",
[junit4] > "base_url":"http://127.0.0.1:60138/px",
[junit4] > "core":"c8n_1x3_lf_shard1_replica1",
[junit4] > "node_name":"127.0.0.1:60138_px"},
[junit4] > "core_node3":{
[junit4] > "core":"c8n_1x3_lf_shard1_replica2",
[junit4] > "base_url":"http://127.0.0.1:60156/px",
[junit4] > "node_name":"127.0.0.1:60156_px",
[junit4] > "state":"active",
[junit4] > "leader":"true"}}}},
[junit4] > "router":{"name":"compositeId"},
[junit4] > "maxShardsPerNode":"1",
[junit4] > "autoAddReplicas":"false"}
[junit4] > at
__randomizedtesting.SeedInfo.seed([4C69E9CF74932785:C43DD615DA6F4A7D]:0)
[junit4] > at
org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.testRf3WithLeaderFailover(LeaderFailoverAfterPartitionTest.java:168)
[junit4] > at
org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.test(LeaderFailoverAfterPartitionTest.java:55)
[junit4] > at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:992)
[junit4] > at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:967)
[junit4] > at java.lang.Thread.run(Thread.java:745)
[junit4] 2> 761280 INFO
(SUITE-LeaderFailoverAfterPartitionTest-seed#[4C69E9CF74932785]-worker) [ ]
o.a.s.SolrTestCaseJ4 ###deleteCore
[junit4] 2> NOTE: leaving temporary files on disk at:
/Users/jenkins/workspace/Lucene-Solr-6.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.cloud.LeaderFailoverAfterPartitionTest_4C69E9CF74932785-001
[junit4] 2> NOTE: test params are: codec=Asserting(Lucene62):
{range_facet_l_dv=PostingsFormat(name=Memory doPackFST= true),
multiDefault=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))),
a_t=PostingsFormat(name=Memory doPackFST= false),
intDefault=PostingsFormat(name=Memory doPackFST= false),
id=PostingsFormat(name=Memory doPackFST= true),
range_facet_i_dv=PostingsFormat(name=Memory doPackFST= false), text=FST50,
range_facet_l=PostingsFormat(name=Memory doPackFST= false),
timestamp=PostingsFormat(name=Memory doPackFST= false)},
docValues:{range_facet_l_dv=DocValuesFormat(name=Direct),
_version_=DocValuesFormat(name=Lucene54),
range_facet_i_dv=DocValuesFormat(name=Lucene54),
intDvoDefault=DocValuesFormat(name=Lucene54),
timestamp=DocValuesFormat(name=Lucene54)}, maxPointsInLeafNode=1068,
maxMBSortInHeap=5.101976148949538,
sim=RandomSimilarity(queryNorm=true,coord=no): {}, locale=ga-IE,
timezone=Asia/Krasnoyarsk
[junit4] 2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_121
(64-bit)/cpus=3,threads=1,free=15847992,total=169549824
[junit4] 2> NOTE: All tests run in this JVM: [TestMaxScoreQueryParser,
TestSolrDeletionPolicy1, ConcurrentDeleteAndCreateCollectionTest,
CdcrRequestHandlerTest, TimeZoneUtilsTest, TestNRTOpen, TestUpdate,
BlockDirectoryTest, TestClusterStateMutator, TestWordDelimiterFilterFactory,
TestSSLRandomization, PeerSyncWithIndexFingerprintCachingTest,
TestDistributedSearch, IndexSchemaRuntimeFieldTest, DateFieldTest,
BlockJoinFacetDistribTest, TestPointFields, MultiTermTest,
DirectSolrConnectionTest, TestLazyCores, TestCSVLoader, TestStressVersions,
TestDownShardTolerantSearch, CloneFieldUpdateProcessorFactoryTest,
TestDFISimilarityFactory, TestOverriddenPrefixQueryForCustomFieldType,
QueryEqualityTest, DistributedFacetPivotLongTailTest,
SharedFSAutoReplicaFailoverTest, TestInfoStreamLogging, DeleteReplicaTest,
TestCorePropertiesReload, CurrencyFieldXmlFileTest, TestXmlQParser,
SmileWriterTest, TestSolrDynamicMBean, TestMultiWordSynonyms,
TestRawTransformer, FileUtilsTest, TestSweetSpotSimilarityFactory,
TestMiniSolrCloudCluster, QueryResultKeyTest, SolrPluginUtilsTest,
UtilsToolTest, ChaosMonkeyNothingIsSafeTest, EnumFieldTest,
CoreAdminRequestStatusTest, DistributedQueueTest, SparseHLLTest,
CachingDirectoryFactoryTest, BadComponentTest, ReplicationFactorTest,
DocumentBuilderTest, TestCloudInspectUtil, TestExportWriter,
RequestLoggingTest, TestJsonRequest, TestStressLucene,
OverseerModifyCollectionTest, SchemaVersionSpecificBehaviorTest,
LeaderFailoverAfterPartitionTest]
[junit4] Completed [96/704 (1!)] on J0 in 141.85s, 1 test, 1 failure <<<
FAILURES!
[...truncated 45374 lines...]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]