Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-MacOSX/3847/
Java: 64bit/jdk1.8.0 -XX:-UseCompressedOops -XX:+UseConcMarkSweepGC
1 tests failed.
FAILED: org.apache.solr.cloud.PeerSyncReplicationTest.test
Error Message:
timeout waiting to see all nodes active
Stack Trace:
java.lang.AssertionError: timeout waiting to see all nodes active
at
__randomizedtesting.SeedInfo.seed([EC6CD25CB9E2753C:6438ED86171E18C4]:0)
at org.junit.Assert.fail(Assert.java:93)
at
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
at
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
at
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
at
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
at
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
at
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at java.lang.Thread.run(Thread.java:745)
Build Log:
[...truncated 11557 lines...]
[junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
[junit4] 2> Creating dataDir:
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/init-core-data-001
[junit4] 2> 1065935 INFO
(SUITE-PeerSyncReplicationTest-seed#[EC6CD25CB9E2753C]-worker) [ ]
o.a.s.SolrTestCaseJ4 Using TrieFields
[junit4] 2> 1065935 INFO
(SUITE-PeerSyncReplicationTest-seed#[EC6CD25CB9E2753C]-worker) [ ]
o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via:
@org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN)
w/ MAC_OS_X suppressed clientAuth
[junit4] 2> 1065935 INFO
(SUITE-PeerSyncReplicationTest-seed#[EC6CD25CB9E2753C]-worker) [ ]
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /fu/x
[junit4] 2> 1065937 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 1065938 INFO (Thread-1558) [ ] o.a.s.c.ZkTestServer client
port:0.0.0.0/0.0.0.0:0
[junit4] 2> 1065938 INFO (Thread-1558) [ ] o.a.s.c.ZkTestServer
Starting server
[junit4] 2> 1066041 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.ZkTestServer start zk server on port:64737
[junit4] 2> 1066070 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
to /configs/conf1/solrconfig.xml
[junit4] 2> 1066076 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/schema.xml
to /configs/conf1/schema.xml
[junit4] 2> 1066079 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
[junit4] 2> 1066081 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
to /configs/conf1/stopwords.txt
[junit4] 2> 1066084 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/protwords.txt
to /configs/conf1/protwords.txt
[junit4] 2> 1066087 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/currency.xml
to /configs/conf1/currency.xml
[junit4] 2> 1066090 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
to /configs/conf1/enumsConfig.xml
[junit4] 2> 1066092 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
to /configs/conf1/open-exchange-rates.json
[junit4] 2> 1066095 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
to /configs/conf1/mapping-ISOLatin1Accent.txt
[junit4] 2> 1066099 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
to /configs/conf1/old_synonyms.txt
[junit4] 2> 1066102 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
to /configs/conf1/synonyms.txt
[junit4] 2> 1067375 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/control-001/cores/collection1
[junit4] 2> 1067377 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1067379 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@5b05dc84{/fu/x,null,AVAILABLE}
[junit4] 2> 1067379 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@37f5c757{HTTP/1.1,[http/1.1]}{127.0.0.1:64744}
[junit4] 2> 1067379 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.Server Started @1069370ms
[junit4] 2> 1067379 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/tempDir-001/control/data,
hostContext=/fu/x, hostPort=64744,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/control-001/cores}
[junit4] 2> 1067379 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1067380 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
7.0.0
[junit4] 2> 1067380 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1067380 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1067380 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-02-21T18:51:41.222Z
[junit4] 2> 1067385 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1067385 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/control-001/solr.xml
[junit4] 2> 1067403 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:64737/solr
[junit4] 2> 1067449 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.Overseer Overseer (id=null) closing
[junit4] 2> 1067451 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.OverseerElectionContext I am going to be
the leader 127.0.0.1:64744_fu%2Fx
[junit4] 2> 1067453 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.Overseer Overseer
(id=97498110347313156-127.0.0.1:64744_fu%2Fx-n_0000000000) starting
[junit4] 2> 1067467 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:64744_fu%2Fx
[junit4] 2> 1067470 INFO
(zkCallback-1104-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (0) -> (1)
[junit4] 2> 1067560 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.CorePropertiesLocator Found 1 core
definitions underneath
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/control-001/cores
[junit4] 2> 1067560 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.CorePropertiesLocator Cores are:
[collection1]
[junit4] 2> 1067565 INFO
(OverseerStateUpdate-97498110347313156-127.0.0.1:64744_fu%2Fx-n_0000000000)
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.o.ReplicaMutator Assigning new node to
shard shard=shard1
[junit4] 2> 1068593 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
[junit4] 2> 1068612 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.s.IndexSchema [collection1] Schema name=test
[junit4] 2> 1068715 WARN
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.s.IndexSchema [collection1] default search field in schema is text.
WARNING: Deprecated, please use 'df' on request instead.
[junit4] 2> 1068718 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 1068742 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from
collection control_collection
[junit4] 2> 1068742 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/control-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/control-001/cores/collection1/data/]
[junit4] 2> 1068742 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@2876b27b
[junit4] 2> 1068746 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy:
minMergeSize=0, mergeFactor=10, maxMergeSize=2073899424,
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true,
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.1]
[junit4] 2> 1068768 WARN
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type =
requestHandler,name = /dump,class = DumpRequestHandler,attributes =
{initParams=a, name=/dump, class=DumpRequestHandler},args =
{defaults={a=A,b=B}}}
[junit4] 2> 1068835 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.u.UpdateHandler Using UpdateLog implementation:
org.apache.solr.update.UpdateLog
[junit4] 2> 1068835 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH
numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1068836 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 1068836 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 1068845 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy:
minMergeSize=1000, mergeFactor=15, maxMergeSize=9223372036854775807,
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true,
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.3282894849860287]
[junit4] 2> 1068845 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.s.SolrIndexSearcher Opening [Searcher@3f7608ae[collection1] main]
[junit4] 2> 1068848 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1068848 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1068850 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.h.ReplicationHandler Commits will be reserved for 10000
[junit4] 2> 1068851 INFO
(searcherExecutor-3690-thread-1-processing-n:127.0.0.1:64744_fu%2Fx
x:collection1 c:control_collection) [n:127.0.0.1:64744_fu%2Fx
c:control_collection x:collection1] o.a.s.c.SolrCore [collection1] Registered
new searcher Searcher@3f7608ae[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1068852 INFO
(coreLoadExecutor-3689-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx c:control_collection x:collection1]
o.a.s.u.UpdateLog Could not find max version in index or recent updates, using
new clock 1559969768610463744
[junit4] 2> 1068864 INFO
(coreZkRegister-3682-thread-1-processing-n:127.0.0.1:64744_fu%2Fx x:collection1
c:control_collection) [n:127.0.0.1:64744_fu%2Fx c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas
found to continue.
[junit4] 2> 1068864 INFO
(coreZkRegister-3682-thread-1-processing-n:127.0.0.1:64744_fu%2Fx x:collection1
c:control_collection) [n:127.0.0.1:64744_fu%2Fx c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new
leader - try and sync
[junit4] 2> 1068864 INFO
(coreZkRegister-3682-thread-1-processing-n:127.0.0.1:64744_fu%2Fx x:collection1
c:control_collection) [n:127.0.0.1:64744_fu%2Fx c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to
http://127.0.0.1:64744/fu/x/collection1/
[junit4] 2> 1068864 INFO
(coreZkRegister-3682-thread-1-processing-n:127.0.0.1:64744_fu%2Fx x:collection1
c:control_collection) [n:127.0.0.1:64744_fu%2Fx c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync
replicas to me
[junit4] 2> 1068864 INFO
(coreZkRegister-3682-thread-1-processing-n:127.0.0.1:64744_fu%2Fx x:collection1
c:control_collection) [n:127.0.0.1:64744_fu%2Fx c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy
http://127.0.0.1:64744/fu/x/collection1/ has no replicas
[junit4] 2> 1068864 INFO
(coreZkRegister-3682-thread-1-processing-n:127.0.0.1:64744_fu%2Fx x:collection1
c:control_collection) [n:127.0.0.1:64744_fu%2Fx c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Found all
replicas participating in election, clear LIR
[junit4] 2> 1068872 INFO
(coreZkRegister-3682-thread-1-processing-n:127.0.0.1:64744_fu%2Fx x:collection1
c:control_collection) [n:127.0.0.1:64744_fu%2Fx c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new
leader: http://127.0.0.1:64744/fu/x/collection1/ shard1
[junit4] 2> 1068964 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 1068966 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:64737/solr ready
[junit4] 2> 1068966 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection
loss:false
[junit4] 2> 1068993 INFO
(coreZkRegister-3682-thread-1-processing-n:127.0.0.1:64744_fu%2Fx x:collection1
c:control_collection) [n:127.0.0.1:64744_fu%2Fx c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery
necessary
[junit4] 2> 1069272 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-1-001/cores/collection1
[junit4] 2> 1069273 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-1-001
[junit4] 2> 1069274 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1069276 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@ab969c{/fu/x,null,AVAILABLE}
[junit4] 2> 1069276 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@2c47d54d{HTTP/1.1,[http/1.1]}{127.0.0.1:64750}
[junit4] 2> 1069276 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.Server Started @1071267ms
[junit4] 2> 1069276 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/tempDir-001/jetty1,
solrconfig=solrconfig.xml, hostContext=/fu/x, hostPort=64750,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-1-001/cores}
[junit4] 2> 1069277 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1069277 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
7.0.0
[junit4] 2> 1069277 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1069277 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1069277 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-02-21T18:51:43.119Z
[junit4] 2> 1069282 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1069282 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-1-001/solr.xml
[junit4] 2> 1069293 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:64737/solr
[junit4] 2> 1069311 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64750_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (0) -> (1)
[junit4] 2> 1069317 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64750_fu%2Fx ] o.a.s.c.Overseer Overseer (id=null) closing
[junit4] 2> 1069321 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64750_fu%2Fx ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:64750_fu%2Fx
[junit4] 2> 1069324 INFO
(zkCallback-1104-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (1) -> (2)
[junit4] 2> 1069324 INFO
(zkCallback-1113-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (1) -> (2)
[junit4] 2> 1069325 INFO (zkCallback-1108-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 1069417 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64750_fu%2Fx ] o.a.s.c.CorePropertiesLocator Found 1 core
definitions underneath
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-1-001/cores
[junit4] 2> 1069417 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64750_fu%2Fx ] o.a.s.c.CorePropertiesLocator Cores are:
[collection1]
[junit4] 2> 1069433 INFO
(OverseerStateUpdate-97498110347313156-127.0.0.1:64744_fu%2Fx-n_0000000000)
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.o.ReplicaMutator Assigning new node to
shard shard=shard1
[junit4] 2> 1070450 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1] o.a.s.c.SolrConfig
Using Lucene MatchVersion: 7.0.0
[junit4] 2> 1070464 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 1070570 WARN
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 1070572 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1] o.a.s.s.IndexSchema
Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 1070592 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection collection1
[junit4] 2> 1070592 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-1-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-1-001/cores/collection1/data/]
[junit4] 2> 1070593 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1]
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@2876b27b
[junit4] 2> 1070596 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy:
minMergeSize=0, mergeFactor=10, maxMergeSize=2073899424,
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true,
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.1]
[junit4] 2> 1070608 WARN
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1]
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type =
requestHandler,name = /dump,class = DumpRequestHandler,attributes =
{initParams=a, name=/dump, class=DumpRequestHandler},args =
{defaults={a=A,b=B}}}
[junit4] 2> 1070674 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1] o.a.s.u.UpdateHandler
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1070674 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1070675 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1] o.a.s.u.CommitTracker
Hard AutoCommit: disabled
[junit4] 2> 1070675 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1] o.a.s.u.CommitTracker
Soft AutoCommit: disabled
[junit4] 2> 1070675 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy:
minMergeSize=1000, mergeFactor=15, maxMergeSize=9223372036854775807,
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true,
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.3282894849860287]
[junit4] 2> 1070676 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1]
o.a.s.s.SolrIndexSearcher Opening [Searcher@4130b4f3[collection1] main]
[junit4] 2> 1070678 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1070678 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1070678 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1]
o.a.s.h.ReplicationHandler Commits will be reserved for 10000
[junit4] 2> 1070679 INFO
(searcherExecutor-3701-thread-1-processing-n:127.0.0.1:64750_fu%2Fx
x:collection1 c:collection1) [n:127.0.0.1:64750_fu%2Fx c:collection1
x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@4130b4f3[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1070680 INFO
(coreLoadExecutor-3700-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx c:collection1 x:collection1] o.a.s.u.UpdateLog
Could not find max version in index or recent updates, using new clock
1559969770527260672
[junit4] 2> 1070691 INFO
(coreZkRegister-3695-thread-1-processing-n:127.0.0.1:64750_fu%2Fx x:collection1
c:collection1) [n:127.0.0.1:64750_fu%2Fx c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to
continue.
[junit4] 2> 1070691 INFO
(coreZkRegister-3695-thread-1-processing-n:127.0.0.1:64750_fu%2Fx x:collection1
c:collection1) [n:127.0.0.1:64750_fu%2Fx c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try
and sync
[junit4] 2> 1070691 INFO
(coreZkRegister-3695-thread-1-processing-n:127.0.0.1:64750_fu%2Fx x:collection1
c:collection1) [n:127.0.0.1:64750_fu%2Fx c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.SyncStrategy Sync replicas to
http://127.0.0.1:64750/fu/x/collection1/
[junit4] 2> 1070691 INFO
(coreZkRegister-3695-thread-1-processing-n:127.0.0.1:64750_fu%2Fx x:collection1
c:collection1) [n:127.0.0.1:64750_fu%2Fx c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
[junit4] 2> 1070691 INFO
(coreZkRegister-3695-thread-1-processing-n:127.0.0.1:64750_fu%2Fx x:collection1
c:collection1) [n:127.0.0.1:64750_fu%2Fx c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:64750/fu/x/collection1/
has no replicas
[junit4] 2> 1070691 INFO
(coreZkRegister-3695-thread-1-processing-n:127.0.0.1:64750_fu%2Fx x:collection1
c:collection1) [n:127.0.0.1:64750_fu%2Fx c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext Found all replicas
participating in election, clear LIR
[junit4] 2> 1070699 INFO
(coreZkRegister-3695-thread-1-processing-n:127.0.0.1:64750_fu%2Fx x:collection1
c:collection1) [n:127.0.0.1:64750_fu%2Fx c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader:
http://127.0.0.1:64750/fu/x/collection1/ shard1
[junit4] 2> 1070811 INFO
(coreZkRegister-3695-thread-1-processing-n:127.0.0.1:64750_fu%2Fx x:collection1
c:collection1) [n:127.0.0.1:64750_fu%2Fx c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
[junit4] 2> 1071142 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-2-001/cores/collection1
[junit4] 2> 1071143 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-2-001
[junit4] 2> 1071144 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1071146 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@75926b3b{/fu/x,null,AVAILABLE}
[junit4] 2> 1071146 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@683f1b18{HTTP/1.1,[http/1.1]}{127.0.0.1:64755}
[junit4] 2> 1071146 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.Server Started @1073137ms
[junit4] 2> 1071146 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/tempDir-001/jetty2,
solrconfig=solrconfig.xml, hostContext=/fu/x, hostPort=64755,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-2-001/cores}
[junit4] 2> 1071147 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1071147 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version
7.0.0
[junit4] 2> 1071147 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1071148 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1071148 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-02-21T18:51:44.990Z
[junit4] 2> 1071152 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1071152 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-2-001/solr.xml
[junit4] 2> 1071164 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:64737/solr
[junit4] 2> 1071183 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64755_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (0) -> (2)
[junit4] 2> 1071191 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64755_fu%2Fx ] o.a.s.c.Overseer Overseer (id=null) closing
[junit4] 2> 1071195 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64755_fu%2Fx ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:64755_fu%2Fx
[junit4] 2> 1071198 INFO
(zkCallback-1104-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (2) -> (3)
[junit4] 2> 1071198 INFO
(zkCallback-1113-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (2) -> (3)
[junit4] 2> 1071198 INFO (zkCallback-1108-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1071200 INFO
(zkCallback-1119-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (2) -> (3)
[junit4] 2> 1071352 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64755_fu%2Fx ] o.a.s.c.CorePropertiesLocator Found 1 core
definitions underneath
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-2-001/cores
[junit4] 2> 1071352 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64755_fu%2Fx ] o.a.s.c.CorePropertiesLocator Cores are:
[collection1]
[junit4] 2> 1071356 INFO
(OverseerStateUpdate-97498110347313156-127.0.0.1:64744_fu%2Fx-n_0000000000)
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.o.ReplicaMutator Assigning new node to
shard shard=shard1
[junit4] 2> 1072375 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1] o.a.s.c.SolrConfig
Using Lucene MatchVersion: 7.0.0
[junit4] 2> 1072389 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 1072489 WARN
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 1072491 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1] o.a.s.s.IndexSchema
Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 1072510 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection collection1
[junit4] 2> 1072510 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-2-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-2-001/cores/collection1/data/]
[junit4] 2> 1072511 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1]
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@2876b27b
[junit4] 2> 1072514 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy:
minMergeSize=0, mergeFactor=10, maxMergeSize=2073899424,
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true,
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.1]
[junit4] 2> 1072527 WARN
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1]
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type =
requestHandler,name = /dump,class = DumpRequestHandler,attributes =
{initParams=a, name=/dump, class=DumpRequestHandler},args =
{defaults={a=A,b=B}}}
[junit4] 2> 1072589 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1] o.a.s.u.UpdateHandler
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1072589 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1072590 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1] o.a.s.u.CommitTracker
Hard AutoCommit: disabled
[junit4] 2> 1072590 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1] o.a.s.u.CommitTracker
Soft AutoCommit: disabled
[junit4] 2> 1072591 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy:
minMergeSize=1000, mergeFactor=15, maxMergeSize=9223372036854775807,
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true,
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.3282894849860287]
[junit4] 2> 1072592 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1]
o.a.s.s.SolrIndexSearcher Opening [Searcher@70a448be[collection1] main]
[junit4] 2> 1072593 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1072594 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1072594 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1]
o.a.s.h.ReplicationHandler Commits will be reserved for 10000
[junit4] 2> 1072595 INFO
(searcherExecutor-3712-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 c:collection1) [n:127.0.0.1:64755_fu%2Fx c:collection1
x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@70a448be[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1072596 INFO
(coreLoadExecutor-3711-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 x:collection1] o.a.s.u.UpdateLog
Could not find max version in index or recent updates, using new clock
1559969772536332288
[junit4] 2> 1072611 INFO
(coreZkRegister-3706-thread-1-processing-n:127.0.0.1:64755_fu%2Fx x:collection1
c:collection1) [n:127.0.0.1:64755_fu%2Fx c:collection1 s:shard1 r:core_node2
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
[junit4] 2> 1072612 INFO
(updateExecutor-1116-thread-1-processing-n:127.0.0.1:64755_fu%2Fx x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx c:collection1
s:shard1 r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running
recovery
[junit4] 2> 1072612 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
Starting recovery process. recoveringAfterStartup=true
[junit4] 2> 1072612 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
###### startupVersions=[[]]
[junit4] 2> 1072612 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
Begin buffering updates. core=[collection1]
[junit4] 2> 1072612 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting
to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
[junit4] 2> 1072612 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
Publishing state of core [collection1] as recovering, leader is
[http://127.0.0.1:64750/fu/x/collection1/] and I am
[http://127.0.0.1:64755/fu/x/collection1/]
[junit4] 2> 1072617 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
Sending prep recovery command to [http://127.0.0.1:64750/fu/x]; [WaitForState:
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:64755_fu%252Fx&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
[junit4] 2> 1072621 INFO (qtp1601158192-7315) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state:
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
[junit4] 2> 1072621 INFO (qtp1601158192-7315) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1
(shard1 of collection1) have state: recovering
[junit4] 2> 1072622 INFO (qtp1601158192-7315) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering):
collection=collection1, shard=shard1, thisCore=collection1,
leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true,
currentState=down, localState=active, nodeName=127.0.0.1:64755_fu%2Fx,
coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps:
core_node2:{"core":"collection1","base_url":"http://127.0.0.1:64755/fu/x","node_name":"127.0.0.1:64755_fu%2Fx","state":"down"}
[junit4] 2> 1073090 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-3-001/cores/collection1
[junit4] 2> 1073091 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-3-001
[junit4] 2> 1073092 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1073094 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@4c408276{/fu/x,null,AVAILABLE}
[junit4] 2> 1073094 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@28549003{HTTP/1.1,[http/1.1]}{127.0.0.1:64760}
[junit4] 2> 1073094 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.Server Started @1075085ms
[junit4] 2> 1073094 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/tempDir-001/jetty3,
solrconfig=solrconfig.xml, hostContext=/fu/x, hostPort=64760,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-3-001/cores}
[junit4] 2> 1073096 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1073096 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version
7.0.0
[junit4] 2> 1073096 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1073096 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1073096 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-02-21T18:51:46.938Z
[junit4] 2> 1073100 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1073100 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-3-001/solr.xml
[junit4] 2> 1073111 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:64737/solr
[junit4] 2> 1073127 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64760_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (0) -> (3)
[junit4] 2> 1073132 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64760_fu%2Fx ] o.a.s.c.Overseer Overseer (id=null) closing
[junit4] 2> 1073136 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64760_fu%2Fx ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:64760_fu%2Fx
[junit4] 2> 1073139 INFO
(zkCallback-1119-thread-1-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (3) -> (4)
[junit4] 2> 1073139 INFO
(zkCallback-1113-thread-1-processing-n:127.0.0.1:64750_fu%2Fx)
[n:127.0.0.1:64750_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (3) -> (4)
[junit4] 2> 1073139 INFO
(zkCallback-1104-thread-1-processing-n:127.0.0.1:64744_fu%2Fx)
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (3) -> (4)
[junit4] 2> 1073139 INFO (zkCallback-1108-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 1073140 INFO
(zkCallback-1126-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (3) -> (4)
[junit4] 2> 1073240 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64760_fu%2Fx ] o.a.s.c.CorePropertiesLocator Found 1 core
definitions underneath
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-3-001/cores
[junit4] 2> 1073240 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C])
[n:127.0.0.1:64760_fu%2Fx ] o.a.s.c.CorePropertiesLocator Cores are:
[collection1]
[junit4] 2> 1073248 INFO
(OverseerStateUpdate-97498110347313156-127.0.0.1:64744_fu%2Fx-n_0000000000)
[n:127.0.0.1:64744_fu%2Fx ] o.a.s.c.o.ReplicaMutator Assigning new node to
shard shard=shard1
[junit4] 2> 1073629 INFO (qtp1601158192-7315) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering):
collection=collection1, shard=shard1, thisCore=collection1,
leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true,
currentState=recovering, localState=active, nodeName=127.0.0.1:64755_fu%2Fx,
coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps:
core_node2:{"core":"collection1","base_url":"http://127.0.0.1:64755/fu/x","node_name":"127.0.0.1:64755_fu%2Fx","state":"recovering"}
[junit4] 2> 1073629 INFO (qtp1601158192-7315) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering,
checkLive: true, onlyIfLeader: true for: 1 seconds.
[junit4] 2> 1073629 INFO (qtp1601158192-7315) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores
params={nodeName=127.0.0.1:64755_fu%252Fx&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
status=0 QTime=1008
[junit4] 2> 1074272 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1] o.a.s.c.SolrConfig
Using Lucene MatchVersion: 7.0.0
[junit4] 2> 1074291 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 1074401 WARN
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 1074404 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1] o.a.s.s.IndexSchema
Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 1074426 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection collection1
[junit4] 2> 1074427 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-3-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001/shard-3-001/cores/collection1/data/]
[junit4] 2> 1074427 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1]
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@2876b27b
[junit4] 2> 1074430 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy:
minMergeSize=0, mergeFactor=10, maxMergeSize=2073899424,
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true,
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.1]
[junit4] 2> 1074442 WARN
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1]
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type =
requestHandler,name = /dump,class = DumpRequestHandler,attributes =
{initParams=a, name=/dump, class=DumpRequestHandler},args =
{defaults={a=A,b=B}}}
[junit4] 2> 1074506 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1] o.a.s.u.UpdateHandler
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1074506 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1074507 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1] o.a.s.u.CommitTracker
Hard AutoCommit: disabled
[junit4] 2> 1074507 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1] o.a.s.u.CommitTracker
Soft AutoCommit: disabled
[junit4] 2> 1074508 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy:
minMergeSize=1000, mergeFactor=15, maxMergeSize=9223372036854775807,
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true,
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.3282894849860287]
[junit4] 2> 1074508 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1]
o.a.s.s.SolrIndexSearcher Opening [Searcher@7b9a82ad[collection1] main]
[junit4] 2> 1074510 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1074511 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1074511 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1]
o.a.s.h.ReplicationHandler Commits will be reserved for 10000
[junit4] 2> 1074512 INFO
(searcherExecutor-3723-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 c:collection1) [n:127.0.0.1:64760_fu%2Fx c:collection1
x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@7b9a82ad[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1074513 INFO
(coreLoadExecutor-3722-thread-1-processing-n:127.0.0.1:64760_fu%2Fx)
[n:127.0.0.1:64760_fu%2Fx c:collection1 x:collection1] o.a.s.u.UpdateLog
Could not find max version in index or recent updates, using new clock
1559969774546452480
[junit4] 2> 1074518 INFO
(coreZkRegister-3717-thread-1-processing-n:127.0.0.1:64760_fu%2Fx x:collection1
c:collection1) [n:127.0.0.1:64760_fu%2Fx c:collection1 s:shard1 r:core_node3
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
[junit4] 2> 1074519 INFO
(updateExecutor-1123-thread-1-processing-n:127.0.0.1:64760_fu%2Fx x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx c:collection1
s:shard1 r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running
recovery
[junit4] 2> 1074519 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy
Starting recovery process. recoveringAfterStartup=true
[junit4] 2> 1074519 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy
###### startupVersions=[[]]
[junit4] 2> 1074519 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy
Begin buffering updates. core=[collection1]
[junit4] 2> 1074519 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting
to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
[junit4] 2> 1074519 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy
Publishing state of core [collection1] as recovering, leader is
[http://127.0.0.1:64750/fu/x/collection1/] and I am
[http://127.0.0.1:64760/fu/x/collection1/]
[junit4] 2> 1074525 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy
Sending prep recovery command to [http://127.0.0.1:64750/fu/x]; [WaitForState:
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:64760_fu%252Fx&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
[junit4] 2> 1074526 INFO (qtp1601158192-7319) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state:
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
[junit4] 2> 1074527 INFO (qtp1601158192-7319) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1
(shard1 of collection1) have state: recovering
[junit4] 2> 1074527 INFO (qtp1601158192-7319) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering):
collection=collection1, shard=shard1, thisCore=collection1,
leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true,
currentState=down, localState=active, nodeName=127.0.0.1:64760_fu%2Fx,
coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps:
core_node3:{"core":"collection1","base_url":"http://127.0.0.1:64760/fu/x","node_name":"127.0.0.1:64760_fu%2Fx","state":"down"}
[junit4] 2> 1074588 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.SolrTestCaseJ4 ###Starting test
[junit4] 2> 1074588 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30
for each attempt
[junit4] 2> 1074588 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection:
collection1 failOnTimeout:true timeout (sec):30
[junit4] 2> 1075528 INFO (qtp1601158192-7319) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering):
collection=collection1, shard=shard1, thisCore=collection1,
leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true,
currentState=recovering, localState=active, nodeName=127.0.0.1:64760_fu%2Fx,
coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps:
core_node3:{"core":"collection1","base_url":"http://127.0.0.1:64760/fu/x","node_name":"127.0.0.1:64760_fu%2Fx","state":"recovering"}
[junit4] 2> 1075528 INFO (qtp1601158192-7319) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering,
checkLive: true, onlyIfLeader: true for: 1 seconds.
[junit4] 2> 1075528 INFO (qtp1601158192-7319) [n:127.0.0.1:64750_fu%2Fx
] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores
params={nodeName=127.0.0.1:64760_fu%252Fx&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
status=0 QTime=1001
[junit4] 2> 1080631 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
Attempting to PeerSync from [http://127.0.0.1:64750/fu/x/collection1/] -
recoveringAfterStartup=[true]
[junit4] 2> 1080631 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync:
core=collection1 url=http://127.0.0.1:64755/fu/x START
replicas=[http://127.0.0.1:64750/fu/x/collection1/] nUpdates=1000
[junit4] 2> 1080633 INFO (qtp1601158192-7316) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1080633 INFO (qtp1601158192-7316) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request
[collection1] webapp=/fu/x path=/get
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
status=0 QTime=0
[junit4] 2> 1080635 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.IndexFingerprint
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1080635 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync We are
already in sync. No need to do a PeerSync
[junit4] 2> 1080635 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1080635 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1080635 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 1080635 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
PeerSync stage of recovery was successful.
[junit4] 2> 1080635 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
Replaying updates buffered during PeerSync.
[junit4] 2> 1080635 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No
replay needed.
[junit4] 2> 1080635 INFO
(recoveryExecutor-1117-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
Registering as Active after recovery.
[junit4] 2> 1082539 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy
Attempting to PeerSync from [http://127.0.0.1:64750/fu/x/collection1/] -
recoveringAfterStartup=[true]
[junit4] 2> 1082539 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync:
core=collection1 url=http://127.0.0.1:64760/fu/x START
replicas=[http://127.0.0.1:64750/fu/x/collection1/] nUpdates=1000
[junit4] 2> 1082541 INFO (qtp1601158192-7317) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1082541 INFO (qtp1601158192-7317) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request
[collection1] webapp=/fu/x path=/get
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
status=0 QTime=0
[junit4] 2> 1082543 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.IndexFingerprint
IndexFingerprint millis:1.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1082543 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync We are
already in sync. No need to do a PeerSync
[junit4] 2> 1082543 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1082543 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1082544 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 1082544 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy
PeerSync stage of recovery was successful.
[junit4] 2> 1082544 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy
Replaying updates buffered during PeerSync.
[junit4] 2> 1082544 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No
replay needed.
[junit4] 2> 1082544 INFO
(recoveryExecutor-1124-thread-1-processing-n:127.0.0.1:64760_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy
Registering as Active after recovery.
[junit4] 2> 1082605 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
[junit4] 2> 1082608 INFO (qtp1365736663-7279) [n:127.0.0.1:64744_fu%2Fx
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1082608 INFO (qtp1365736663-7279) [n:127.0.0.1:64744_fu%2Fx
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1082609 INFO (qtp1365736663-7279) [n:127.0.0.1:64744_fu%2Fx
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 end_commit_flush
[junit4] 2> 1082609 INFO (qtp1365736663-7279) [n:127.0.0.1:64744_fu%2Fx
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
0 1
[junit4] 2> 1082616 INFO (qtp1069274809-7344) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1082616 INFO (qtp1069274809-7344) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1082616 INFO (qtp1069274809-7344) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 1082616 INFO (qtp1601158192-7315) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1082617 INFO (qtp1601158192-7315) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1082618 INFO (qtp338757133-7379) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1082618 INFO (qtp1601158192-7315) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 1082618 INFO (qtp338757133-7379) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1082618 INFO (qtp1069274809-7344) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 2
[junit4] 2> 1082618 INFO (qtp338757133-7379) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 1082618 INFO (qtp1601158192-7315) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 2
[junit4] 2> 1082618 INFO (qtp338757133-7379) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 0
[junit4] 2> 1082619 INFO (qtp1601158192-7320) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
0 7
[junit4] 2> 1082621 INFO (qtp1601158192-7318) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request
[collection1] webapp=/fu/x path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=0
[junit4] 2> 1082622 INFO (qtp1069274809-7346) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request
[collection1] webapp=/fu/x path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=0
[junit4] 2> 1082624 INFO (qtp338757133-7381) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request
[collection1] webapp=/fu/x path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=0
[junit4] 2> 1084629 INFO (qtp1365736663-7280) [n:127.0.0.1:64744_fu%2Fx
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={wt=javabin&version=2}{deleteByQuery=*:* (-1559969785150701568)} 0 3
[junit4] 2> 1084634 INFO (qtp1069274809-7347) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&_version_=-1559969785154895872&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
(-1559969785154895872)} 0 2
[junit4] 2> 1084635 INFO (qtp338757133-7382) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&_version_=-1559969785154895872&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
(-1559969785154895872)} 0 2
[junit4] 2> 1084635 INFO (qtp1601158192-7317) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={wt=javabin&version=2}{deleteByQuery=*:* (-1559969785154895872)} 0 5
[junit4] 2> 1084656 INFO (qtp338757133-7383) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&wt=javabin&version=2}{add=[0
(1559969785167478784)]} 0 11
[junit4] 2> 1084656 INFO (qtp1069274809-7348) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&wt=javabin&version=2}{add=[0
(1559969785167478784)]} 0 11
[junit4] 2> 1084656 INFO (qtp1601158192-7319) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={wt=javabin&version=2}{add=[0 (1559969785167478784)]} 0 15
[junit4] 2> 1084659 INFO (qtp1069274809-7349) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&wt=javabin&version=2}{add=[1
(1559969785184256000)]} 0 0
[junit4] 2> 1084659 INFO (qtp338757133-7384) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&wt=javabin&version=2}{add=[1
(1559969785184256000)]} 0 0
[junit4] 2> 1084660 INFO (qtp1601158192-7313) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={wt=javabin&version=2}{add=[1 (1559969785184256000)]} 0 2
[junit4] 2> 1084662 INFO (qtp1069274809-7342) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&wt=javabin&version=2}{add=[2
(1559969785187401728)]} 0 0
[junit4] 2> 1084662 INFO (qtp338757133-7377) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&wt=javabin&version=2}{add=[2
(1559969785187401728)]} 0 0
[junit4] 2> 1084662 INFO (qtp1601158192-7315) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={wt=javabin&version=2}{add=[2 (1559969785187401728)]} 0 1
[junit4] 2> 1084670 INFO (qtp338757133-7377) [n:127.0.0.1:64760_fu%2Fx
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&wt=javabin&version=2}{add=[3
(1559969785194741760)]} 0 0
[junit4] 2> 1084670 INFO (qtp1069274809-7342) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64750/fu/x/collection1/&wt=javabin&version=2}{add=[3
(1559969785194741760)]} 0 0
[junit4] 2> 1084670 INFO (qtp1601158192-7320) [n:127.0.0.1:64750_fu%2Fx
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp=/fu/x path=/update
params={wt=javabin&version=2}{add=[3 (1559969785194741760)]} 0 2
[junit4] 2> 1084673 INFO (qtp1069274809-7345) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_n
[...truncated too long message...]
-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.ChaosMonkey monkey: stop shard! 64755
[junit4] 2> 1272385 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.CoreContainer Shutting down CoreContainer instance=1101699537
[junit4] 2> 1272386 WARN
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.RecoveryStrategy Stopping recovery for core=[collection1]
coreNodeName=[core_node2]
[junit4] 2> 1275169 INFO
(recoveryExecutor-1131-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
RecoveryStrategy has been closed
[junit4] 2> 1275169 INFO
(recoveryExecutor-1131-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
Finished recovery process, successful=[false]
[junit4] 2> 1275169 INFO
(recoveryExecutor-1131-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.SolrCore
[collection1] CLOSING SolrCore org.apache.solr.core.SolrCore@65f81c39
[junit4] 2> 1275169 WARN
(recoveryExecutor-1131-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy
Stopping recovery for core=[collection1] coreNodeName=[core_node2]
[junit4] 2> 1275218 INFO
(recoveryExecutor-1131-thread-1-processing-n:127.0.0.1:64755_fu%2Fx
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64755_fu%2Fx
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.m.SolrMetricManager
Closing metric reporters for: solr.core.collection1
[junit4] 2> 1275219 WARN
(updateExecutor-1130-thread-2-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.DefaultSolrCoreState Skipping recovery because Solr is shutdown
[junit4] 2> 1275221 WARN
(zkCallback-1133-thread-2-processing-n:127.0.0.1:64755_fu%2Fx)
[n:127.0.0.1:64755_fu%2Fx ] o.a.s.c.c.ZkStateReader ZooKeeper watch
triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for
/live_nodes]
[junit4] 2> 1275222 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
[junit4] 2> 1275223 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.AbstractConnector Stopped
ServerConnector@47f23470{HTTP/1.1,[http/1.1]}{127.0.0.1:64755}
[junit4] 2> 1275224 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.e.j.s.h.ContextHandler Stopped
o.e.j.s.ServletContextHandler@ad2fdaa{/fu/x,null,UNAVAILABLE}
[junit4] 2> 1275225 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.ChaosMonkey monkey: stop shard! 64760
[junit4] 2> 1275226 INFO
(TEST-PeerSyncReplicationTest.test-seed#[EC6CD25CB9E2753C]) [ ]
o.a.s.c.ZkTestServer connecting to 127.0.0.1:64737 64737
[junit4] 2> 1275243 INFO (Thread-1558) [ ] o.a.s.c.ZkTestServer
connecting to 127.0.0.1:64737 64737
[junit4] 2> 1276615 WARN (Thread-1558) [ ] o.a.s.c.ZkTestServer Watch
limit violations:
[junit4] 2> Maximum concurrent create/delete watches above limit:
[junit4] 2>
[junit4] 2> 6 /solr/aliases.json
[junit4] 2> 6 /solr/clusterprops.json
[junit4] 2> 5 /solr/security.json
[junit4] 2> 5 /solr/configs/conf1
[junit4] 2> 4 /solr/collections/collection1/state.json
[junit4] 2>
[junit4] 2> Maximum concurrent data watches above limit:
[junit4] 2>
[junit4] 2> 6 /solr/clusterstate.json
[junit4] 2> 2
/solr/collections/collection1/leader_elect/shard1/election/97498110347313160-core_node1-n_0000000000
[junit4] 2> 2
/solr/overseer_elect/election/97498110347313160-127.0.0.1:64750_fu%2Fx-n_0000000001
[junit4] 2>
[junit4] 2> Maximum concurrent children watches above limit:
[junit4] 2>
[junit4] 2> 204 /solr/overseer/collection-queue-work
[junit4] 2> 40 /solr/overseer/queue
[junit4] 2> 6 /solr/collections
[junit4] 2> 5 /solr/live_nodes
[junit4] 2> 4 /solr/overseer/queue-work
[junit4] 2>
[junit4] 2> NOTE: reproduce with: ant test
-Dtestcase=PeerSyncReplicationTest -Dtests.method=test
-Dtests.seed=EC6CD25CB9E2753C -Dtests.slow=true -Dtests.locale=ms-MY
-Dtests.timezone=Asia/Dacca -Dtests.asserts=true -Dtests.file.encoding=US-ASCII
[junit4] FAILURE 211s J1 | PeerSyncReplicationTest.test <<<
[junit4] > Throwable #1: java.lang.AssertionError: timeout waiting to see
all nodes active
[junit4] > at
__randomizedtesting.SeedInfo.seed([EC6CD25CB9E2753C:6438ED86171E18C4]:0)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
[junit4] > at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
[junit4] > at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
[junit4] > at java.lang.Thread.run(Thread.java:745)
[junit4] 2> 1276622 INFO
(SUITE-PeerSyncReplicationTest-seed#[EC6CD25CB9E2753C]-worker) [ ]
o.a.s.SolrTestCaseJ4 ###deleteCore
[junit4] 2> NOTE: leaving temporary files on disk at:
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_EC6CD25CB9E2753C-001
[junit4] 2> NOTE: test params are: codec=Asserting(Lucene70):
{other_tl1=PostingsFormat(name=Direct),
range_facet_l_dv=Lucene50(blocksize=128), rnd_s=FSTOrd50,
multiDefault=FSTOrd50, a_t=PostingsFormat(name=Direct),
intDefault=PostingsFormat(name=Direct), a_i1=PostingsFormat(name=Direct),
id=Lucene50(blocksize=128), range_facet_i_dv=PostingsFormat(name=Direct),
text=PostingsFormat(name=LuceneVarGapDocFreqInterval),
range_facet_l=PostingsFormat(name=Direct),
timestamp=PostingsFormat(name=Direct)},
docValues:{range_facet_l_dv=DocValuesFormat(name=Lucene70),
_version_=DocValuesFormat(name=Lucene70),
range_facet_i_dv=DocValuesFormat(name=Lucene70),
intDvoDefault=DocValuesFormat(name=Asserting),
timestamp=DocValuesFormat(name=Lucene70)}, maxPointsInLeafNode=645,
maxMBSortInHeap=5.856085568697607, sim=RandomSimilarity(queryNorm=true): {},
locale=ms-MY, timezone=Asia/Dacca
[junit4] 2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_121
(64-bit)/cpus=3,threads=1,free=145119944,total=376696832
[junit4] 2> NOTE: All tests run in this JVM: [TestComponentsName,
TestSystemCollAutoCreate, TestGeoJSONResponseWriter,
LeaderInitiatedRecoveryOnCommitTest, TestAuthenticationFramework,
TermVectorComponentTest, BadIndexSchemaTest, DistributedFacetPivotSmallTest,
DeleteLastCustomShardedReplicaTest, CustomCollectionTest,
TestAnalyzeInfixSuggestions, ScriptEngineTest, TestReloadAndDeleteDocs,
ChaosMonkeySafeLeaderTest, TestSimpleTrackingShardHandler,
DocValuesNotIndexedTest, LukeRequestHandlerTest, ClusterStateTest,
TestManagedResourceStorage, SpellPossibilityIteratorTest, TestCustomSort,
RemoteQueryErrorTest, UpdateRequestProcessorFactoryTest,
SolrCmdDistributorTest, TestStressInPlaceUpdates,
SharedFSAutoReplicaFailoverTest, TestReload, SolrCoreMetricManagerTest,
TestSystemIdResolver, TestUnifiedSolrHighlighter, HdfsDirectoryFactoryTest,
BadCopyFieldTest, TestCustomDocTransformer, HdfsChaosMonkeySafeLeaderTest,
TestTolerantUpdateProcessorCloud, TestWriterPerf, TestFastWriter,
FullSolrCloudDistribCmdsTest, TestConfigSetImmutable, BlobRepositoryCloudTest,
TestFilteredDocIdSet, BlockJoinFacetDistribTest, SimpleMLTQParserTest,
ConvertedLegacyTest, SortByFunctionTest, SolrIndexSplitterTest,
TestClusterStateMutator, BaseCdcrDistributedZkTest,
HdfsWriteToMultipleCollectionsTest, TestPathTrie, TestCustomStream,
CdcrReplicationDistributedZkTest, TestDocTermOrds, TestFiltering,
TemplateUpdateProcessorTest, BasicDistributedZk2Test, UUIDFieldTest,
HighlighterConfigTest, SuggesterTest, TestMacros, TestQuerySenderListener,
TestZkChroot, ShardRoutingCustomTest, OpenCloseCoreStressTest, EchoParamsTest,
NotRequiredUniqueKeyTest, AddBlockUpdateTest, TestJsonRequest,
TestExactSharedStatsCache, DistributedTermsComponentTest, MultiThreadedOCPTest,
TestSubQueryTransformer, TestManagedSchemaThreadSafety, ReplicationFactorTest,
TestQueryTypes, PeerSyncWithIndexFingerprintCachingTest, TestMissingGroups,
TestOverriddenPrefixQueryForCustomFieldType, WrapperMergePolicyFactoryTest,
TestReRankQParserPlugin, TestConfigSetsAPIZkFailure, SpatialRPTFieldTypeTest,
MetricsHandlerTest, TestBadConfig, TestXmlQParser, AnalyticsMergeStrategyTest,
TestSchemaManager, SolrMetricReporterTest, JavabinLoaderTest, TestSolr4Spatial,
ManagedSchemaRoundRobinCloudTest, PreAnalyzedUpdateProcessorTest,
TestSSLRandomization, TestHdfsUpdateLog, SparseHLLTest,
ShufflingReplicaListTransformerTest, CursorMarkTest, RankQueryTest,
StatsComponentTest, TestConfigSetsAPI, TestSolrDeletionPolicy2,
TestDFISimilarityFactory, OverseerStatusTest, TestHashQParserPlugin, TestUtils,
PeerSyncReplicationTest]
[junit4] Completed [229/693 (1!)] on J1 in 210.71s, 1 test, 1 failure <<<
FAILURES!
[...truncated 64087 lines...]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]