Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-Linux/18804/
Java: 64bit/jdk1.8.0_112 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC
1 test failed.
FAILED: org.apache.solr.cloud.PeerSyncReplicationTest.test
Error Message:
timeout waiting to see all nodes active
Stack Trace:
java.lang.AssertionError: timeout waiting to see all nodes active
at __randomizedtesting.SeedInfo.seed([684F6A462ED9C102:E01B559C8025ACFA]:0)
at org.junit.Assert.fail(Assert.java:93)
at org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
at org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
at org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:811)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:462)
at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
at java.lang.Thread.run(Thread.java:745)
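For context on the assertion: PeerSyncReplicationTest.waitTillNodesActive polls the ZooKeeper-backed cluster state until every replica of the collection is active on a live node, and the test fails with the message above once its retry budget runs out. The code below is only a minimal sketch of that polling pattern against the SolrJ API, assuming a CloudSolrClient and illustrative names (WaitForActiveSketch, maxRetries); it is not the test's actual source, and exact SolrJ signatures vary between versions.

import java.util.Set;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.ZkStateReader;

public class WaitForActiveSketch {
  // Hypothetical sketch: poll cluster state until every replica of the
  // collection is ACTIVE and hosted on a live node, or give up.
  static void waitTillNodesActive(CloudSolrClient client, String collection, int maxRetries)
      throws Exception {
    ZkStateReader reader = client.getZkStateReader();
    for (int attempt = 0; attempt < maxRetries; attempt++) {
      ClusterState clusterState = reader.getClusterState();
      Set<String> liveNodes = clusterState.getLiveNodes();
      DocCollection coll = clusterState.getCollection(collection);
      boolean allActive = true;
      for (Replica replica : coll.getReplicas()) {
        if (replica.getState() != Replica.State.ACTIVE
            || !liveNodes.contains(replica.getNodeName())) {
          allActive = false;
          break;
        }
      }
      if (allActive) {
        return; // every replica reports active on a live node
      }
      Thread.sleep(1000); // back off briefly before re-checking
    }
    // Same wording as the assertion that fails in the report above.
    throw new AssertionError("timeout waiting to see all nodes active");
  }
}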
Build Log:
[...truncated 12432 lines...]
[junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
[junit4] 2> Creating dataDir:
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/init-core-data-001
[junit4] 2> 1348265 INFO
(SUITE-PeerSyncReplicationTest-seed#[684F6A462ED9C102]-worker) [ ]
o.a.s.SolrTestCaseJ4 Using PointFields
[junit4] 2> 1348266 INFO
(SUITE-PeerSyncReplicationTest-seed#[684F6A462ED9C102]-worker) [ ]
o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (true) via:
@org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN)
[junit4] 2> 1348266 INFO
(SUITE-PeerSyncReplicationTest-seed#[684F6A462ED9C102]-worker) [ ]
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
[junit4] 2> 1348268 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 1348268 INFO (Thread-2765) [ ] o.a.s.c.ZkTestServer client
port:0.0.0.0/0.0.0.0:0
[junit4] 2> 1348269 INFO (Thread-2765) [ ] o.a.s.c.ZkTestServer
Starting server
[junit4] 2> 1348368 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.ZkTestServer start zk server on port:37552
[junit4] 2> 1348374 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractZkTestCase put
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
to /configs/conf1/solrconfig.xml
[junit4] 2> 1348375 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractZkTestCase put
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/schema.xml
to /configs/conf1/schema.xml
[junit4] 2> 1348376 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractZkTestCase put
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
[junit4] 2> 1348376 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractZkTestCase put
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
to /configs/conf1/stopwords.txt
[junit4] 2> 1348377 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractZkTestCase put
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/protwords.txt
to /configs/conf1/protwords.txt
[junit4] 2> 1348377 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractZkTestCase put
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/currency.xml
to /configs/conf1/currency.xml
[junit4] 2> 1348378 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractZkTestCase put
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
to /configs/conf1/enumsConfig.xml
[junit4] 2> 1348378 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractZkTestCase put
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
to /configs/conf1/open-exchange-rates.json
[junit4] 2> 1348379 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractZkTestCase put
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
to /configs/conf1/mapping-ISOLatin1Accent.txt
[junit4] 2> 1348379 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractZkTestCase put
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
to /configs/conf1/old_synonyms.txt
[junit4] 2> 1348380 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractZkTestCase put
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
to /configs/conf1/synonyms.txt
[junit4] 2> 1348439 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/control-001/cores/collection1
[junit4] 2> 1348441 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1348441 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@3fc27d66{/,null,AVAILABLE}
[junit4] 2> 1348444 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.AbstractConnector Started ServerConnector@65143bd4{SSL,[ssl,
http/1.1]}{127.0.0.1:34140}
[junit4] 2> 1348444 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.Server Started @1350256ms
[junit4] 2> 1348444 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/tempDir-001/control/data,
hostContext=/, hostPort=34140,
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/control-001/cores}
[junit4] 2> 1348444 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1348444 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
7.0.0
[junit4] 2> 1348444 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1348444 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1348445 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-01-19T15:17:38.464Z
[junit4] 2> 1348446 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1348446 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/control-001/solr.xml
[junit4] 2> 1348452 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37552/solr
[junit4] 2> 1348462 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:34140_
] o.a.s.c.OverseerElectionContext I am going to be the leader
127.0.0.1:34140_
[junit4] 2> 1348462 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:34140_
] o.a.s.c.Overseer Overseer
(id=97310412523962373-127.0.0.1:34140_-n_0000000000) starting
[junit4] 2> 1348465 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:34140_
] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:34140_
[junit4] 2> 1348466 INFO
(zkCallback-2311-thread-1-processing-n:127.0.0.1:34140_) [n:127.0.0.1:34140_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 1348524 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:34140_
] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/control-001/cores
[junit4] 2> 1348524 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:34140_
] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
[junit4] 2> 1348526 INFO
(OverseerStateUpdate-97310412523962373-127.0.0.1:34140_-n_0000000000)
[n:127.0.0.1:34140_ ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard1
[junit4] 2> 1349534 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1] o.a.s.c.SolrConfig
Using Lucene MatchVersion: 7.0.0
[junit4] 2> 1349546 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 1349624 WARN
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 1349626 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1] o.a.s.s.IndexSchema
Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 1349632 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection
control_collection
[junit4] 2> 1349632 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/control-001/cores/collection1],
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/control-001/cores/collection1/data/]
[junit4] 2> 1349632 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1]
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@26bc1020
[junit4] 2> 1349634 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy:
maxMergeAtOnce=12, maxMergeAtOnceExplicit=42, maxMergedSegmentMB=1.4111328125,
floorSegmentMB=1.1123046875, forceMergeDeletesPctAllowed=7.002848105698688,
segmentsPerTier=19.0, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.7745892724465951
[junit4] 2> 1349675 WARN
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1]
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type =
requestHandler,name = /dump,class = DumpRequestHandler,attributes =
{initParams=a, name=/dump, class=DumpRequestHandler},args =
{defaults={a=A,b=B}}}
[junit4] 2> 1349684 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1] o.a.s.u.UpdateHandler
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1349684 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1349685 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1] o.a.s.u.CommitTracker
Hard AutoCommit: disabled
[junit4] 2> 1349685 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1] o.a.s.u.CommitTracker
Soft AutoCommit: disabled
[junit4] 2> 1349685 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.MockRandomMergePolicy:
org.apache.lucene.index.MockRandomMergePolicy@38b9d4f6
[junit4] 2> 1349686 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1]
o.a.s.s.SolrIndexSearcher Opening [Searcher@50c604cd[collection1] main]
[junit4] 2> 1349686 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1349686 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1349686 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1]
o.a.s.h.ReplicationHandler Commits will be reserved for 10000
[junit4] 2> 1349687 INFO
(searcherExecutor-6733-thread-1-processing-n:127.0.0.1:34140_ x:collection1
c:control_collection) [n:127.0.0.1:34140_ c:control_collection x:collection1]
o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@50c604cd[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1349688 INFO
(coreLoadExecutor-6732-thread-1-processing-n:127.0.0.1:34140_)
[n:127.0.0.1:34140_ c:control_collection x:collection1] o.a.s.u.UpdateLog
Could not find max version in index or recent updates, using new clock
1556966601871327232
[junit4] 2> 1349692 INFO
(coreZkRegister-6725-thread-1-processing-n:127.0.0.1:34140_ x:collection1
c:control_collection) [n:127.0.0.1:34140_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas
found to continue.
[junit4] 2> 1349692 INFO
(coreZkRegister-6725-thread-1-processing-n:127.0.0.1:34140_ x:collection1
c:control_collection) [n:127.0.0.1:34140_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new
leader - try and sync
[junit4] 2> 1349692 INFO
(coreZkRegister-6725-thread-1-processing-n:127.0.0.1:34140_ x:collection1
c:control_collection) [n:127.0.0.1:34140_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to
https://127.0.0.1:34140/collection1/
[junit4] 2> 1349692 INFO
(coreZkRegister-6725-thread-1-processing-n:127.0.0.1:34140_ x:collection1
c:control_collection) [n:127.0.0.1:34140_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync
replicas to me
[junit4] 2> 1349692 INFO
(coreZkRegister-6725-thread-1-processing-n:127.0.0.1:34140_ x:collection1
c:control_collection) [n:127.0.0.1:34140_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy
https://127.0.0.1:34140/collection1/ has no replicas
[junit4] 2> 1349694 INFO
(coreZkRegister-6725-thread-1-processing-n:127.0.0.1:34140_ x:collection1
c:control_collection) [n:127.0.0.1:34140_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new
leader: https://127.0.0.1:34140/collection1/ shard1
[junit4] 2> 1349806 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 1349806 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37552/solr ready
[junit4] 2> 1349806 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection
loss:false
[junit4] 2> 1349845 INFO
(coreZkRegister-6725-thread-1-processing-n:127.0.0.1:34140_ x:collection1
c:control_collection) [n:127.0.0.1:34140_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery
necessary
[junit4] 2> 1349867 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-1-001/cores/collection1
[junit4] 2> 1349868 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-1-001
[junit4] 2> 1349869 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1349870 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@6a0f9c6f{/,null,AVAILABLE}
[junit4] 2> 1349870 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.AbstractConnector Started ServerConnector@20fcb999{SSL,[ssl,
http/1.1]}{127.0.0.1:43522}
[junit4] 2> 1349871 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.Server Started @1351683ms
[junit4] 2> 1349871 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/tempDir-001/jetty1,
solrconfig=solrconfig.xml, hostContext=/, hostPort=43522,
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-1-001/cores}
[junit4] 2> 1349871 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1349871 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
7.0.0
[junit4] 2> 1349871 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1349871 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1349871 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-01-19T15:17:39.890Z
[junit4] 2> 1349874 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1349874 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-1-001/solr.xml
[junit4] 2> 1349879 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37552/solr
[junit4] 2> 1349884 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:43522_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 1349886 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:43522_
] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:43522_
[junit4] 2> 1349887 INFO
(zkCallback-2320-thread-1-processing-n:127.0.0.1:43522_) [n:127.0.0.1:43522_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 1349887 INFO (zkCallback-2315-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 1349887 INFO
(zkCallback-2311-thread-3-processing-n:127.0.0.1:34140_) [n:127.0.0.1:34140_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 1349933 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:43522_
] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-1-001/cores
[junit4] 2> 1349933 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:43522_
] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
[junit4] 2> 1349934 INFO
(OverseerStateUpdate-97310412523962373-127.0.0.1:34140_-n_0000000000)
[n:127.0.0.1:34140_ ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard1
[junit4] 2> 1350942 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 7.0.0
[junit4] 2> 1350957 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 1351032 WARN
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 1351034 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.s.IndexSchema Loaded
schema test/1.0 with uniqueid field id
[junit4] 2> 1351039 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection collection1
[junit4] 2> 1351039 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-1-001/cores/collection1],
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-1-001/cores/collection1/data/]
[junit4] 2> 1351039 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.c.JmxMonitoredMap JMX
monitoring is enabled. Adding Solr mbeans to JMX Server:
com.sun.jmx.mbeanserver.JmxMBeanServer@26bc1020
[junit4] 2> 1351041 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=12, maxMergeAtOnceExplicit=42,
maxMergedSegmentMB=1.4111328125, floorSegmentMB=1.1123046875,
forceMergeDeletesPctAllowed=7.002848105698688, segmentsPerTier=19.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.7745892724465951
[junit4] 2> 1351081 WARN
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.c.RequestHandlers
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class
= DumpRequestHandler,attributes = {initParams=a, name=/dump,
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 1351091 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.u.UpdateHandler Using
UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1351091 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1351092 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 1351092 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 1351092 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.MockRandomMergePolicy:
org.apache.lucene.index.MockRandomMergePolicy@346f476b
[junit4] 2> 1351093 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.s.SolrIndexSearcher
Opening [Searcher@719b592d[collection1] main]
[junit4] 2> 1351093 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1351094 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1351094 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000
[junit4] 2> 1351095 INFO
(searcherExecutor-6744-thread-1-processing-n:127.0.0.1:43522_ x:collection1
c:collection1) [n:127.0.0.1:43522_ c:collection1 x:collection1]
o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@719b592d[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1351095 INFO
(coreLoadExecutor-6743-thread-1-processing-n:127.0.0.1:43522_)
[n:127.0.0.1:43522_ c:collection1 x:collection1] o.a.s.u.UpdateLog Could not
find max version in index or recent updates, using new clock 1556966603346673664
[junit4] 2> 1351099 INFO
(coreZkRegister-6738-thread-1-processing-n:127.0.0.1:43522_ x:collection1
c:collection1) [n:127.0.0.1:43522_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to
continue.
[junit4] 2> 1351099 INFO
(coreZkRegister-6738-thread-1-processing-n:127.0.0.1:43522_ x:collection1
c:collection1) [n:127.0.0.1:43522_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try
and sync
[junit4] 2> 1351099 INFO
(coreZkRegister-6738-thread-1-processing-n:127.0.0.1:43522_ x:collection1
c:collection1) [n:127.0.0.1:43522_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.SyncStrategy Sync replicas to
https://127.0.0.1:43522/collection1/
[junit4] 2> 1351099 INFO
(coreZkRegister-6738-thread-1-processing-n:127.0.0.1:43522_ x:collection1
c:collection1) [n:127.0.0.1:43522_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
[junit4] 2> 1351099 INFO
(coreZkRegister-6738-thread-1-processing-n:127.0.0.1:43522_ x:collection1
c:collection1) [n:127.0.0.1:43522_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.SyncStrategy https://127.0.0.1:43522/collection1/ has no
replicas
[junit4] 2> 1351101 INFO
(coreZkRegister-6738-thread-1-processing-n:127.0.0.1:43522_ x:collection1
c:collection1) [n:127.0.0.1:43522_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader:
https://127.0.0.1:43522/collection1/ shard1
[junit4] 2> 1351251 INFO
(coreZkRegister-6738-thread-1-processing-n:127.0.0.1:43522_ x:collection1
c:collection1) [n:127.0.0.1:43522_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
[junit4] 2> 1351284 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-2-001/cores/collection1
[junit4] 2> 1351284 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-2-001
[junit4] 2> 1351285 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1351286 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@a21b5e4{/,null,AVAILABLE}
[junit4] 2> 1351286 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.AbstractConnector Started ServerConnector@459e883b{SSL,[ssl,
http/1.1]}{127.0.0.1:35248}
[junit4] 2> 1351287 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.Server Started @1353099ms
[junit4] 2> 1351287 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/tempDir-001/jetty2,
solrconfig=solrconfig.xml, hostContext=/, hostPort=35248,
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-2-001/cores}
[junit4] 2> 1351287 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1351287 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
7.0.0
[junit4] 2> 1351287 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1351287 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1351287 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-01-19T15:17:41.306Z
[junit4] 2> 1351290 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1351290 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-2-001/solr.xml
[junit4] 2> 1351294 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37552/solr
[junit4] 2> 1351299 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:35248_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
[junit4] 2> 1351301 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:35248_
] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:35248_
[junit4] 2> 1351302 INFO (zkCallback-2315-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1351302 INFO
(zkCallback-2320-thread-1-processing-n:127.0.0.1:43522_) [n:127.0.0.1:43522_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1351302 INFO
(zkCallback-2326-thread-1-processing-n:127.0.0.1:35248_) [n:127.0.0.1:35248_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1351302 INFO
(zkCallback-2311-thread-3-processing-n:127.0.0.1:34140_) [n:127.0.0.1:34140_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1351342 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:35248_
] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-2-001/cores
[junit4] 2> 1351342 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:35248_
] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
[junit4] 2> 1351343 INFO
(OverseerStateUpdate-97310412523962373-127.0.0.1:34140_-n_0000000000)
[n:127.0.0.1:34140_ ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard1
[junit4] 2> 1352352 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 7.0.0
[junit4] 2> 1352363 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 1352441 WARN
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 1352443 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.s.IndexSchema Loaded
schema test/1.0 with uniqueid field id
[junit4] 2> 1352449 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection collection1
[junit4] 2> 1352450 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-2-001/cores/collection1],
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-2-001/cores/collection1/data/]
[junit4] 2> 1352450 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.c.JmxMonitoredMap JMX
monitoring is enabled. Adding Solr mbeans to JMX Server:
com.sun.jmx.mbeanserver.JmxMBeanServer@26bc1020
[junit4] 2> 1352451 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=12, maxMergeAtOnceExplicit=42,
maxMergedSegmentMB=1.4111328125, floorSegmentMB=1.1123046875,
forceMergeDeletesPctAllowed=7.002848105698688, segmentsPerTier=19.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.7745892724465951
[junit4] 2> 1352502 WARN
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.c.RequestHandlers
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class
= DumpRequestHandler,attributes = {initParams=a, name=/dump,
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 1352518 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.u.UpdateHandler Using
UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1352518 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1352524 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 1352524 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 1352525 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.MockRandomMergePolicy:
org.apache.lucene.index.MockRandomMergePolicy@1f1a9fcd
[junit4] 2> 1352526 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.s.SolrIndexSearcher
Opening [Searcher@2a0f1b99[collection1] main]
[junit4] 2> 1352527 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1352527 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1352527 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000
[junit4] 2> 1352528 INFO
(searcherExecutor-6755-thread-1-processing-n:127.0.0.1:35248_ x:collection1
c:collection1) [n:127.0.0.1:35248_ c:collection1 x:collection1]
o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@2a0f1b99[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1352529 INFO
(coreLoadExecutor-6754-thread-1-processing-n:127.0.0.1:35248_)
[n:127.0.0.1:35248_ c:collection1 x:collection1] o.a.s.u.UpdateLog Could not
find max version in index or recent updates, using new clock 1556966604850331648
[junit4] 2> 1352532 INFO
(coreZkRegister-6749-thread-1-processing-n:127.0.0.1:35248_ x:collection1
c:collection1) [n:127.0.0.1:35248_ c:collection1 s:shard1 r:core_node2
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
[junit4] 2> 1352532 INFO
(updateExecutor-2323-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
[junit4] 2> 1352532 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process.
recoveringAfterStartup=true
[junit4] 2> 1352532 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
[junit4] 2> 1352532 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates.
core=[collection1]
[junit4] 2> 1352533 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates.
FSUpdateLog{state=ACTIVE, tlog=null}
[junit4] 2> 1352533 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core
[collection1] as recovering, leader is [https://127.0.0.1:43522/collection1/]
and I am [https://127.0.0.1:35248/collection1/]
[junit4] 2> 1352534 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery
command to [https://127.0.0.1:43522]; [WaitForState:
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:35248_&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
[junit4] 2> 1352562 INFO (qtp1993243746-13108) [n:127.0.0.1:43522_ ]
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state:
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
[junit4] 2> 1352562 INFO (qtp1993243746-13108) [n:127.0.0.1:43522_ ]
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1
(shard1 of collection1) have state: recovering
[junit4] 2> 1352562 INFO (qtp1993243746-13108) [n:127.0.0.1:43522_ ]
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1,
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader?
true, live=true, checkLive=true, currentState=down, localState=active,
nodeName=127.0.0.1:35248_, coreNodeName=core_node2,
onlyIfActiveCheckResult=false, nodeProps:
core_node2:{"core":"collection1","base_url":"https://127.0.0.1:35248","node_name":"127.0.0.1:35248_","state":"down"}
[junit4] 2> 1352680 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-3-001/cores/collection1
[junit4] 2> 1352680 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-3-001
[junit4] 2> 1352681 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1352682 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@10d2a72b{/,null,AVAILABLE}
[junit4] 2> 1352682 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.AbstractConnector Started ServerConnector@94b4a81{SSL,[ssl,
http/1.1]}{127.0.0.1:43281}
[junit4] 2> 1352683 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.Server Started @1354496ms
[junit4] 2> 1352683 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/tempDir-001/jetty3,
solrconfig=solrconfig.xml, hostContext=/, hostPort=43281,
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-3-001/cores}
[junit4] 2> 1352683 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1352684 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
7.0.0
[junit4] 2> 1352684 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1352684 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1352684 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-01-19T15:17:42.703Z
[junit4] 2> 1352686 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1352686 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-3-001/solr.xml
[junit4] 2> 1352691 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37552/solr
[junit4] 2> 1352696 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:43281_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
[junit4] 2> 1352698 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:43281_
] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:43281_
[junit4] 2> 1352699 INFO
(zkCallback-2320-thread-1-processing-n:127.0.0.1:43522_) [n:127.0.0.1:43522_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 1352699 INFO
(zkCallback-2311-thread-3-processing-n:127.0.0.1:34140_) [n:127.0.0.1:34140_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 1352699 INFO (zkCallback-2315-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 1352699 INFO
(zkCallback-2326-thread-1-processing-n:127.0.0.1:35248_) [n:127.0.0.1:35248_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 1352699 INFO
(zkCallback-2333-thread-1-processing-n:127.0.0.1:43281_) [n:127.0.0.1:43281_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 1352721 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:43281_
] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-3-001/cores
[junit4] 2> 1352721 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [n:127.0.0.1:43281_
] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
[junit4] 2> 1352722 INFO
(OverseerStateUpdate-97310412523962373-127.0.0.1:34140_-n_0000000000)
[n:127.0.0.1:34140_ ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard1
[junit4] 2> 1353562 INFO (qtp1993243746-13108) [n:127.0.0.1:43522_ ]
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1,
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader?
true, live=true, checkLive=true, currentState=recovering, localState=active,
nodeName=127.0.0.1:35248_, coreNodeName=core_node2,
onlyIfActiveCheckResult=false, nodeProps:
core_node2:{"core":"collection1","base_url":"https://127.0.0.1:35248","node_name":"127.0.0.1:35248_","state":"recovering"}
[junit4] 2> 1353563 INFO (qtp1993243746-13108) [n:127.0.0.1:43522_ ]
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering,
checkLive: true, onlyIfLeader: true for: 1 seconds.
[junit4] 2> 1353563 INFO (qtp1993243746-13108) [n:127.0.0.1:43522_ ]
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores
params={nodeName=127.0.0.1:35248_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
status=0 QTime=1000
[junit4] 2> 1353733 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 7.0.0
[junit4] 2> 1353750 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 1353879 WARN
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 1353883 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.s.IndexSchema Loaded
schema test/1.0 with uniqueid field id
[junit4] 2> 1353891 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection collection1
[junit4] 2> 1353892 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-3-001/cores/collection1],
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001/shard-3-001/cores/collection1/data/]
[junit4] 2> 1353892 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.c.JmxMonitoredMap JMX
monitoring is enabled. Adding Solr mbeans to JMX Server:
com.sun.jmx.mbeanserver.JmxMBeanServer@26bc1020
[junit4] 2> 1353894 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=12, maxMergeAtOnceExplicit=42,
maxMergedSegmentMB=1.4111328125, floorSegmentMB=1.1123046875,
forceMergeDeletesPctAllowed=7.002848105698688, segmentsPerTier=19.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.7745892724465951
[junit4] 2> 1353937 WARN
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.c.RequestHandlers
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class
= DumpRequestHandler,attributes = {initParams=a, name=/dump,
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 1353956 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.u.UpdateHandler Using
UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1353956 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1353957 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 1353957 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 1353958 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.MockRandomMergePolicy:
org.apache.lucene.index.MockRandomMergePolicy@6b5f0338
[junit4] 2> 1353959 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.s.SolrIndexSearcher
Opening [Searcher@67d0816e[collection1] main]
[junit4] 2> 1353959 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1353959 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1353960 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000
[junit4] 2> 1353961 INFO
(searcherExecutor-6766-thread-1-processing-n:127.0.0.1:43281_ x:collection1
c:collection1) [n:127.0.0.1:43281_ c:collection1 x:collection1]
o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@67d0816e[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1353961 INFO
(coreLoadExecutor-6765-thread-1-processing-n:127.0.0.1:43281_)
[n:127.0.0.1:43281_ c:collection1 x:collection1] o.a.s.u.UpdateLog Could not
find max version in index or recent updates, using new clock 1556966606351892480
[junit4] 2> 1353965 INFO
(coreZkRegister-6760-thread-1-processing-n:127.0.0.1:43281_ x:collection1
c:collection1) [n:127.0.0.1:43281_ c:collection1 s:shard1 r:core_node3
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
[junit4] 2> 1353965 INFO
(updateExecutor-2330-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
[junit4] 2> 1353965 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process.
recoveringAfterStartup=true
[junit4] 2> 1353965 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
[junit4] 2> 1353965 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates.
core=[collection1]
[junit4] 2> 1353965 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates.
FSUpdateLog{state=ACTIVE, tlog=null}
[junit4] 2> 1353965 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core
[collection1] as recovering, leader is [https://127.0.0.1:43522/collection1/]
and I am [https://127.0.0.1:43281/collection1/]
[junit4] 2> 1353967 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery
command to [https://127.0.0.1:43522]; [WaitForState:
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:43281_&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
[junit4] 2> 1353970 INFO (qtp1993243746-13115) [n:127.0.0.1:43522_ ]
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state:
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
[junit4] 2> 1353970 INFO (qtp1993243746-13115) [n:127.0.0.1:43522_ ]
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1
(shard1 of collection1) have state: recovering
[junit4] 2> 1353970 INFO (qtp1993243746-13115) [n:127.0.0.1:43522_ ]
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1,
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader?
true, live=true, checkLive=true, currentState=down, localState=active,
nodeName=127.0.0.1:43281_, coreNodeName=core_node3,
onlyIfActiveCheckResult=false, nodeProps:
core_node3:{"core":"collection1","base_url":"https://127.0.0.1:43281","node_name":"127.0.0.1:43281_","state":"down"}
[junit4] 2> 1354056 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.SolrTestCaseJ4 ###Starting test
[junit4] 2> 1354056 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30
for each attempt
[junit4] 2> 1354056 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection:
collection1 failOnTimeout:true timeout (sec):30
[junit4] 2> 1354970 INFO (qtp1993243746-13115) [n:127.0.0.1:43522_ ]
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1,
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader?
true, live=true, checkLive=true, currentState=recovering, localState=active,
nodeName=127.0.0.1:43281_, coreNodeName=core_node3,
onlyIfActiveCheckResult=false, nodeProps:
core_node3:{"core":"collection1","base_url":"https://127.0.0.1:43281","node_name":"127.0.0.1:43281_","state":"recovering"}
[junit4] 2> 1354971 INFO (qtp1993243746-13115) [n:127.0.0.1:43522_ ]
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering,
checkLive: true, onlyIfLeader: true for: 1 seconds.
[junit4] 2> 1354971 INFO (qtp1993243746-13115) [n:127.0.0.1:43522_ ]
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores
params={nodeName=127.0.0.1:43281_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
status=0 QTime=1001
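
For reference, the PREPRECOVERY call logged just above is an ordinary core-admin HTTP request. The sketch below only rebuilds that request URL from the parameters shown in the HttpSolrCall line; the helper class is hypothetical, and just the host, port, and parameter values are taken from the log.

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.StringJoiner;

    // Illustrative only: reconstructs the PREPRECOVERY core-admin URL seen in the
    // HttpSolrCall line above. The class itself is hypothetical; the host, port,
    // and parameter values are copied from the log.
    final class PrepRecoveryUrl {
        public static void main(String[] args) {
            Map<String, String> params = new LinkedHashMap<>();
            params.put("action", "PREPRECOVERY");
            params.put("core", "collection1");
            params.put("nodeName", "127.0.0.1:43281_");
            params.put("coreNodeName", "core_node3");
            params.put("state", "recovering");
            params.put("checkLive", "true");
            params.put("onlyIfLeader", "true");
            params.put("onlyIfLeaderActive", "true");
            params.put("wt", "javabin");
            params.put("version", "2");

            // Join the parameters back into the query string the leader received.
            StringJoiner query = new StringJoiner("&");
            params.forEach((k, v) -> query.add(k + "=" + v));
            System.out.println("https://127.0.0.1:43522/admin/cores?" + query);
        }
    }
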
[junit4] 2> 1360563 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync
from [https://127.0.0.1:43522/collection1/] - recoveringAfterStartup=[true]
[junit4] 2> 1360564 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1
url=https://127.0.0.1:35248 START
replicas=[https://127.0.0.1:43522/collection1/] nUpdates=1000
[junit4] 2> 1360566 INFO (qtp1993243746-13109) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1360566 INFO (qtp1993243746-13109) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request
[collection1] webapp= path=/get
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
status=0 QTime=0
[junit4] 2> 1360566 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint
millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1360566 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to
do a PeerSync
[junit4] 2> 1360566 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1360567 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted
changes. Skipping IW.commit.
[junit4] 2> 1360567 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
[junit4] 2> 1360567 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery
was successful.
[junit4] 2> 1360567 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered
during PeerSync.
[junit4] 2> 1360567 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
[junit4] 2> 1360567 INFO
(recoveryExecutor-2324-thread-1-processing-n:127.0.0.1:35248_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:35248_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active
after recovery.
[junit4] 2> 1361971 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync
from [https://127.0.0.1:43522/collection1/] - recoveringAfterStartup=[true]
[junit4] 2> 1361972 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1
url=https://127.0.0.1:43281 START
replicas=[https://127.0.0.1:43522/collection1/] nUpdates=1000
[junit4] 2> 1361974 INFO (qtp1993243746-13109) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1361974 INFO (qtp1993243746-13109) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request
[collection1] webapp= path=/get
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
status=0 QTime=0
[junit4] 2> 1361975 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint
millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1361975 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to
do a PeerSync
[junit4] 2> 1361975 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1361975 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted
changes. Skipping IW.commit.
[junit4] 2> 1361976 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
[junit4] 2> 1361976 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery
was successful.
[junit4] 2> 1361976 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered
during PeerSync.
[junit4] 2> 1361976 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
[junit4] 2> 1361976 INFO
(recoveryExecutor-2331-thread-1-processing-n:127.0.0.1:43281_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:43281_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active
after recovery.
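
Both replicas skip full replication here because their index fingerprint matches the leader's (all-zero fingerprints on an empty index, per the IndexFingerprint lines above), so PeerSync reports "We are already in sync". A minimal sketch of that comparison idea follows, using simplified fields named after the log output; this is not the real o.a.s.u.IndexFingerprint API, and the exact check is illustrative.

    // Simplified stand-in for an index fingerprint; field names mirror the log
    // output above, but this is an illustrative sketch, not Solr's IndexFingerprint.
    final class Fingerprint {
        final long maxVersionEncountered;
        final long versionsHash;
        final long numVersions;
        final long numDocs;

        Fingerprint(long maxVersionEncountered, long versionsHash,
                    long numVersions, long numDocs) {
            this.maxVersionEncountered = maxVersionEncountered;
            this.versionsHash = versionsHash;
            this.numVersions = numVersions;
            this.numDocs = numDocs;
        }

        // Illustrative check: treat the replica as in sync when its fingerprint
        // already covers everything the leader reports.
        static boolean alreadyInSync(Fingerprint leader, Fingerprint replica) {
            return replica.maxVersionEncountered >= leader.maxVersionEncountered
                && replica.versionsHash == leader.versionsHash
                && replica.numVersions >= leader.numVersions;
        }

        public static void main(String[] args) {
            // Both sides report an empty index in the log above, so the check
            // passes and recovery proceeds straight to "No replay needed".
            Fingerprint leader = new Fingerprint(0, 0, 0, 0);
            Fingerprint replica = new Fingerprint(0, 0, 0, 0);
            System.out.println(alreadyInSync(leader, replica)); // true
        }
    }
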
[junit4] 2> 1363057 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
[junit4] 2> 1363082 INFO (qtp521848957-13071) [n:127.0.0.1:34140_
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1363082 INFO (qtp521848957-13071) [n:127.0.0.1:34140_
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1363083 INFO (qtp521848957-13071) [n:127.0.0.1:34140_
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 end_commit_flush
[junit4] 2> 1363083 INFO (qtp521848957-13071) [n:127.0.0.1:34140_
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
0 0
[junit4] 2> 1363088 INFO (qtp1993243746-13114) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1363088 INFO (qtp1993243746-13114) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1363089 INFO (qtp1993243746-13114) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 1363089 INFO (qtp1993243746-13114) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:43522/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 0
[junit4] 2> 1363116 INFO (qtp967235790-13171) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1363116 INFO (qtp967235790-13171) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1363116 INFO (qtp967235790-13171) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 1363116 INFO (qtp967235790-13171) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:43522/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 0
[junit4] 2> 1363119 INFO (qtp1961413100-13136) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1363120 INFO (qtp1961413100-13136) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1363120 INFO (qtp1961413100-13136) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 1363120 INFO (qtp1961413100-13136) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:43522/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 0
[junit4] 2> 1363120 INFO (qtp1993243746-13115) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
0 35
[junit4] 2> 1363123 INFO (qtp1993243746-13115) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request
[collection1] webapp= path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=0
[junit4] 2> 1363126 INFO (qtp1961413100-13137) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request
[collection1] webapp= path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=0
[junit4] 2> 1363129 INFO (qtp967235790-13178) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request
[collection1] webapp= path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=0
[junit4] 2> 1365131 INFO (qtp521848957-13076) [n:127.0.0.1:34140_
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{deleteByQuery=*:* (-1556966618063437824)} 0 1
[junit4] 2> 1365135 INFO (qtp967235790-13176) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&_version_=-1556966618065534976&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
(-1556966618065534976)} 0 1
[junit4] 2> 1365135 INFO (qtp1961413100-13141) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&_version_=-1556966618065534976&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
(-1556966618065534976)} 0 1
[junit4] 2> 1365135 INFO (qtp1993243746-13109) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{deleteByQuery=*:* (-1556966618065534976)} 0 3
[junit4] 2> 1365141 INFO (qtp1961413100-13143) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[0
(1556966618072875008)]} 0 0
[junit4] 2> 1365141 INFO (qtp967235790-13172) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[0
(1556966618072875008)]} 0 0
[junit4] 2> 1365141 INFO (qtp1993243746-13113) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[0 (1556966618072875008)]} 0 2
[junit4] 2> 1365144 INFO (qtp967235790-13177) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[1
(1556966618074972160)]} 0 0
[junit4] 2> 1365144 INFO (qtp1961413100-13136) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[1
(1556966618074972160)]} 0 0
[junit4] 2> 1365144 INFO (qtp1993243746-13114) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[1 (1556966618074972160)]} 0 2
[junit4] 2> 1365145 INFO (qtp1961413100-13137) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[2
(1556966618078117888)]} 0 0
[junit4] 2> 1365145 INFO (qtp967235790-13178) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[2
(1556966618078117888)]} 0 0
[junit4] 2> 1365145 INFO (qtp1993243746-13115) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[2 (1556966618078117888)]} 0 0
[junit4] 2> 1365146 INFO (qtp967235790-13176) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[3
(1556966618080215040)]} 0 0
[junit4] 2> 1365146 INFO (qtp1961413100-13141) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[3
(1556966618080215040)]} 0 0
[junit4] 2> 1365146 INFO (qtp1993243746-13109) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[3 (1556966618080215040)]} 0 0
[junit4] 2> 1365148 INFO (qtp1961413100-13143) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[4
(1556966618081263616)]} 0 0
[junit4] 2> 1365155 INFO (qtp967235790-13172) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[4
(1556966618081263616)]} 0 7
[junit4] 2> 1365155 INFO (qtp1993243746-13113) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[4 (1556966618081263616)]} 0 8
[junit4] 2> 1365156 INFO (qtp967235790-13177) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[5
(1556966618090700800)]} 0 0
[junit4] 2> 1365156 INFO (qtp1961413100-13136) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[5
(1556966618090700800)]} 0 0
[junit4] 2> 1365157 INFO (qtp1993243746-13114) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[5 (1556966618090700800)]} 0 0
[junit4] 2> 1365157 INFO (qtp1961413100-13137) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[6
(1556966618091749376)]} 0 0
[junit4] 2> 1365158 INFO (qtp967235790-13178) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[6
(1556966618091749376)]} 0 0
[junit4] 2> 1365158 INFO (qtp1993243746-13115) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[6 (1556966618091749376)]} 0 0
[junit4] 2> 1365159 INFO (qtp1961413100-13141) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[7
(1556966618092797952)]} 0 0
[junit4] 2> 1365159 INFO (qtp967235790-13176) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[7
(1556966618092797952)]} 0 0
[junit4] 2> 1365159 INFO (qtp1993243746-13109) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[7 (1556966618092797952)]} 0 0
[junit4] 2> 1365160 INFO (qtp967235790-13172) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[8
(1556966618093846528)]} 0 0
[junit4] 2> 1365160 INFO (qtp1961413100-13143) [n:127.0.0.1:35248_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[8
(1556966618093846528)]} 0 0
[junit4] 2> 1365160 INFO (qtp1993243746-13113) [n:127.0.0.1:43522_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[8 (1556966618093846528)]} 0 0
[junit4] 2> 1365162 INFO (qtp967235790-13177) [n:127.0.0.1:43281_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:43522/collection1/&wt=javabin&version=2}{add=[9
(1556966618096992256)]} 0 0
[junit4] 2> 1365162 INFO (qtp1961413100-13136) [n:127.0.0.1:35248_ c
[...truncated too long message...]
st-seed#[684F6A462ED9C102]) [ ] o.a.s.m.SolrMetricManager Closing metric
reporters for: solr.node
[junit4] 2> 1555432 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.AbstractConnector Stopped ServerConnector@1094507a{SSL,[ssl,
http/1.1]}{127.0.0.1:35248}
[junit4] 2> 1555432 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.e.j.s.h.ContextHandler Stopped
o.e.j.s.ServletContextHandler@2b32e331{/,null,UNAVAILABLE}
[junit4] 2> 1555433 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.ChaosMonkey monkey: stop shard! 43281
[junit4] 2> 1555433 INFO
(TEST-PeerSyncReplicationTest.test-seed#[684F6A462ED9C102]) [ ]
o.a.s.c.ZkTestServer connecting to 127.0.0.1:37552 37552
[junit4] 2> 1555539 INFO (Thread-2765) [ ] o.a.s.c.ZkTestServer
connecting to 127.0.0.1:37552 37552
[junit4] 2> 1555540 WARN (Thread-2765) [ ] o.a.s.c.ZkTestServer Watch
limit violations:
[junit4] 2> Maximum concurrent create/delete watches above limit:
[junit4] 2>
[junit4] 2> 6 /solr/aliases.json
[junit4] 2> 5 /solr/security.json
[junit4] 2> 5 /solr/configs/conf1
[junit4] 2> 4 /solr/collections/collection1/state.json
[junit4] 2>
[junit4] 2> Maximum concurrent data watches above limit:
[junit4] 2>
[junit4] 2> 6 /solr/clusterstate.json
[junit4] 2> 6 /solr/clusterprops.json
[junit4] 2> 2
/solr/collections/collection1/leader_elect/shard1/election/97310412523962377-core_node1-n_0000000000
[junit4] 2> 2
/solr/overseer_elect/election/97310412523962377-127.0.0.1:43522_-n_0000000001
[junit4] 2>
[junit4] 2> Maximum concurrent children watches above limit:
[junit4] 2>
[junit4] 2> 207 /solr/overseer/collection-queue-work
[junit4] 2> 22 /solr/overseer/queue
[junit4] 2> 6 /solr/collections
[junit4] 2> 5 /solr/live_nodes
[junit4] 2> 5 /solr/overseer/queue-work
[junit4] 2>
[junit4] 2> NOTE: reproduce with: ant test
-Dtestcase=PeerSyncReplicationTest -Dtests.method=test
-Dtests.seed=684F6A462ED9C102 -Dtests.multiplier=3 -Dtests.slow=true
-Dtests.locale=es-PY -Dtests.timezone=America/Puerto_Rico -Dtests.asserts=true
-Dtests.file.encoding=US-ASCII
[junit4] FAILURE 207s J2 | PeerSyncReplicationTest.test <<<
[junit4] > Throwable #1: java.lang.AssertionError: timeout waiting to see
all nodes active
[junit4] > at
__randomizedtesting.SeedInfo.seed([684F6A462ED9C102:E01B559C8025ACFA]:0)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
[junit4] > at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
[junit4] > at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
[junit4] > at java.lang.Thread.run(Thread.java:745)
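
The assertion raised at PeerSyncReplicationTest.waitTillNodesActive:326 comes out of a poll-until-active loop of roughly the following shape. This is an illustrative sketch with a hypothetical helper, predicate, and timeout, not the test's actual code; in the real test the predicate would consult the cluster state for the replicas' published states.

    import java.util.concurrent.TimeUnit;
    import java.util.function.BooleanSupplier;

    // Illustrative sketch of the wait-until-active pattern behind the
    // "timeout waiting to see all nodes active" failure; not the test's real code.
    final class WaitForActive {
        static void waitTillNodesActive(BooleanSupplier allNodesActive, long timeoutSec)
                throws InterruptedException {
            long deadline = System.nanoTime() + TimeUnit.SECONDS.toNanos(timeoutSec);
            while (!allNodesActive.getAsBoolean()) {
                if (System.nanoTime() > deadline) {
                    throw new AssertionError("timeout waiting to see all nodes active");
                }
                Thread.sleep(500); // poll the published replica states periodically
            }
        }

        public static void main(String[] args) throws InterruptedException {
            // Example predicate that is immediately satisfied, so the sketch returns.
            waitTillNodesActive(() -> true, 30);
            System.out.println("all nodes active");
        }
    }
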
[junit4] 2> 1555543 INFO
(SUITE-PeerSyncReplicationTest-seed#[684F6A462ED9C102]-worker) [ ]
o.a.s.SolrTestCaseJ4 ###deleteCore
[junit4] 2> NOTE: leaving temporary files on disk at:
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_684F6A462ED9C102-001
[junit4] 2> Jan 19, 2017 3:21:05 PM
com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
[junit4] 2> WARNING: Will linger awaiting termination of 1 leaked
thread(s).
[junit4] 2> NOTE: test params are: codec=Asserting(Lucene70):
{other_tl1=PostingsFormat(name=Memory),
range_facet_l_dv=PostingsFormat(name=Direct),
rnd_s=PostingsFormat(name=Memory), multiDefault=FST50,
intDefault=PostingsFormat(name=Memory), a_i1=FST50, range_facet_l=FST50,
_version_=PostingsFormat(name=Memory), a_t=FST50,
id=PostingsFormat(name=Direct), range_facet_i_dv=FST50,
text=PostingsFormat(name=Asserting), timestamp=FST50},
docValues:{other_tl1=DocValuesFormat(name=Direct),
range_facet_l_dv=DocValuesFormat(name=Lucene70),
rnd_s=DocValuesFormat(name=Direct), multiDefault=DocValuesFormat(name=Memory),
intDefault=DocValuesFormat(name=Direct), a_i1=DocValuesFormat(name=Memory),
range_facet_l=DocValuesFormat(name=Memory),
_version_=DocValuesFormat(name=Direct), a_t=DocValuesFormat(name=Memory),
range_facet_i_dv=DocValuesFormat(name=Memory),
id=DocValuesFormat(name=Lucene70), text=DocValuesFormat(name=Lucene70),
timestamp=DocValuesFormat(name=Memory)}, maxPointsInLeafNode=173,
maxMBSortInHeap=7.529709771674732, sim=RandomSimilarity(queryNorm=false): {},
locale=es-PY, timezone=America/Puerto_Rico
[junit4] 2> NOTE: Linux 4.4.0-53-generic amd64/Oracle Corporation
1.8.0_112 (64-bit)/cpus=12,threads=1,free=110841912,total=517668864
[junit4] 2> NOTE: All tests run in this JVM: [TestSchemaNameResource,
ConnectionManagerTest, TestCustomStream, DistanceUnitsTest,
MultiThreadedOCPTest, TestNumericTerms64, HighlighterMaxOffsetTest,
SynonymTokenizerTest, TestRequestForwarding, TestSolrQueryParser,
CdcrBootstrapTest, TestPhraseSuggestions, TestPushWriter, TestXmlQParser,
RequestHandlersTest, ExternalFileFieldSortTest, TestFilteredDocIdSet,
ShowFileRequestHandlerTest, RecoveryZkTest, TestNamedUpdateProcessors,
TestSolr4Spatial2, TestConfigSetProperties, RecoveryAfterSoftCommitTest,
PrimitiveFieldTypeTest, HdfsTlogReplayBufferedWhileIndexingTest,
URLClassifyProcessorTest, CollectionStateFormat2Test, AssignTest,
TestCollationField, TestSolrDeletionPolicy1, TestInfoStreamLogging,
TestSolrCloudWithHadoopAuthPlugin, TestCSVResponseWriter, TestManagedSchema,
PreAnalyzedFieldTest, FullSolrCloudDistribCmdsTest, SuggesterWFSTTest,
SuggesterTest, DistributedExpandComponentTest, TestSearcherReuse,
BlockCacheTest, DistributedSuggestComponentTest, TestReload,
SolrGraphiteReporterTest, TestSQLHandlerNonCloud, TestClusterProperties,
TestFieldCacheWithThreads, BigEndianAscendingWordDeserializerTest,
MBeansHandlerTest, TestSolrDynamicMBean, TestReloadAndDeleteDocs,
TestQuerySenderNoQuery, TestSolrCoreSnapshots, TestMergePolicyConfig,
FieldMutatingUpdateProcessorTest, OutputWriterTest, CdcrRequestHandlerTest,
TestDeleteCollectionOnDownNodes, HardAutoCommitTest, TestFieldResource,
RulesTest, TestCloudPivotFacet, HdfsRestartWhileUpdatingTest,
LeaderInitiatedRecoveryOnShardRestartTest,
ClassificationUpdateProcessorIntegrationTest, UpdateParamsTest, TestFieldCache,
TestStressReorder, RAMDirectoryFactoryTest, TestPointFields,
TestManagedResourceStorage, SolrCLIZkUtilsTest, CoreAdminHandlerTest,
StatelessScriptUpdateProcessorFactoryTest, TestReRankQParserPlugin,
TestAuthenticationFramework, ConnectionReuseTest, AnalysisErrorHandlingTest,
TestLuceneMatchVersion, TestIndexSearcher, ChangedSchemaMergeTest,
SpatialRPTFieldTypeTest, TestConfig, ReplicationFactorTest, TestWriterPerf,
InfoHandlerTest, HdfsNNFailoverTest, TestMiniSolrCloudClusterSSL,
AutoCommitTest, TestConfigReload, TestComplexPhraseQParserPlugin,
LeaderFailoverAfterPartitionTest, TestScoreJoinQPNoScore,
DistributedFacetPivotLongTailTest, TestSha256AuthenticationProvider,
ShufflingReplicaListTransformerTest, OverseerTest, TestLockTree,
DocValuesMultiTest, TestStressCloudBlindAtomicUpdates,
CurrencyFieldXmlFileTest, TestFieldCacheSort,
DistributedFacetPivotWhiteBoxTest, ExplicitHLLTest,
TestManagedSynonymFilterFactory, EnumFieldTest, LeaderElectionTest,
TestPivotHelperCode, TestSort, TestExclusionRuleCollectionAccess,
BasicDistributedZkTest, UnloadDistributedZkTest, TestZkChroot, TestRecovery,
TermVectorComponentDistributedTest, DistributedTermsComponentTest,
TestCoreContainer, SimpleFacetsTest, TestSolr4Spatial, SolrCoreTest,
SpellCheckComponentTest, QueryElevationComponentTest, PeerSyncTest,
TestFiltering, DistributedQueryElevationComponentTest,
SignatureUpdateProcessorFactoryTest, TestExtendedDismaxParser,
SuggesterFSTTest, SolrRequestParserTest, TestStressLucene, TestCSVLoader,
SolrCoreCheckLockOnStartupTest, DirectUpdateHandlerOptimizeTest,
SortByFunctionTest, SolrInfoMBeanTest, TestSurroundQueryParser,
LukeRequestHandlerTest, TestQueryTypes, FileBasedSpellCheckerTest,
XmlUpdateRequestHandlerTest, TestIndexingPerformance, RequiredFieldsTest,
FastVectorHighlighterTest, RegexBoostProcessorTest, TestJmxIntegration,
UpdateRequestProcessorFactoryTest, TestAnalyzedSuggestions,
TestPartialUpdateDeduplication, PingRequestHandlerTest, HighlighterConfigTest,
TestQuerySenderListener, AlternateDirectoryTest, TestSolrIndexConfig,
CopyFieldTest, SolrIndexConfigTest, BadComponentTest, TestStressRecovery,
TestSolrDeletionPolicy2, TestDocSet, TestBinaryField,
TestPostingsSolrHighlighter, NotRequiredUniqueKeyTest, TestCodecSupport,
EchoParamsTest, TestSweetSpotSimilarityFactory, TestDFRSimilarityFactory,
TestPerFieldSimilarity, TestLMDirichletSimilarityFactory,
TestIBSimilarityFactory, ResourceLoaderTest, TestFastLRUCache, PrimUtilsTest,
DateFieldTest, TestSolrJ, TestLRUCache, TestDocumentBuilder,
SystemInfoHandlerTest, TestRTGBase, TestCrossCoreJoin,
TestHighlightDedupGrouping, TestTolerantSearch,
TestEmbeddedSolrServerConstructors, TestJettySolrRunner, AliasIntegrationTest,
CleanupOldIndexTest, CreateCollectionCleanupTest, DeleteInactiveReplicaTest,
DeleteLastCustomShardedReplicaTest, DeleteReplicaTest, DistributedQueueTest,
DocValuesNotIndexedTest, ForceLeaderTest,
OutOfBoxZkACLAndCredentialsProvidersTest,
OverriddenZkACLAndCredentialsProvidersTest, OverseerModifyCollectionTest,
PeerSyncReplicationTest]
[junit4] Completed [531/680 (1!)] on J2 in 207.79s, 1 test, 1 failure <<<
FAILURES!
[...truncated 62945 lines...]