Build: https://builds.apache.org/job/Lucene-Solr-BadApples-Tests-master/227/
2 tests failed.
FAILED:
org.apache.solr.cloud.LegacyCloudClusterPropTest.testCreateCollectionSwitchLegacyCloud
Error Message:
Failed while waiting for active collection Timeout waiting to see state for collection=legacyFalse :null Live Nodes: [127.0.0.1:34854_solr] Last available state: null
Stack Trace:
java.lang.RuntimeException: Failed while waiting for active collection
Timeout waiting to see state for collection=legacyFalse :null
Live Nodes: [127.0.0.1:34854_solr]
Last available state: null
        at __randomizedtesting.SeedInfo.seed([6047ED57EE908DAC:B1401FD24A9F069E]:0)
        at org.apache.solr.cloud.MiniSolrCloudCluster.waitForActiveCollection(MiniSolrCloudCluster.java:736)
        at org.apache.solr.cloud.MiniSolrCloudCluster.waitForActiveCollection(MiniSolrCloudCluster.java:742)
        at org.apache.solr.cloud.LegacyCloudClusterPropTest.createAndTest(LegacyCloudClusterPropTest.java:96)
        at org.apache.solr.cloud.LegacyCloudClusterPropTest.testCreateCollectionSwitchLegacyCloud(LegacyCloudClusterPropTest.java:78)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1742)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:935)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:971)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:985)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:944)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:830)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:880)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:891)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:748)
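For context, a minimal sketch of the pattern that times out here: the test creates a collection on the MiniSolrCloudCluster and then blocks in waitForActiveCollection until the collection's state appears. This is not the build's code; the config-set name and shard/replica counts are illustrative assumptions.

    // Hedged sketch only; config set name and counts are assumptions, not taken from the test.
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;
    import org.apache.solr.cloud.MiniSolrCloudCluster;

    class WaitForActiveCollectionSketch {
      static void createAndWait(MiniSolrCloudCluster cluster) throws Exception {
        // Issue the CREATE collection admin request through the cluster's CloudSolrClient.
        CollectionAdminRequest.createCollection("legacyFalse", "conf1", 1, 1)
            .process(cluster.getSolrClient());
        // Block until one shard with one active replica is visible in cluster state;
        // the failure above is a timeout inside this wait ("Last available state: null").
        cluster.waitForActiveCollection("legacyFalse", 1, 1);
      }
    }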
FAILED:
org.apache.solr.cloud.api.collections.ShardSplitTest.testSplitShardWithRule
Error Message:
Error from server at http://127.0.0.1:38051/dgfqm/xd: Could not find collection : shardSplitWithRule_rewrite
Stack Trace:
org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error from server at http://127.0.0.1:38051/dgfqm/xd: Could not find collection : shardSplitWithRule_rewrite
        at __randomizedtesting.SeedInfo.seed([6047ED57EE908DAC:E1493CE57362D687]:0)
        at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:643)
        at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:255)
        at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:244)
        at org.apache.solr.client.solrj.impl.LBHttpSolrClient.doRequest(LBHttpSolrClient.java:484)
        at org.apache.solr.client.solrj.impl.LBHttpSolrClient.request(LBHttpSolrClient.java:414)
        at org.apache.solr.client.solrj.impl.CloudSolrClient.sendRequest(CloudSolrClient.java:1110)
        at org.apache.solr.client.solrj.impl.CloudSolrClient.requestWithRetryOnStaleState(CloudSolrClient.java:884)
        at org.apache.solr.client.solrj.impl.CloudSolrClient.request(CloudSolrClient.java:817)
        at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:194)
        at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:211)
        at org.apache.solr.cloud.api.collections.ShardSplitTest.doSplitShardWithRule(ShardSplitTest.java:661)
        at org.apache.solr.cloud.api.collections.ShardSplitTest.testSplitShardWithRule(ShardSplitTest.java:628)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1742)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:935)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:971)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:985)
        at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1063)
        at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1035)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:944)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:830)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:880)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:891)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:748)
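For context, a minimal sketch of the kind of SPLITSHARD request issued from doSplitShardWithRule; the RemoteSolrException above means the server could not find shardSplitWithRule_rewrite in cluster state when the request was processed. This is not the build's code; the shard name and client wiring are illustrative assumptions.

    // Hedged sketch only; the shard name and how the SolrClient is obtained are assumptions.
    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;

    class SplitShardSketch {
      static void splitFirstShard(SolrClient client, String collection) throws Exception {
        CollectionAdminRequest.SplitShard splitShard =
            CollectionAdminRequest.splitShard(collection).setShardName("shard1");
        // process() sends the admin request and throws RemoteSolrException if the server
        // rejects it, e.g. "Could not find collection : <name>".
        splitShard.process(client);
      }
    }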
Build Log:
[...truncated 13740 lines...]
[junit4] Suite: org.apache.solr.cloud.api.collections.ShardSplitTest
[junit4] 2> 1618534 INFO
(SUITE-ShardSplitTest-seed#[6047ED57EE908DAC]-worker) [ ]
o.a.s.SolrTestCaseJ4 SecureRandom sanity checks:
test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
[junit4] 2> Creating dataDir:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/init-core-data-001
[junit4] 2> 1618572 WARN
(SUITE-ShardSplitTest-seed#[6047ED57EE908DAC]-worker) [ ]
o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=1 numCloses=1
[junit4] 2> 1618572 INFO
(SUITE-ShardSplitTest-seed#[6047ED57EE908DAC]-worker) [ ]
o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true)
w/NUMERIC_DOCVALUES_SYSPROP=false
[junit4] 2> 1618587 INFO
(SUITE-ShardSplitTest-seed#[6047ED57EE908DAC]-worker) [ ]
o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via:
@org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl=https://issues.apache.org/jira/browse/SOLR-5776)
[junit4] 2> 1618587 INFO
(SUITE-ShardSplitTest-seed#[6047ED57EE908DAC]-worker) [ ]
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property:
/dgfqm/xd
[junit4] 2> 1618664 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 1618700 INFO (ZkTestServer Run Thread) [ ]
o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
[junit4] 2> 1618700 INFO (ZkTestServer Run Thread) [ ]
o.a.s.c.ZkTestServer Starting server
[junit4] 2> 1618745 ERROR (ZkTestServer Run Thread) [ ]
o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper
server won't take any action on ERROR or SHUTDOWN server state changes
[junit4] 2> 1618800 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer start zk server on port:36502
[junit4] 2> 1618800 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:36502
[junit4] 2> 1618800 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer connecting to 127.0.0.1 36502
[junit4] 2> 1618834 INFO (zkConnectionManagerCallback-3521-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1618839 INFO (zkConnectionManagerCallback-3523-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1618840 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer put
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
to /configs/conf1/solrconfig.xml
[junit4] 2> 1618841 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer put
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/core/src/test-files/solr/collection1/conf/schema15.xml
to /configs/conf1/schema.xml
[junit4] 2> 1618842 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer put
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
[junit4] 2> 1618851 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer put
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
to /configs/conf1/stopwords.txt
[junit4] 2> 1618852 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer put
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/core/src/test-files/solr/collection1/conf/protwords.txt
to /configs/conf1/protwords.txt
[junit4] 2> 1618853 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer put
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/core/src/test-files/solr/collection1/conf/currency.xml
to /configs/conf1/currency.xml
[junit4] 2> 1618854 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer put
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
to /configs/conf1/enumsConfig.xml
[junit4] 2> 1618855 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer put
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
to /configs/conf1/open-exchange-rates.json
[junit4] 2> 1618856 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer put
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
to /configs/conf1/mapping-ISOLatin1Accent.txt
[junit4] 2> 1618856 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer put
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
to /configs/conf1/old_synonyms.txt
[junit4] 2> 1618857 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkTestServer put
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
to /configs/conf1/synonyms.txt
[junit4] 2> 1618871 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless
explicitly asked otherwise
[junit4] 2> 1619284 WARN
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 1619284 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
[junit4] 2> 1619284 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1
...
[junit4] 2> 1619284 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z;
git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
[junit4] 2> 1619297 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 1619297 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 1619297 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 1619299 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@60967484{/dgfqm/xd,null,AVAILABLE}
[junit4] 2> 1619299 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.e.j.s.AbstractConnector Started
ServerConnector@17437b04{HTTP/1.1,[http/1.1]}{127.0.0.1:45893}
[junit4] 2> 1619299 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.e.j.s.Server Started @1619355ms
[junit4] 2> 1619299 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/tempDir-001/control/data,
hostContext=/dgfqm/xd, hostPort=45893,
coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/control-001/cores}
[junit4] 2> 1619300 ERROR
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may
be missing or incomplete.
[junit4] 2> 1619300 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.s.SolrDispatchFilter Using logger factory
org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 1619300 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™
version 8.0.0
[junit4] 2> 1619300 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on
port null
[junit4] 2> 1619300 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1619300 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2018-12-06T05:22:11.802Z
[junit4] 2> 1619302 INFO (zkConnectionManagerCallback-3525-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1619303 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1619303 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.SolrXmlConfig Loading container configuration from
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/control-001/solr.xml
[junit4] 2> 1619322 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.SolrXmlConfig Configuration parameter
autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 1619322 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.SolrXmlConfig Configuration parameter
autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 1619324 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.SolrXmlConfig MBean server found:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44, but no JMX reporters were
configured - adding default JMX reporter.
[junit4] 2> 1619827 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:36502/solr
[junit4] 2> 1619845 INFO (zkConnectionManagerCallback-3529-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1619863 INFO (zkConnectionManagerCallback-3531-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1620254 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC])
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerElectionContext I am going
to be the leader 127.0.0.1:45893_dgfqm%2Fxd
[junit4] 2> 1620254 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC])
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.Overseer Overseer
(id=73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000) starting
[junit4] 2> 1620309 INFO (zkConnectionManagerCallback-3538-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1620356 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC])
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.s.i.ZkClientClusterStateProvider
Cluster at 127.0.0.1:36502/solr ready
[junit4] 2> 1620356 INFO
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.Overseer Starting to work on the
main queue : 127.0.0.1:45893_dgfqm%2Fxd
[junit4] 2> 1620356 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Process
current queue of overseer operations
[junit4] 2> 1620426 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC])
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.ZkController Register node as live
in ZooKeeper:/live_nodes/127.0.0.1:45893_dgfqm%2Fxd
[junit4] 2> 1620469 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Cleaning up
work-queue. #Running tasks: 0 #Completed tasks: 0
[junit4] 2> 1620469 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor RunningTasks:
[]
[junit4] 2> 1620470 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor BlockedTasks:
[]
[junit4] 2> 1620470 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
CompletedTasks: []
[junit4] 2> 1620470 INFO
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
RunningZKTasks: []
[junit4] 2> 1620521 INFO (zkCallback-3530-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 1620564 INFO (zkCallback-3537-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 1620601 INFO
(OverseerAutoScalingTriggerThread-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[ ] o.a.s.c.a.NodeLostTrigger NodeLostTrigger .auto_add_replicas - Initial
livenodes: [127.0.0.1:45893_dgfqm%2Fxd]
[junit4] 2> 1620637 INFO (ScheduledTrigger-3605-thread-2) [ ]
o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with
currently live nodes: 1 and last live nodes: 1
[junit4] 2> 1620639 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC])
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.h.a.MetricsHistoryHandler No .system
collection, keeping metrics history in memory.
[junit4] 2> 1620760 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC])
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.node' (registry 'solr.node') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44
[junit4] 2> 1620816 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC])
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jvm' (registry 'solr.jvm') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44
[junit4] 2> 1620816 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC])
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jetty' (registry 'solr.jetty') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44
[junit4] 2> 1620817 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC])
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.CorePropertiesLocator Found 0 core
definitions underneath
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/control-001/cores
[junit4] 2> 1621038 INFO (zkConnectionManagerCallback-3544-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1621065 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 1621067 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:36502/solr
ready
[junit4] 2> 1621123 INFO (qtp376042374-12930)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.h.a.CollectionsHandler Invoked
Collection Action :create with params
collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:45893_dgfqm%252Fxd&wt=javabin&version=2
and sendToOCPQueue=true
[junit4] 2> 1621176 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Got 1 tasks
from work-queue :
[[org.apache.solr.cloud.OverseerTaskQueue$QueueEvent@2e684ecf]]
[junit4] 2> 1621176 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Marked task
[/overseer/collection-queue-work/qn-0000000000] as running
[junit4] 2> 1621176 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Overseer
Collection Message Handler: Get the message
id:/overseer/collection-queue-work/qn-0000000000 message:{
[junit4] 2> "name":"control_collection",
[junit4] 2> "fromApi":"true",
[junit4] 2> "collection.configName":"conf1",
[junit4] 2> "numShards":"1",
[junit4] 2> "createNodeSet":"127.0.0.1:45893_dgfqm%2Fxd",
[junit4] 2> "nrtReplicas":"1",
[junit4] 2> "stateFormat":"2",
[junit4] 2> "replicationFactor":"1",
[junit4] 2> "operation":"create"}
[junit4] 2> 1621177 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Cleaning up
work-queue. #Running tasks: 1 #Completed tasks: 0
[junit4] 2> 1621177 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor RunningTasks:
[/overseer/collection-queue-work/qn-0000000000]
[junit4] 2> 1621177 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor BlockedTasks:
[]
[junit4] 2> 1621177 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
CompletedTasks: []
[junit4] 2> 1621177 INFO
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
RunningZKTasks: [/overseer/collection-queue-work/qn-0000000000]
[junit4] 2> 1621177 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Runner
processing /overseer/collection-queue-work/qn-0000000000
[junit4] 2> 1621177 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.OverseerCollectionMessageHandler
OverseerCollectionMessageHandler.processMessage : create , {
[junit4] 2> "name":"control_collection",
[junit4] 2> "fromApi":"true",
[junit4] 2> "collection.configName":"conf1",
[junit4] 2> "numShards":"1",
[junit4] 2> "createNodeSet":"127.0.0.1:45893_dgfqm%2Fxd",
[junit4] 2> "nrtReplicas":"1",
[junit4] 2> "stateFormat":"2",
[junit4] 2> "replicationFactor":"1",
[junit4] 2> "operation":"create"}
[junit4] 2> 1621177 INFO
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.CreateCollectionCmd Create
collection control_collection
[junit4] 2> 1621178 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.OverseerCollectionMessageHandler
creating collections conf node /collections/control_collection
[junit4] 2> 1621178 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.CreateCollectionCmd Check for
collection zkNode:control_collection
[junit4] 2> 1621193 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.CreateCollectionCmd Collection
zkNode exists
[junit4] 2> 1621269 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.Overseer processMessage: queueSize:
1, message = {
[junit4] 2> "name":"control_collection",
[junit4] 2> "fromApi":"true",
[junit4] 2> "collection.configName":"conf1",
[junit4] 2> "numShards":"1",
[junit4] 2> "createNodeSet":"127.0.0.1:45893_dgfqm%2Fxd",
[junit4] 2> "nrtReplicas":"1",
[junit4] 2> "stateFormat":"2",
[junit4] 2> "replicationFactor":"1",
[junit4] 2> "operation":"create"} current state version: 0
[junit4] 2> 1621269 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ClusterStateMutator building a new
cName: control_collection
[junit4] 2> 1621281 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ZkStateWriter going to
create_collection /collections/control_collection/state.json
[junit4] 2> 1621382 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.Assign shardnames [shard1] NRT 1
TLOG 0 PULL 0 , policy null, nodeList [127.0.0.1:45893_dgfqm%2Fxd]
[junit4] 2> 1621422 INFO (qtp376042374-12932)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.s.HttpSolrCall [admin] webapp=null
path=/admin/metrics
params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core}
status=0 QTime=0
[junit4] 2> 1621455 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.s.HttpSolrCall [admin] webapp=null
path=/admin/metrics
params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core}
status=0 QTime=0
[junit4] 2> 1621456 INFO (qtp376042374-12929)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.s.HttpSolrCall [admin] webapp=null
path=/admin/metrics
params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core}
status=0 QTime=0
[junit4] 2> 1621457 INFO (qtp376042374-12931)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.s.HttpSolrCall [admin] webapp=null
path=/admin/metrics
params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core}
status=0 QTime=0
[junit4] 2> 1621458 INFO (qtp376042374-12932)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.s.HttpSolrCall [admin] webapp=null
path=/admin/metrics
params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core}
status=0 QTime=0
[junit4] 2> 1621471 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.CreateCollectionCmd Creating
SolrCores for new collection control_collection, shardNames [shard1] , message
: {
[junit4] 2> "name":"control_collection",
[junit4] 2> "fromApi":"true",
[junit4] 2> "collection.configName":"conf1",
[junit4] 2> "numShards":"1",
[junit4] 2> "createNodeSet":"127.0.0.1:45893_dgfqm%2Fxd",
[junit4] 2> "nrtReplicas":"1",
[junit4] 2> "stateFormat":"2",
[junit4] 2> "replicationFactor":"1",
[junit4] 2> "operation":"create"}
[junit4] 2> 1621473 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.CreateCollectionCmd Creating
core control_collection_shard1_replica_n1 as part of shard shard1 of collection
control_collection on 127.0.0.1:45893_dgfqm%2Fxd
[junit4] 2> 1621508 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd x:control_collection_shard1_replica_n1]
o.a.s.h.a.CoreAdminOperation core create command
qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 1621508 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd x:control_collection_shard1_replica_n1]
o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4
transient cores
[junit4] 2> 1621545 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.Overseer processMessage: queueSize:
1, message = {
[junit4] 2> "core":"control_collection_shard1_replica_n1",
[junit4] 2> "roles":null,
[junit4] 2> "base_url":"http://127.0.0.1:45893/dgfqm/xd",
[junit4] 2> "node_name":"127.0.0.1:45893_dgfqm%2Fxd",
[junit4] 2> "numShards":"1",
[junit4] 2> "state":"down",
[junit4] 2> "shard":"shard1",
[junit4] 2> "collection":"control_collection",
[junit4] 2> "type":"NRT",
[junit4] 2> "operation":"state"} current state version: 0
[junit4] 2> 1621545 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ReplicaMutator Update state
numShards=1 message={
[junit4] 2> "core":"control_collection_shard1_replica_n1",
[junit4] 2> "roles":null,
[junit4] 2> "base_url":"http://127.0.0.1:45893/dgfqm/xd",
[junit4] 2> "node_name":"127.0.0.1:45893_dgfqm%2Fxd",
[junit4] 2> "numShards":"1",
[junit4] 2> "state":"down",
[junit4] 2> "shard":"shard1",
[junit4] 2> "collection":"control_collection",
[junit4] 2> "type":"NRT",
[junit4] 2> "operation":"state"}
[junit4] 2> 1621546 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ReplicaMutator Will update state
for replica:
core_node2:{"core":"control_collection_shard1_replica_n1","base_url":"http://127.0.0.1:45893/dgfqm/xd","node_name":"127.0.0.1:45893_dgfqm%2Fxd","state":"down","type":"NRT"}
[junit4] 2> 1621546 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ReplicaMutator Collection is now:
DocCollection(control_collection//collections/control_collection/state.json/0)={
[junit4] 2> "pullReplicas":"0",
[junit4] 2> "replicationFactor":"1",
[junit4] 2> "router":{"name":"compositeId"},
[junit4] 2> "maxShardsPerNode":"1",
[junit4] 2> "autoAddReplicas":"false",
[junit4] 2> "nrtReplicas":"1",
[junit4] 2> "tlogReplicas":"0",
[junit4] 2> "shards":{"shard1":{
[junit4] 2> "range":"80000000-7fffffff",
[junit4] 2> "state":"active",
[junit4] 2> "replicas":{"core_node2":{
[junit4] 2> "core":"control_collection_shard1_replica_n1",
[junit4] 2> "base_url":"http://127.0.0.1:45893/dgfqm/xd",
[junit4] 2> "node_name":"127.0.0.1:45893_dgfqm%2Fxd",
[junit4] 2> "state":"down",
[junit4] 2> "type":"NRT"}}}}}
[junit4] 2> 1621638 INFO (ScheduledTrigger-3605-thread-1) [ ]
o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with
currently live nodes: 1 and last live nodes: 1
[junit4] 2> 1621662 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ZkStateWriter going to
update_collection /collections/control_collection/state.json version: 0
[junit4] 2> 1622598 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene
MatchVersion: 8.0.0
[junit4] 2> 1622695 INFO (ScheduledTrigger-3605-thread-4) [ ]
o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with
currently live nodes: 1 and last live nodes: 1
[junit4] 2> 1622700 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.s.IndexSchema
[control_collection_shard1_replica_n1] Schema name=test
[junit4] 2> 1622951 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema
test/1.6 with uniqueid field id
[junit4] 2> 1622993 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore
'control_collection_shard1_replica_n1' using configuration from collection
control_collection, trusted=true
[junit4] 2> 1622994 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX
monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry
'solr.core.control_collection.shard1.replica_n1') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44
[junit4] 2> 1622994 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore
[[control_collection_shard1_replica_n1] ] Opening new SolrCore at
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/control-001/cores/control_collection_shard1_replica_n1],
dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/control-001/cores/control_collection_shard1_replica_n1/data/]
[junit4] 2> 1622997 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy:
[LogDocMergePolicy: minMergeSize=1000, mergeFactor=29,
maxMergeSize=9223372036854775807,
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false,
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.31371475675329086]
[junit4] 2> 1623045 WARN (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.RequestHandlers INVALID
paramSet a in requestHandler {type = requestHandler,name = /dump,class =
DumpRequestHandler,attributes = {initParams=a, name=/dump,
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 1623142 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog
implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1623142 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.u.UpdateLog Initializing
UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1623143 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit:
disabled
[junit4] 2> 1623143 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit:
disabled
[junit4] 2> 1623144 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=44, maxMergeAtOnceExplicit=10,
maxMergedSegmentMB=95.3564453125, floorSegmentMB=1.4462890625,
forceMergeDeletesPctAllowed=28.58954155454867, segmentsPerTier=33.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.20359640935856219,
deletesPctAllowed=26.79361551345278
[junit4] 2> 1623144 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening
[Searcher@275efb37[control_collection_shard1_replica_n1] main]
[junit4] 2> 1623145 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.r.ManagedResourceStorage
Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 1623146 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded
null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1623146 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will
be reserved for 10000ms.
[junit4] 2> 1623146 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max
version in index or recent updates, using new clock 1619078661430837248
[junit4] 2> 1623149 INFO
(searcherExecutor-3612-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd
x:control_collection_shard1_replica_n1 c:control_collection s:shard1)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore
[control_collection_shard1_replica_n1] Registered new searcher
Searcher@275efb37[control_collection_shard1_replica_n1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1623163 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update
of terms at /collections/control_collection/terms/shard1 to
Terms{values={core_node2=0}, version=0}
[junit4] 2> 1623163 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase
make sure parent is created /collections/control_collection/leaders/shard1
[junit4] 2> 1623177 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Got 0 tasks
from work-queue : [[]]
[junit4] 2> 1623177 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Cleaning up
work-queue. #Running tasks: 1 #Completed tasks: 0
[junit4] 2> 1623177 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor RunningTasks:
[/overseer/collection-queue-work/qn-0000000000]
[junit4] 2> 1623177 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor BlockedTasks:
[]
[junit4] 2> 1623177 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
CompletedTasks: []
[junit4] 2> 1623177 INFO
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
RunningZKTasks: [/overseer/collection-queue-work/qn-0000000000]
[junit4] 2> 1623178 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext
Enough replicas found to continue.
[junit4] 2> 1623178 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I
may be the new leader - try and sync
[junit4] 2> 1623178 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy Sync replicas to
http://127.0.0.1:45893/dgfqm/xd/control_collection_shard1_replica_n1/
[junit4] 2> 1623178 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy Sync Success - now
sync replicas to me
[junit4] 2> 1623179 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy
http://127.0.0.1:45893/dgfqm/xd/control_collection_shard1_replica_n1/ has no
replicas
[junit4] 2> 1623179 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase
Creating leader registration node
/collections/control_collection/leaders/shard1/leader after winning as
/collections/control_collection/leader_elect/shard1/election/73788473994969092-core_node2-n_0000000000
[junit4] 2> 1623180 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I am
the new leader:
http://127.0.0.1:45893/dgfqm/xd/control_collection_shard1_replica_n1/ shard1
[junit4] 2> 1623189 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.Overseer processMessage: queueSize:
1, message = {
[junit4] 2> "operation":"leader",
[junit4] 2> "shard":"shard1",
[junit4] 2> "collection":"control_collection",
[junit4] 2> "base_url":"http://127.0.0.1:45893/dgfqm/xd",
[junit4] 2> "core":"control_collection_shard1_replica_n1",
[junit4] 2> "state":"active"} current state version: 0
[junit4] 2> 1623290 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ZkStateWriter going to
update_collection /collections/control_collection/state.json version: 1
[junit4] 2> 1623290 INFO (zkCallback-3530-thread-1) [ ]
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent
state:SyncConnected type:NodeDataChanged
path:/collections/control_collection/state.json] for collection
[control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 1623290 INFO (zkCallback-3530-thread-2) [ ]
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent
state:SyncConnected type:NodeDataChanged
path:/collections/control_collection/state.json] for collection
[control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 1623292 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.c.ZkController I am the leader,
no recovery necessary
[junit4] 2> 1623293 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.Overseer processMessage: queueSize:
1, message = {
[junit4] 2> "core":"control_collection_shard1_replica_n1",
[junit4] 2> "core_node_name":"core_node2",
[junit4] 2> "roles":null,
[junit4] 2> "base_url":"http://127.0.0.1:45893/dgfqm/xd",
[junit4] 2> "node_name":"127.0.0.1:45893_dgfqm%2Fxd",
[junit4] 2> "numShards":"1",
[junit4] 2> "state":"active",
[junit4] 2> "shard":"shard1",
[junit4] 2> "collection":"control_collection",
[junit4] 2> "type":"NRT",
[junit4] 2> "operation":"state"} current state version: 0
[junit4] 2> 1623293 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ReplicaMutator Update state
numShards=1 message={
[junit4] 2> "core":"control_collection_shard1_replica_n1",
[junit4] 2> "core_node_name":"core_node2",
[junit4] 2> "roles":null,
[junit4] 2> "base_url":"http://127.0.0.1:45893/dgfqm/xd",
[junit4] 2> "node_name":"127.0.0.1:45893_dgfqm%2Fxd",
[junit4] 2> "numShards":"1",
[junit4] 2> "state":"active",
[junit4] 2> "shard":"shard1",
[junit4] 2> "collection":"control_collection",
[junit4] 2> "type":"NRT",
[junit4] 2> "operation":"state"}
[junit4] 2> 1623293 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ReplicaMutator Will update state
for replica:
core_node2:{"core":"control_collection_shard1_replica_n1","base_url":"http://127.0.0.1:45893/dgfqm/xd","node_name":"127.0.0.1:45893_dgfqm%2Fxd","state":"active","type":"NRT","leader":"true"}
[junit4] 2> 1623293 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ReplicaMutator Collection is now:
DocCollection(control_collection//collections/control_collection/state.json/2)={
[junit4] 2> "pullReplicas":"0",
[junit4] 2> "replicationFactor":"1",
[junit4] 2> "router":{"name":"compositeId"},
[junit4] 2> "maxShardsPerNode":"1",
[junit4] 2> "autoAddReplicas":"false",
[junit4] 2> "nrtReplicas":"1",
[junit4] 2> "tlogReplicas":"0",
[junit4] 2> "shards":{"shard1":{
[junit4] 2> "range":"80000000-7fffffff",
[junit4] 2> "state":"active",
[junit4] 2> "replicas":{"core_node2":{
[junit4] 2> "core":"control_collection_shard1_replica_n1",
[junit4] 2> "base_url":"http://127.0.0.1:45893/dgfqm/xd",
[junit4] 2> "node_name":"127.0.0.1:45893_dgfqm%2Fxd",
[junit4] 2> "state":"active",
[junit4] 2> "type":"NRT",
[junit4] 2> "leader":"true"}}}}}
[junit4] 2> 1623294 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd c:control_collection s:shard1
x:control_collection_shard1_replica_n1] o.a.s.s.HttpSolrCall [admin]
webapp=null path=/admin/cores
params={qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT}
status=0 QTime=1785
[junit4] 2> 1623294 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.CreateCollectionCmd Finished
create command on all shards for collection: control_collection
[junit4] 2> 1623294 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Completed
task:[/overseer/collection-queue-work/qn-0000000000]
[junit4] 2> 1623303 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Marked task
[/overseer/collection-queue-work/qn-0000000000] as completed.
[junit4] 2> 1623303 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor RunningTasks:
[]
[junit4] 2> 1623303 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor BlockedTasks:
[]
[junit4] 2> 1623303 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
CompletedTasks: [/overseer/collection-queue-work/qn-0000000000]
[junit4] 2> 1623303 INFO
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
RunningZKTasks: [/overseer/collection-queue-work/qn-0000000000]
[junit4] 2> 1623303 DEBUG
(OverseerThreadFactory-3607-thread-1-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Overseer
Collection Message Handler: Message
id:/overseer/collection-queue-work/qn-0000000000 complete,
response:{success={127.0.0.1:45893_dgfqm%2Fxd={responseHeader={status=0,QTime=1785},core=control_collection_shard1_replica_n1}}}
[junit4] 2> 1623304 INFO (qtp376042374-12930)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.h.a.CollectionsHandler Wait for new
collection to be active for at most 45 seconds. Check all shard replicas
[junit4] 2> 1623394 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ZkStateWriter going to
update_collection /collections/control_collection/state.json version: 2
[junit4] 2> 1623394 INFO (zkCallback-3530-thread-3) [ ]
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent
state:SyncConnected type:NodeDataChanged
path:/collections/control_collection/state.json] for collection
[control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 1623394 INFO (zkCallback-3530-thread-1) [ ]
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent
state:SyncConnected type:NodeDataChanged
path:/collections/control_collection/state.json] for collection
[control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 1623394 INFO (zkCallback-3530-thread-2) [ ]
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent
state:SyncConnected type:NodeDataChanged
path:/collections/control_collection/state.json] for collection
[control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 1623395 INFO (qtp376042374-12930)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.s.HttpSolrCall [admin] webapp=null
path=/admin/collections
params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:45893_dgfqm%252Fxd&wt=javabin&version=2}
status=0 QTime=2271
[junit4] 2> 1623399 INFO (zkConnectionManagerCallback-3549-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1623400 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 1623401 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:36502/solr
ready
[junit4] 2> 1623401 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection
loss:false
[junit4] 2> 1623402 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.h.a.CollectionsHandler Invoked
Collection Action :create with params
collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=2&createNodeSet=&stateFormat=1&wt=javabin&version=2
and sendToOCPQueue=true
[junit4] 2> 1623413 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Got 1 tasks
from work-queue :
[[org.apache.solr.cloud.OverseerTaskQueue$QueueEvent@2e684ed1]]
[junit4] 2> 1623413 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Marked task
[/overseer/collection-queue-work/qn-0000000002] as running
[junit4] 2> 1623413 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Overseer
Collection Message Handler: Get the message
id:/overseer/collection-queue-work/qn-0000000002 message:{
[junit4] 2> "name":"collection1",
[junit4] 2> "fromApi":"true",
[junit4] 2> "collection.configName":"conf1",
[junit4] 2> "numShards":"2",
[junit4] 2> "createNodeSet":"",
[junit4] 2> "stateFormat":"1",
[junit4] 2> "nrtReplicas":"1",
[junit4] 2> "replicationFactor":"1",
[junit4] 2> "operation":"create"}
[junit4] 2> 1623414 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Runner
processing /overseer/collection-queue-work/qn-0000000002
[junit4] 2> 1623414 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.OverseerCollectionMessageHandler
OverseerCollectionMessageHandler.processMessage : create , {
[junit4] 2> "name":"collection1",
[junit4] 2> "fromApi":"true",
[junit4] 2> "collection.configName":"conf1",
[junit4] 2> "numShards":"2",
[junit4] 2> "createNodeSet":"",
[junit4] 2> "stateFormat":"1",
[junit4] 2> "nrtReplicas":"1",
[junit4] 2> "replicationFactor":"1",
[junit4] 2> "operation":"create"}
[junit4] 2> 1623414 INFO
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.CreateCollectionCmd Create
collection collection1
[junit4] 2> 1623414 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Cleaning up
work-queue. #Running tasks: 1 #Completed tasks: 1
[junit4] 2> 1623415 INFO
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskQueue Response ZK path:
/overseer/collection-queue-work/qnr-0000000000 doesn't exist. Requestor may
have disconnected from ZooKeeper
[junit4] 2> 1623416 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor RunningTasks:
[/overseer/collection-queue-work/qn-0000000002]
[junit4] 2> 1623416 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.OverseerCollectionMessageHandler
creating collections conf node /collections/collection1
[junit4] 2> 1623416 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor BlockedTasks:
[]
[junit4] 2> 1623416 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
CompletedTasks: []
[junit4] 2> 1623416 INFO
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
RunningZKTasks: [/overseer/collection-queue-work/qn-0000000002]
[junit4] 2> 1623416 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.CreateCollectionCmd Check for
collection zkNode:collection1
[junit4] 2> 1623417 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.CreateCollectionCmd Collection
zkNode exists
[junit4] 2> 1623418 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.Overseer processMessage: queueSize:
1, message = {
[junit4] 2> "name":"collection1",
[junit4] 2> "fromApi":"true",
[junit4] 2> "collection.configName":"conf1",
[junit4] 2> "numShards":"2",
[junit4] 2> "createNodeSet":"",
[junit4] 2> "stateFormat":"1",
[junit4] 2> "nrtReplicas":"1",
[junit4] 2> "replicationFactor":"1",
[junit4] 2> "operation":"create"} current state version: 0
[junit4] 2> 1623418 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.ClusterStateMutator building a new
cName: collection1
[junit4] 2> 1623626 WARN
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.CreateCollectionCmd It is
unusual to create a collection (collection1) without cores.
[junit4] 2> 1623627 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.a.c.CreateCollectionCmd Finished
create command for collection: collection1
[junit4] 2> 1623627 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Completed
task:[/overseer/collection-queue-work/qn-0000000002]
[junit4] 2> 1623628 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Marked task
[/overseer/collection-queue-work/qn-0000000002] as completed.
[junit4] 2> 1623628 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor RunningTasks:
[]
[junit4] 2> 1623628 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor BlockedTasks:
[]
[junit4] 2> 1623628 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
CompletedTasks: [/overseer/collection-queue-work/qn-0000000002]
[junit4] 2> 1623628 INFO
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
RunningZKTasks: [/overseer/collection-queue-work/qn-0000000002]
[junit4] 2> 1623628 DEBUG
(OverseerThreadFactory-3607-thread-2-processing-n:127.0.0.1:45893_dgfqm%2Fxd)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Overseer
Collection Message Handler: Message
id:/overseer/collection-queue-work/qn-0000000002 complete, response:{}
[junit4] 2> 1623628 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.h.a.CollectionsHandler Wait for new
collection to be active for at most 45 seconds. Check all shard replicas
[junit4] 2> 1623629 INFO (qtp376042374-12933)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.s.HttpSolrCall [admin] webapp=null
path=/admin/collections
params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=2&createNodeSet=&stateFormat=1&wt=javabin&version=2}
status=0 QTime=227
[junit4] 2> 1623630 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.AbstractFullDistribZkTestBase Creating jetty instances
pullReplicaCount=0 numOtherReplicas=4
[junit4] 2> 1623695 INFO (ScheduledTrigger-3605-thread-1) [ ]
o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with
currently live nodes: 1 and last live nodes: 1
[junit4] 2> 1624154 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/shard-1-001
of type NRT
[junit4] 2> 1624155 WARN (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 1624155 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
[junit4] 2> 1624155 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 1624155 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git:
c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
[junit4] 2> 1624156 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 1624156 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 1624156 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 1624156 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@2990dbb9{/dgfqm/xd,null,AVAILABLE}
[junit4] 2> 1624157 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@10342a0{HTTP/1.1,[http/1.1]}{127.0.0.1:38538}
[junit4] 2> 1624157 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.Server Started @1624212ms
[junit4] 2> 1624157 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/tempDir-001/jetty1,
solrconfig=solrconfig.xml, hostContext=/dgfqm/xd, hostPort=38538,
coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/shard-1-001/cores}
[junit4] 2> 1624157 ERROR (closeThreadPool-3550-thread-1) [ ]
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1624157 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter Using logger factory
org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 1624157 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
8.0.0
[junit4] 2> 1624157 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1624157 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1624157 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2018-12-06T05:22:16.659Z
[junit4] 2> 1624197 INFO (zkConnectionManagerCallback-3552-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1624238 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1624238 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/shard-1-001/solr.xml
[junit4] 2> 1624241 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay
is ignored
[junit4] 2> 1624241 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter
autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 1624251 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.SolrXmlConfig MBean server found:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44, but no JMX reporters were
configured - adding default JMX reporter.
[junit4] 2> 1624708 INFO (ScheduledTrigger-3605-thread-1) [ ]
o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with
currently live nodes: 1 and last live nodes: 1
[junit4] 2> 1625417 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Got 0 tasks
from work-queue : [[]]
[junit4] 2> 1625437 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Cleaning up
work-queue. #Running tasks: 0 #Completed tasks: 1
[junit4] 2> 1625437 INFO
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskQueue Response ZK path:
/overseer/collection-queue-work/qnr-0000000002 doesn't exist. Requestor may
have disconnected from ZooKeeper
[junit4] 2> 1625437 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor RunningTasks:
[]
[junit4] 2> 1625437 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor BlockedTasks:
[]
[junit4] 2> 1625437 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
CompletedTasks: []
[junit4] 2> 1625437 INFO
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
RunningZKTasks: []
[junit4] 2> 1625652 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:36502/solr
[junit4] 2> 1625654 INFO (zkConnectionManagerCallback-3556-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1625681 INFO (zkConnectionManagerCallback-3558-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1625684 INFO (closeThreadPool-3550-thread-1)
[n:127.0.0.1:38538_dgfqm%2Fxd ] o.a.s.c.c.ZkStateReader Updated live nodes
from ZooKeeper... (0) -> (1)
[junit4] 2> 1625686 INFO (closeThreadPool-3550-thread-1)
[n:127.0.0.1:38538_dgfqm%2Fxd ] o.a.s.c.ZkController Publish
node=127.0.0.1:38538_dgfqm%2Fxd as DOWN
[junit4] 2> 1625687 INFO (closeThreadPool-3550-thread-1)
[n:127.0.0.1:38538_dgfqm%2Fxd ] o.a.s.c.TransientSolrCoreCacheDefault
Allocating transient cache for 4 transient cores
[junit4] 2> 1625687 INFO (closeThreadPool-3550-thread-1)
[n:127.0.0.1:38538_dgfqm%2Fxd ] o.a.s.c.ZkController Register node as live
in ZooKeeper:/live_nodes/127.0.0.1:38538_dgfqm%2Fxd
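The "(n) -> (m)" live-node updates that follow are watches firing on the /live_nodes ephemeral znodes registered here. A minimal, hypothetical sketch of reading that set through ZkStateReader, assuming this run's ZooKeeper at 127.0.0.1:36502/solr and a made-up class name:

import org.apache.solr.common.cloud.ZkStateReader;

public class ShowLiveNodes {
  public static void main(String[] args) throws Exception {
    // Connect a standalone ZkStateReader to the test cluster's ZooKeeper and
    // print the current live-node set (the set the log entries below count).
    try (ZkStateReader reader = new ZkStateReader("127.0.0.1:36502/solr", 10000, 10000)) {
      reader.createClusterStateWatchersAndUpdate();
      System.out.println(reader.getClusterState().getLiveNodes());
    }
  }
}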
[junit4] 2> 1625708 INFO (ScheduledTrigger-3605-thread-1) [ ]
o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with
currently live nodes: 1 and last live nodes: 1
[junit4] 2> 1625709 INFO (zkCallback-3530-thread-2) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 1625709 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.Overseer processMessage: queueSize:
1, message = {
[junit4] 2> "operation":"downnode",
[junit4] 2> "node_name":"127.0.0.1:38538_dgfqm%2Fxd"} current state
version: 1
[junit4] 2> 1625709 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.NodeMutator DownNode state invoked
for node: 127.0.0.1:38538_dgfqm%2Fxd
[junit4] 2> 1625752 INFO (zkCallback-3537-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 1625776 INFO (zkCallback-3548-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 1625777 INFO (zkCallback-3557-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 1625870 INFO (zkConnectionManagerCallback-3565-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1625871 INFO (closeThreadPool-3550-thread-1)
[n:127.0.0.1:38538_dgfqm%2Fxd ] o.a.s.c.c.ZkStateReader Updated live nodes
from ZooKeeper... (0) -> (2)
[junit4] 2> 1625871 INFO (closeThreadPool-3550-thread-1)
[n:127.0.0.1:38538_dgfqm%2Fxd ] o.a.s.c.s.i.ZkClientClusterStateProvider
Cluster at 127.0.0.1:36502/solr ready
[junit4] 2> 1625875 INFO (closeThreadPool-3550-thread-1)
[n:127.0.0.1:38538_dgfqm%2Fxd ] o.a.s.h.a.MetricsHistoryHandler No .system
collection, keeping metrics history in memory.
[junit4] 2> 1625977 INFO (closeThreadPool-3550-thread-1)
[n:127.0.0.1:38538_dgfqm%2Fxd ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.node' (registry 'solr.node') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44
[junit4] 2> 1626022 INFO (closeThreadPool-3550-thread-1)
[n:127.0.0.1:38538_dgfqm%2Fxd ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jvm' (registry 'solr.jvm') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44
[junit4] 2> 1626022 INFO (closeThreadPool-3550-thread-1)
[n:127.0.0.1:38538_dgfqm%2Fxd ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jetty' (registry 'solr.jetty') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44
[junit4] 2> 1626036 INFO (closeThreadPool-3550-thread-1)
[n:127.0.0.1:38538_dgfqm%2Fxd ] o.a.s.c.CorePropertiesLocator Found 0 core
definitions underneath
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/shard-1-001/cores
[junit4] 2> 1626155 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/shard-2-001
of type NRT
[junit4] 2> 1626156 WARN (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 1626156 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
[junit4] 2> 1626156 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 1626156 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git:
c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
[junit4] 2> 1626169 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 1626170 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 1626170 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 1626190 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@4670a989{/dgfqm/xd,null,AVAILABLE}
[junit4] 2> 1626190 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@59d78f07{HTTP/1.1,[http/1.1]}{127.0.0.1:35487}
[junit4] 2> 1626190 INFO (closeThreadPool-3550-thread-1) [ ]
o.e.j.s.Server Started @1626246ms
[junit4] 2> 1626190 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/tempDir-001/jetty2,
replicaType=NRT, solrconfig=solrconfig.xml, hostContext=/dgfqm/xd,
hostPort=35487,
coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/shard-2-001/cores}
[junit4] 2> 1626191 ERROR (closeThreadPool-3550-thread-1) [ ]
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1626191 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter Using logger factory
org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 1626191 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
8.0.0
[junit4] 2> 1626191 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1626191 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1626191 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2018-12-06T05:22:18.693Z
[junit4] 2> 1626210 INFO (zkConnectionManagerCallback-3568-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1626211 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1626211 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/shard-2-001/solr.xml
[junit4] 2> 1626215 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay
is ignored
[junit4] 2> 1626215 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter
autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 1626216 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.SolrXmlConfig MBean server found:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44, but no JMX reporters were
configured - adding default JMX reporter.
[junit4] 2> 1626712 INFO (ScheduledTrigger-3605-thread-1) [ ]
o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with
currently live nodes: 2 and last live nodes: 1
[junit4] 2> 1627043 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:36502/solr
[junit4] 2> 1627077 INFO (zkConnectionManagerCallback-3572-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1627130 INFO (zkConnectionManagerCallback-3574-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1627133 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
[junit4] 2> 1627151 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.ZkController Publish node=127.0.0.1:35487_dgfqm%2Fxd as DOWN
[junit4] 2> 1627152 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.Overseer processMessage: queueSize:
1, message = {
[junit4] 2> "operation":"downnode",
[junit4] 2> "node_name":"127.0.0.1:35487_dgfqm%2Fxd"} current state
version: 1
[junit4] 2> 1627152 DEBUG
(OverseerStateUpdate-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.o.NodeMutator DownNode state invoked
for node: 127.0.0.1:35487_dgfqm%2Fxd
[junit4] 2> 1627152 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4
transient cores
[junit4] 2> 1627152 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:35487_dgfqm%2Fxd
[junit4] 2> 1627153 INFO (zkCallback-3530-thread-2) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1627170 INFO (zkCallback-3548-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1627170 INFO (zkCallback-3537-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1627190 INFO (zkCallback-3564-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1627209 INFO (zkCallback-3557-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1627251 INFO (zkCallback-3573-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1627283 INFO (zkConnectionManagerCallback-3581-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 1627296 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
[junit4] 2> 1627297 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:36502/solr ready
[junit4] 2> 1627297 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history
in memory.
[junit4] 2> 1627363 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node')
enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44
[junit4] 2> 1627401 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm')
enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44
[junit4] 2> 1627417 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry
'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5e55ed44
[junit4] 2> 1627418 INFO (closeThreadPool-3550-thread-1) [ ]
o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/shard-2-001/cores
[junit4] 2> 1627448 INFO
(TEST-ShardSplitTest.testSplitStaticIndexReplication-seed#[6047ED57EE908DAC]) [
] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/build/solr-core/test/J1/temp/solr.cloud.api.collections.ShardSplitTest_6047ED57EE908DAC-001/shard-3-001
of type NRT
[junit4] 2> 1627453 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Got 0 tasks
from work-queue : [[]]
[junit4] 2> 1627466 WARN (closeThreadPool-3550-thread-2) [ ]
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 1627466 INFO (closeThreadPool-3550-thread-2) [ ]
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
[junit4] 2> 1627466 INFO (closeThreadPool-3550-thread-2) [ ]
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 1627466 INFO (closeThreadPool-3550-thread-2) [ ]
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git:
c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
[junit4] 2> 1627490 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor Cleaning up
work-queue. #Running tasks: 0 #Completed tasks: 0
[junit4] 2> 1627490 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor RunningTasks:
[]
[junit4] 2> 1627490 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor BlockedTasks:
[]
[junit4] 2> 1627490 DEBUG
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fxd ] o.a.s.c.OverseerTaskProcessor
CompletedTasks: []
[junit4] 2> 1627490 INFO
(OverseerCollectionConfigSetProcessor-73788473994969092-127.0.0.1:45893_dgfqm%2Fxd-n_0000000000)
[n:127.0.0.1:45893_dgfqm%2Fx
[...truncated too long message...]
BadApples-Tests-master/lucene/top-level-ivy-settings.xml
resolve:
ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
-ivy-fail-disallowed-ivy-version:
ivy-fail:
ivy-configure:
[ivy:configure] :: loading settings :: file =
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/lucene/top-level-ivy-settings.xml
resolve:
jar-checksums:
[mkdir] Created dir:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/null228862281
[copy] Copying 240 files to
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/null228862281
[delete] Deleting directory
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-Tests-master/solr/null228862281
check-working-copy:
[ivy:cachepath] :: resolving dependencies ::
org.eclipse.jgit#org.eclipse.jgit-caller;working
[ivy:cachepath] confs: [default]
[ivy:cachepath] found
org.eclipse.jgit#org.eclipse.jgit;4.6.0.201612231935-r in public
[ivy:cachepath] found com.jcraft#jsch;0.1.53 in public
[ivy:cachepath] found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] found org.apache.httpcomponents#httpclient;4.3.6 in
public
[ivy:cachepath] found org.apache.httpcomponents#httpcore;4.3.3 in public
[ivy:cachepath] found commons-logging#commons-logging;1.1.3 in public
[ivy:cachepath] found commons-codec#commons-codec;1.6 in public
[ivy:cachepath] found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 44ms :: artifacts dl 4ms
---------------------------------------------------------------------
| | modules || artifacts |
| conf | number| search|dwnlded|evicted|| number|dwnlded|
---------------------------------------------------------------------
| default | 8 | 0 | 0 | 0 || 8 | 0 |
---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
[wc-checker] SLF4J: Defaulting to no-operation (NOP) logger implementation
[wc-checker] SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for
further details.
[wc-checker] Checking working copy status...
-jenkins-base:
BUILD SUCCESSFUL
Total time: 211 minutes 29 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
at hudson.FilePath$34.hasMatch(FilePath.java:2678)
at hudson.FilePath$34.invoke(FilePath.java:2557)
at hudson.FilePath$34.invoke(FilePath.java:2547)
at hudson.FilePath$FileCallableWrapper.call(FilePath.java:2918)
Also: hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene2
at
hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
at
hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
at hudson.remoting.Channel.call(Channel.java:955)
at hudson.FilePath.act(FilePath.java:1036)
at hudson.FilePath.act(FilePath.java:1025)
at hudson.FilePath.validateAntFileMask(FilePath.java:2547)
at
hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
at
hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
at
hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
at
hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
at
hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
at hudson.model.Build$BuildExecution.post2(Build.java:186)
at
hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
at hudson.model.Run.execute(Run.java:1819)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at
hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
at hudson.FilePath$FileCallableWrapper.call(FilePath.java:2920)
at hudson.remoting.UserRequest.perform(UserRequest.java:212)
at hudson.remoting.UserRequest.perform(UserRequest.java:54)
at hudson.remoting.Request$2.run(Request.java:369)
at
hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no
matches found within 10000
at hudson.FilePath.act(FilePath.java:1038)
at hudson.FilePath.act(FilePath.java:1025)
at hudson.FilePath.validateAntFileMask(FilePath.java:2547)
at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
at
hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
at
hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
at
hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
at hudson.model.Build$BuildExecution.post2(Build.java:186)
at
hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
at hudson.model.Run.execute(Run.java:1819)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern
"**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]