Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/2016/
1 tests failed.
FAILED: org.apache.solr.cloud.MoveReplicaHDFSTest.test
Error Message:
expected not same
Stack Trace:
java.lang.AssertionError: expected not same
at
__randomizedtesting.SeedInfo.seed([C201F022DD32A68D:4A55CFF873CECB75]:0)
at org.junit.Assert.fail(Assert.java:88)
at org.junit.Assert.failSame(Assert.java:819)
at org.junit.Assert.assertNotSame(Assert.java:798)
at org.junit.Assert.assertNotSame(Assert.java:811)
at org.apache.solr.cloud.MoveReplicaTest.test(MoveReplicaTest.java:147)
at
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
at
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
at
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at java.base/java.lang.Thread.run(Thread.java:834)
Build Log:
[...truncated 15324 lines...]
[junit4] Suite: org.apache.solr.cloud.MoveReplicaHDFSTest
[junit4] 2> 4813397 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.a.s.SolrTestCaseJ4 Created dataDir:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/data-dir-182-001
[junit4] 2> 4813397 WARN
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=14 numCloses=14
[junit4] 2> 4813397 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true)
w/NUMERIC_DOCVALUES_SYSPROP=true
[junit4] 2> 4813398 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via:
@org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0,
clientAuth=0.0/0.0)
[junit4] 2> 4813398 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.a.s.SolrTestCaseJ4 SecureRandom sanity checks:
test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
[junit4] 1> Formatting using clusterid: testClusterID
[junit4] 2> 4813436 WARN
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.a.h.m.i.MetricsConfig Cannot locate configuration: tried
hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
[junit4] 2> 4813444 WARN
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 4813446 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git:
afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4813447 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4813447 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4813447 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 4813448 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@139bab63{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 4813583 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.w.WebAppContext@4fdf947d{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-lucene2-us-west.apache.org-33844-hdfs-_-any-12167834953344529272.dir/webapp/,AVAILABLE}{/hdfs}
[junit4] 2> 4813583 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@217ee445{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:33844}
[junit4] 2> 4813583 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.Server Started @4813670ms
[junit4] 2> 4813640 WARN
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 4813641 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git:
afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4813642 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4813642 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4813642 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 4813642 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@4a4d0606{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 4813775 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.w.WebAppContext@647efeec{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-44000-datanode-_-any-9755091353983186414.dir/webapp/,AVAILABLE}{/datanode}
[junit4] 2> 4813775 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@1220cbd4{HTTP/1.1,[http/1.1]}{localhost:44000}
[junit4] 2> 4813775 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.Server Started @4813862ms
[junit4] 2> 4813841 WARN
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 4813842 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git:
afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4813842 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4813842 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4813842 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 4813843 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@c8e4c58{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 4813958 INFO (Block report processor) [ ]
BlockStateChange BLOCK* processReport 0x8c4201de01aab82d: Processing first
storage report for DS-dcd28d8c-54df-4a90-b005-aea85723a1e6 from datanode
60b3af80-d51c-46e6-b27a-65908421715b
[junit4] 2> 4813958 INFO (Block report processor) [ ]
BlockStateChange BLOCK* processReport 0x8c4201de01aab82d: from storage
DS-dcd28d8c-54df-4a90-b005-aea85723a1e6 node
DatanodeRegistration(127.0.0.1:43383,
datanodeUuid=60b3af80-d51c-46e6-b27a-65908421715b, infoPort=33555,
infoSecurePort=0, ipcPort=46198,
storageInfo=lv=-57;cid=testClusterID;nsid=963592837;c=1573584395591), blocks:
0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 4813958 INFO (Block report processor) [ ]
BlockStateChange BLOCK* processReport 0x8c4201de01aab82d: Processing first
storage report for DS-bec91b77-a036-4823-b45e-b060a0078b13 from datanode
60b3af80-d51c-46e6-b27a-65908421715b
[junit4] 2> 4813958 INFO (Block report processor) [ ]
BlockStateChange BLOCK* processReport 0x8c4201de01aab82d: from storage
DS-bec91b77-a036-4823-b45e-b060a0078b13 node
DatanodeRegistration(127.0.0.1:43383,
datanodeUuid=60b3af80-d51c-46e6-b27a-65908421715b, infoPort=33555,
infoSecurePort=0, ipcPort=46198,
storageInfo=lv=-57;cid=testClusterID;nsid=963592837;c=1573584395591), blocks:
0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 4814024 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.w.WebAppContext@75bb30c{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-35274-datanode-_-any-9107664281253266924.dir/webapp/,AVAILABLE}{/datanode}
[junit4] 2> 4814024 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@22cdb802{HTTP/1.1,[http/1.1]}{localhost:35274}
[junit4] 2> 4814024 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.Server Started @4814111ms
[junit4] 2> 4814108 INFO (Block report processor) [ ]
BlockStateChange BLOCK* processReport 0xe4901a0b5868e152: Processing first
storage report for DS-f2c3fc80-8e2c-4db3-87ca-9f83dc44c466 from datanode
e9ca55f1-537e-47d5-90f3-74ae94ab9c16
[junit4] 2> 4814108 INFO (Block report processor) [ ]
BlockStateChange BLOCK* processReport 0xe4901a0b5868e152: from storage
DS-f2c3fc80-8e2c-4db3-87ca-9f83dc44c466 node
DatanodeRegistration(127.0.0.1:34925,
datanodeUuid=e9ca55f1-537e-47d5-90f3-74ae94ab9c16, infoPort=35127,
infoSecurePort=0, ipcPort=35978,
storageInfo=lv=-57;cid=testClusterID;nsid=963592837;c=1573584395591), blocks:
0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
[junit4] 2> 4814109 INFO (Block report processor) [ ]
BlockStateChange BLOCK* processReport 0xe4901a0b5868e152: Processing first
storage report for DS-a0c33f60-a9e4-458c-983d-26ce4ebb7ace from datanode
e9ca55f1-537e-47d5-90f3-74ae94ab9c16
[junit4] 2> 4814109 INFO (Block report processor) [ ]
BlockStateChange BLOCK* processReport 0xe4901a0b5868e152: from storage
DS-a0c33f60-a9e4-458c-983d-26ce4ebb7ace node
DatanodeRegistration(127.0.0.1:34925,
datanodeUuid=e9ca55f1-537e-47d5-90f3-74ae94ab9c16, infoPort=35127,
infoSecurePort=0, ipcPort=35978,
storageInfo=lv=-57;cid=testClusterID;nsid=963592837;c=1573584395591), blocks:
0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] IGNOR/A 0.00s J2 | MoveReplicaHDFSTest.testFailedMove
[junit4] > Assumption #1: 'awaitsfix' test group is disabled
(@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-12080"))
[junit4] IGNOR/A 0.00s J2 | MoveReplicaHDFSTest.testNormalFailedMove
[junit4] > Assumption #1: 'badapple' test group is disabled
(@BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028"))
[junit4] 2> 4814497 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.SolrTestCaseJ4 ###Starting testNormalMove
[junit4] 2> 4814498 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.MiniSolrCloudCluster Starting cluster of 4 servers in
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002
[junit4] 2> 4814498 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 4814498 INFO (ZkTestServer Run Thread) [ ]
o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
[junit4] 2> 4814499 INFO (ZkTestServer Run Thread) [ ]
o.a.s.c.ZkTestServer Starting server
[junit4] 2> 4814599 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.ZkTestServer start zk server on port:38431
[junit4] 2> 4814599 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:38431
[junit4] 2> 4814599 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:38431
[junit4] 2> 4814599 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.ZkTestServer connecting to 127.0.0.1 38431
[junit4] 2> 4814601 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814603 INFO (zkConnectionManagerCallback-10101-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814603 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814605 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814606 INFO (zkConnectionManagerCallback-10103-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814606 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814607 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814608 INFO (zkConnectionManagerCallback-10105-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814608 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814710 WARN (jetty-launcher-10106-thread-1) [ ]
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 4814711 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 4814711 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 4814711 INFO (jetty-launcher-10106-thread-1) [ ]
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git:
afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4814711 WARN (jetty-launcher-10106-thread-2) [ ]
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 4814712 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 4814712 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 4814712 INFO (jetty-launcher-10106-thread-2) [ ]
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git:
afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4814712 WARN (jetty-launcher-10106-thread-3) [ ]
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-3) [ ]
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git:
afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4814713 WARN (jetty-launcher-10106-thread-4) [ ]
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-4) [ ]
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git:
afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4814715 INFO (jetty-launcher-10106-thread-2) [ ]
o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4814715 INFO (jetty-launcher-10106-thread-2) [ ]
o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4814715 INFO (jetty-launcher-10106-thread-2) [ ]
o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-2) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@69ac8dea{/solr,null,AVAILABLE}
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-4) [ ]
o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-4) [ ]
o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-4) [ ]
o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-1) [ ]
o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-1) [ ]
o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-1) [ ]
o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-4) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@6dcb93ff{/solr,null,AVAILABLE}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ]
o.e.j.s.AbstractConnector Started ServerConnector@1ee635ba{HTTP/1.1,[http/1.1,
h2c]}{127.0.0.1:45595}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ]
o.e.j.s.Server Started @4814804ms
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr,
hostPort=45595}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ]
o.e.j.s.AbstractConnector Started ServerConnector@34b7af99{HTTP/1.1,[http/1.1,
h2c]}{127.0.0.1:43582}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ]
o.e.j.s.Server Started @4814804ms
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr,
hostPort=43582}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@62d5ad83{/solr,null,AVAILABLE}
[junit4] 2> 4814717 ERROR (jetty-launcher-10106-thread-2) [ ]
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.s.SolrDispatchFilter Using logger factory
org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 4814717 ERROR (jetty-launcher-10106-thread-4) [ ]
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
9.0.0
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.s.SolrDispatchFilter Using logger factory
org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2019-11-12T18:46:36.894496Z
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
9.0.0
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ]
o.e.j.s.AbstractConnector Started ServerConnector@1d10bc8e{HTTP/1.1,[http/1.1,
h2c]}{127.0.0.1:39856}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2019-11-12T18:46:36.894556Z
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ]
o.e.j.s.Server Started @4814804ms
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr,
hostPort=39856}
[junit4] 2> 4814717 ERROR (jetty-launcher-10106-thread-1) [ ]
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.s.SolrDispatchFilter Using logger factory
org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
9.0.0
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2019-11-12T18:46:36.894871Z
[junit4] 2> 4814718 INFO (jetty-launcher-10106-thread-3) [ ]
o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4814718 INFO (jetty-launcher-10106-thread-3) [ ]
o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4814718 INFO (jetty-launcher-10106-thread-3) [ ]
o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 4814718 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814719 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814719 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814719 INFO (jetty-launcher-10106-thread-3) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@10e11c86{/solr,null,AVAILABLE}
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ]
o.e.j.s.AbstractConnector Started ServerConnector@4ea841d5{HTTP/1.1,[http/1.1,
h2c]}{127.0.0.1:40378}
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ]
o.e.j.s.Server Started @4814807ms
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr,
hostPort=40378}
[junit4] 2> 4814720 ERROR (jetty-launcher-10106-thread-3) [ ]
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.s.SolrDispatchFilter Using logger factory
org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 4814720 INFO (zkConnectionManagerCallback-10110-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
9.0.0
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2019-11-12T18:46:36.897513Z
[junit4] 2> 4814721 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814721 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 4814721 INFO (zkConnectionManagerCallback-10112-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814721 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814723 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay
is ignored
[junit4] 2> 4814723 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter
autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 4814725 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.c.SolrXmlConfig MBean server found:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d, but no JMX reporters were
configured - adding default JMX reporter.
[junit4] 2> 4814725 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 4814725 INFO (zkConnectionManagerCallback-10108-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814725 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814726 INFO (zkConnectionManagerCallback-10114-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814726 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814726 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 4814727 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay
is ignored
[junit4] 2> 4814727 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter
autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 4814728 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay
is ignored
[junit4] 2> 4814728 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter
autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 4814729 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.c.SolrXmlConfig MBean server found:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d, but no JMX reporters were
configured - adding default JMX reporter.
[junit4] 2> 4814729 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 4814729 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.c.SolrXmlConfig MBean server found:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d, but no JMX reporters were
configured - adding default JMX reporter.
[junit4] 2> 4814732 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay
is ignored
[junit4] 2> 4814732 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.c.SolrXmlConfig Configuration parameter
autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 4814733 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.c.SolrXmlConfig MBean server found:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d, but no JMX reporters were
configured - adding default JMX reporter.
[junit4] 2> 4814996 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized:
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 4814997 WARN (jetty-launcher-10106-thread-3) [ ]
o.e.j.u.s.S.config Trusting all certificates configured for
Client@48d9f27e[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4814997 WARN (jetty-launcher-10106-thread-3) [ ]
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for
Client@48d9f27e[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4814999 WARN (jetty-launcher-10106-thread-3) [ ]
o.e.j.u.s.S.config Trusting all certificates configured for
Client@5819beb9[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4814999 WARN (jetty-launcher-10106-thread-3) [ ]
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for
Client@5819beb9[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815001 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38431/solr
[junit4] 2> 4815002 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815002 INFO (zkConnectionManagerCallback-10122-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815002 INFO (jetty-launcher-10106-thread-3) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815016 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized:
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 4815016 WARN (jetty-launcher-10106-thread-4) [ ]
o.e.j.u.s.S.config Trusting all certificates configured for
Client@5305c1f6[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815016 WARN (jetty-launcher-10106-thread-4) [ ]
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for
Client@5305c1f6[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815018 WARN (jetty-launcher-10106-thread-4) [ ]
o.e.j.u.s.S.config Trusting all certificates configured for
Client@1e376863[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815018 WARN (jetty-launcher-10106-thread-4) [ ]
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for
Client@1e376863[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815019 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38431/solr
[junit4] 2> 4815020 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815021 INFO (zkConnectionManagerCallback-10128-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815021 INFO (jetty-launcher-10106-thread-4) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815210 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized:
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 4815210 WARN (jetty-launcher-10106-thread-2) [ ]
o.e.j.u.s.S.config Trusting all certificates configured for
Client@5ad9de7[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815211 WARN (jetty-launcher-10106-thread-2) [ ]
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for
Client@5ad9de7[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815213 WARN (jetty-launcher-10106-thread-2) [ ]
o.e.j.u.s.S.config Trusting all certificates configured for
Client@3088a09d[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815213 WARN (jetty-launcher-10106-thread-2) [ ]
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for
Client@3088a09d[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815214 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38431/solr
[junit4] 2> 4815215 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815216 INFO (zkConnectionManagerCallback-10136-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815216 INFO (jetty-launcher-10106-thread-2) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815227 INFO (jetty-launcher-10106-thread-4)
[n:127.0.0.1:43582_solr ] o.a.s.c.c.ConnectionManager Waiting for client to
connect to ZooKeeper
[junit4] 2> 4815228 INFO (zkConnectionManagerCallback-10138-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815228 INFO (jetty-launcher-10106-thread-4)
[n:127.0.0.1:43582_solr ] o.a.s.c.c.ConnectionManager Client is connected
to ZooKeeper
[junit4] 2> 4815300 INFO (jetty-launcher-10106-thread-4)
[n:127.0.0.1:43582_solr ] o.a.s.c.OverseerElectionContext I am going to be
the leader 127.0.0.1:43582_solr
[junit4] 2> 4815301 INFO (jetty-launcher-10106-thread-4)
[n:127.0.0.1:43582_solr ] o.a.s.c.Overseer Overseer
(id=75722484966359050-127.0.0.1:43582_solr-n_0000000000) starting
[junit4] 2> 4815306 INFO
(OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000)
[n:127.0.0.1:43582_solr ] o.a.s.c.Overseer Starting to work on the main
queue : 127.0.0.1:43582_solr
[junit4] 2> 4815306 INFO (jetty-launcher-10106-thread-4)
[n:127.0.0.1:43582_solr ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:43582_solr
[junit4] 2> 4815307 INFO
(OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000)
[n:127.0.0.1:43582_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (0) -> (1)
[junit4] 2> 4815321 INFO (jetty-launcher-10106-thread-4)
[n:127.0.0.1:43582_solr ] o.a.s.h.a.MetricsHistoryHandler No .system
collection, keeping metrics history in memory.
[junit4] 2> 4815328 INFO (jetty-launcher-10106-thread-3)
[n:127.0.0.1:40378_solr ] o.a.s.c.c.ConnectionManager Waiting for client to
connect to ZooKeeper
[junit4] 2> 4815329 INFO (zkConnectionManagerCallback-10144-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815329 INFO (jetty-launcher-10106-thread-3)
[n:127.0.0.1:40378_solr ] o.a.s.c.c.ConnectionManager Client is connected
to ZooKeeper
[junit4] 2> 4815333 INFO (jetty-launcher-10106-thread-3)
[n:127.0.0.1:40378_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (0) -> (1)
[junit4] 2> 4815337 INFO (jetty-launcher-10106-thread-3)
[n:127.0.0.1:40378_solr ] o.a.s.c.ZkController Publish
node=127.0.0.1:40378_solr as DOWN
[junit4] 2> 4815338 INFO (jetty-launcher-10106-thread-4)
[n:127.0.0.1:43582_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.node' (registry 'solr.node') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815338 INFO (jetty-launcher-10106-thread-3)
[n:127.0.0.1:40378_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating
transient cache for 4 transient cores
[junit4] 2> 4815338 INFO (jetty-launcher-10106-thread-3)
[n:127.0.0.1:40378_solr ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:40378_solr
[junit4] 2> 4815339 INFO (zkCallback-10137-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 4815339 INFO (zkCallback-10143-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 4815348 INFO (jetty-launcher-10106-thread-4)
[n:127.0.0.1:43582_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jvm' (registry 'solr.jvm') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815348 INFO (jetty-launcher-10106-thread-4)
[n:127.0.0.1:43582_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jetty' (registry 'solr.jetty') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815350 INFO (jetty-launcher-10106-thread-4)
[n:127.0.0.1:43582_solr ] o.a.s.c.CorePropertiesLocator Found 0 core
definitions underneath
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node4/.
[junit4] 2> 4815363 INFO (jetty-launcher-10106-thread-3)
[n:127.0.0.1:40378_solr ] o.a.s.h.a.MetricsHistoryHandler No .system
collection, keeping metrics history in memory.
[junit4] 2> 4815380 INFO (jetty-launcher-10106-thread-3)
[n:127.0.0.1:40378_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.node' (registry 'solr.node') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815391 INFO (jetty-launcher-10106-thread-3)
[n:127.0.0.1:40378_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jvm' (registry 'solr.jvm') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815391 INFO (jetty-launcher-10106-thread-3)
[n:127.0.0.1:40378_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jetty' (registry 'solr.jetty') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815393 INFO (jetty-launcher-10106-thread-3)
[n:127.0.0.1:40378_solr ] o.a.s.c.CorePropertiesLocator Found 0 core
definitions underneath
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node3/.
[junit4] 2> 4815419 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized:
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 4815420 WARN (jetty-launcher-10106-thread-1) [ ]
o.e.j.u.s.S.config Trusting all certificates configured for
Client@191fa1e6[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815420 WARN (jetty-launcher-10106-thread-1) [ ]
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for
Client@191fa1e6[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815422 WARN (jetty-launcher-10106-thread-1) [ ]
o.e.j.u.s.S.config Trusting all certificates configured for
Client@6d087d73[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815422 WARN (jetty-launcher-10106-thread-1) [ ]
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for
Client@6d087d73[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815424 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38431/solr
[junit4] 2> 4815424 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815425 INFO (zkConnectionManagerCallback-10154-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815425 INFO (jetty-launcher-10106-thread-1) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815428 INFO (jetty-launcher-10106-thread-2)
[n:127.0.0.1:45595_solr ] o.a.s.c.c.ConnectionManager Waiting for client to
connect to ZooKeeper
[junit4] 2> 4815429 INFO (zkConnectionManagerCallback-10156-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815429 INFO (jetty-launcher-10106-thread-2)
[n:127.0.0.1:45595_solr ] o.a.s.c.c.ConnectionManager Client is connected
to ZooKeeper
[junit4] 2> 4815433 INFO (jetty-launcher-10106-thread-2)
[n:127.0.0.1:45595_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (0) -> (2)
[junit4] 2> 4815437 INFO (jetty-launcher-10106-thread-2)
[n:127.0.0.1:45595_solr ] o.a.s.c.ZkController Publish
node=127.0.0.1:45595_solr as DOWN
[junit4] 2> 4815438 INFO (jetty-launcher-10106-thread-2)
[n:127.0.0.1:45595_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating
transient cache for 4 transient cores
[junit4] 2> 4815438 INFO (jetty-launcher-10106-thread-2)
[n:127.0.0.1:45595_solr ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:45595_solr
[junit4] 2> 4815439 INFO (zkCallback-10143-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 4815439 INFO (zkCallback-10137-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 4815439 INFO (zkCallback-10155-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 4815452 INFO (jetty-launcher-10106-thread-2)
[n:127.0.0.1:45595_solr ] o.a.s.h.a.MetricsHistoryHandler No .system
collection, keeping metrics history in memory.
[junit4] 2> 4815468 INFO (jetty-launcher-10106-thread-2)
[n:127.0.0.1:45595_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.node' (registry 'solr.node') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815479 INFO (jetty-launcher-10106-thread-2)
[n:127.0.0.1:45595_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jvm' (registry 'solr.jvm') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815479 INFO (jetty-launcher-10106-thread-2)
[n:127.0.0.1:45595_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jetty' (registry 'solr.jetty') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815481 INFO (jetty-launcher-10106-thread-2)
[n:127.0.0.1:45595_solr ] o.a.s.c.CorePropertiesLocator Found 0 core
definitions underneath
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node2/.
[junit4] 2> 4815528 INFO (jetty-launcher-10106-thread-1)
[n:127.0.0.1:39856_solr ] o.a.s.c.c.ConnectionManager Waiting for client to
connect to ZooKeeper
[junit4] 2> 4815529 INFO (zkConnectionManagerCallback-10162-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815529 INFO (jetty-launcher-10106-thread-1)
[n:127.0.0.1:39856_solr ] o.a.s.c.c.ConnectionManager Client is connected
to ZooKeeper
[junit4] 2> 4815534 INFO (jetty-launcher-10106-thread-1)
[n:127.0.0.1:39856_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from
ZooKeeper... (0) -> (3)
[junit4] 2> 4815537 INFO (jetty-launcher-10106-thread-1)
[n:127.0.0.1:39856_solr ] o.a.s.c.ZkController Publish
node=127.0.0.1:39856_solr as DOWN
[junit4] 2> 4815538 INFO (jetty-launcher-10106-thread-1)
[n:127.0.0.1:39856_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating
transient cache for 4 transient cores
[junit4] 2> 4815538 INFO (jetty-launcher-10106-thread-1)
[n:127.0.0.1:39856_solr ] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:39856_solr
[junit4] 2> 4815539 INFO (zkCallback-10143-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 4815539 INFO (zkCallback-10137-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 4815540 INFO (zkCallback-10161-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 4815540 INFO (zkCallback-10155-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 4815553 INFO (jetty-launcher-10106-thread-1)
[n:127.0.0.1:39856_solr ] o.a.s.h.a.MetricsHistoryHandler No .system
collection, keeping metrics history in memory.
[junit4] 2> 4815569 INFO (jetty-launcher-10106-thread-1)
[n:127.0.0.1:39856_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.node' (registry 'solr.node') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815580 INFO (jetty-launcher-10106-thread-1)
[n:127.0.0.1:39856_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jvm' (registry 'solr.jvm') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815580 INFO (jetty-launcher-10106-thread-1)
[n:127.0.0.1:39856_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for
'solr.jetty' (registry 'solr.jetty') enabled at server:
com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815582 INFO (jetty-launcher-10106-thread-1)
[n:127.0.0.1:39856_solr ] o.a.s.c.CorePropertiesLocator Found 0 core
definitions underneath
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node1/.
[junit4] 2> 4815635 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.MiniSolrCloudCluster waitForAllNodes: numServers=4
[junit4] 2> 4815636 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815637 INFO (zkConnectionManagerCallback-10171-thread-1) [
] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815637 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815639 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
[junit4] 2> 4815640 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:38431/solr ready
[junit4] 2> 4815646 INFO (qtp332877956-36821) [n:127.0.0.1:39856_solr
] o.a.s.h.a.CollectionsHandler Invoked Collection Action :overseerstatus with
params action=OVERSEERSTATUS&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 4815658 INFO (qtp332877956-36821) [n:127.0.0.1:39856_solr
] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections
params={action=OVERSEERSTATUS&wt=javabin&version=2} status=0 QTime=12
[junit4] 2> 4815659 INFO
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ]
o.a.s.c.MoveReplicaTest total_jettys: 4
[junit4] 2> 4815660 INFO (qtp332877956-36826) [n:127.0.0.1:39856_solr
] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params
pullReplicas=1&collection.configName=conf1&maxShardsPerNode=2&autoAddReplicas=false&name=MoveReplicaHDFSTest_coll_false&nrtReplicas=1&action=CREATE&numShards=2&tlogReplicas=0&wt=javabin&version=2
and sendToOCPQueue=true
[junit4] 2> 4815663 INFO
(OverseerThreadFactory-9837-thread-2-processing-n:127.0.0.1:43582_solr)
[n:127.0.0.1:43582_solr ] o.a.s.c.a.c.CreateCollectionCmd Create collection
MoveReplicaHDFSTest_coll_false
[junit4] 2> 4815663 INFO
(OverseerCollectionConfigSetProcessor-75722484966359050-127.0.0.1:43582_solr-n_0000000000)
[n:127.0.0.1:43582_solr ] o.a.s.c.OverseerTaskQueue Response ZK path:
/overseer/collection-queue-work/qnr-0000000000 doesn't exist. Requestor may
have disconnected from ZooKeeper
[junit4] 2> 4815798 INFO
(OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000)
[n:127.0.0.1:43582_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard1",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_n1",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:43582/solr",
[junit4] 2> "type":"NRT",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 4815800 INFO
(OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000)
[n:127.0.0.1:43582_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard1",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_p2",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:40378/solr",
[junit4] 2> "type":"PULL",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 4815802 INFO
(OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000)
[n:127.0.0.1:43582_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard2",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_n4",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:39856/solr",
[junit4] 2> "type":"NRT",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 4815804 INFO
(OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000)
[n:127.0.0.1:43582_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard2",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_p6",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:45595/solr",
[junit4] 2> "type":"PULL",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 4816008 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.h.a.CoreAdminOperation core create command
qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_n1&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 4816008 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.h.a.CoreAdminOperation core create command
qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_n4&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 4816009 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4
transient cores
[junit4] 2> 4816009 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.h.a.CoreAdminOperation core create command
qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_p2&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=PULL
[junit4] 2> 4816016 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ]
o.a.s.h.a.CoreAdminOperation core create command
qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_p6&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=PULL
[junit4] 2> 4817022 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 9.0.0
[junit4] 2> 4817022 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 9.0.0
[junit4] 2> 4817026 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 9.0.0
[junit4] 2> 4817031 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 9.0.0
[junit4] 2> 4817032 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.IndexSchema
[MoveReplicaHDFSTest_coll_false_shard1_replica_p2] Schema name=minimal
[junit4] 2> 4817034 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.IndexSchema Loaded
schema minimal/1.1 with uniqueid field id
[junit4] 2> 4817034 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.CoreContainer
Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_p2' using
configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 4817034 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.m.r.SolrJmxReporter
JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_p2'
(registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_p2') enabled
at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4817035 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.IndexSchema
[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] Schema name=minimal
[junit4] 2> 4817037 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.c.HdfsDirectoryFactory
solr.hdfs.home=hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home
[junit4] 2> 4817037 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 4817037 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.SolrCore
[[MoveReplicaHDFSTest_coll_false_shard1_replica_p2] ] Opening new SolrCore at
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node3/MoveReplicaHDFSTest_coll_false_shard1_replica_p2],
dataDir=[hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/]
[junit4] 2> 4817037 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.IndexSchema
[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] Schema name=minimal
[junit4] 2> 4817038 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.IndexSchema Loaded
schema minimal/1.1 with uniqueid field id
[junit4] 2> 4817038 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.CoreContainer
Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_n4' using
configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 4817038 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.m.r.SolrJmxReporter
JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4'
(registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4') enabled
at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4817039 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/snapshot_metadata
[junit4] 2> 4817045 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.c.HdfsDirectoryFactory
solr.hdfs.home=hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home
[junit4] 2> 4817046 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 4817046 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SolrCore
[[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] ] Opening new SolrCore at
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node1/MoveReplicaHDFSTest_coll_false_shard2_replica_n4],
dataDir=[hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/]
[junit4] 2> 4817046 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded
schema minimal/1.1 with uniqueid field id
[junit4] 2> 4817046 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.CoreContainer
Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_n1' using
configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 4817046 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.IndexSchema
[MoveReplicaHDFSTest_coll_false_shard2_replica_p6] Schema name=minimal
[junit4] 2> 4817046 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter
JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1'
(registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1') enabled
at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4817047 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.c.HdfsDirectoryFactory
solr.hdfs.home=hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home
[junit4] 2> 4817047 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 4817047 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.SolrCore
[[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] ] Opening new SolrCore at
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node4/MoveReplicaHDFSTest_coll_false_shard1_replica_n1],
dataDir=[hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data/]
[junit4] 2> 4817047 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/snapshot_metadata
[junit4] 2> 4817048 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data/snapshot_metadata
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.IndexSchema Loaded
schema minimal/1.1 with uniqueid field id
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.CoreContainer
Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_p6' using
configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.m.r.SolrJmxReporter
JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_p6'
(registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_p6') enabled
at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ]
o.a.s.c.HdfsDirectoryFactory
solr.hdfs.home=hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ]
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.SolrCore
[[MoveReplicaHDFSTest_coll_false_shard2_replica_p6] ] Opening new SolrCore at
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node2/MoveReplicaHDFSTest_coll_false_shard2_replica_p6],
dataDir=[hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/]
[junit4] 2> 4817049 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/snapshot_metadata
[junit4] 2> 4817052 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct
memory allocation set to [true]
[junit4] 2> 4817052 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of
[33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817052 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.c.HdfsDirectoryFactory Creating new global HDFS BlockCache
[junit4] 2> 4817054 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct
memory allocation set to [true]
[junit4] 2> 4817054 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of
[33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817054 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct
memory allocation set to [true]
[junit4] 2> 4817054 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of
[33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817055 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ]
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct
memory allocation set to [true]
[junit4] 2> 4817055 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ]
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of
[33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817073 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.b.BlockDirectory
Block cache on write is disabled
[junit4] 2> 4817073 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.b.BlockDirectory
Block cache on write is disabled
[junit4] 2> 4817074 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory
Block cache on write is disabled
[junit4] 2> 4817074 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data
[junit4] 2> 4817075 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data
[junit4] 2> 4817075 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data
[junit4] 2> 4817083 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.b.BlockDirectory
Block cache on write is disabled
[junit4] 2> 4817084 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data
[junit4] 2> 4817092 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/index
[junit4] 2> 4817098 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct
memory allocation set to [true]
[junit4] 2> 4817098 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of
[33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817099 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data/index
[junit4] 2> 4817100 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/index
[junit4] 2> 4817101 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.b.BlockDirectory
Block cache on write is disabled
[junit4] 2> 4817118 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct
memory allocation set to [true]
[junit4] 2> 4817119 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct
memory allocation set to [true]
[junit4] 2> 4817119 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of
[33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817120 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ]
o.a.s.c.HdfsDirectoryFactory creating directory factory for path
hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/index
[junit4] 2> 4817121 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of
[33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817123 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory
Block cache on write is disabled
[junit4] 2> 4817125 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.b.BlockDirectory
Block cache on write is disabled
[junit4] 2> 4817127 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ]
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct
memory allocation set to [true]
[junit4] 2> 4817127 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ]
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of
[33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817132 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8
x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.b.BlockDirectory
Block cache on write is disabled
[junit4] 2> 4817238 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 4817238 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 4817245 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.UpdateHandler
Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 4817245 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 4817245 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.HdfsUpdateLog
Initializing HdfsUpdateLog: tlogDfsReplication=3
[junit4] 2> 4817246 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateHandler
Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 4817246 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 4817246 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.HdfsUpdateLog
Initializing HdfsUpdateLog: tlogDfsReplication=3
[junit4] 2> 4817252 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.SolrIndexSearcher
Opening [Searcher@4fb2eb70[MoveReplicaHDFSTest_coll_false_shard1_replica_p2]
main]
[junit4] 2> 4817254 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 4817254 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 4817255 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000ms.
[junit4] 2> 4817256 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 4817256 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 4817274 INFO
(searcherExecutor-9857-thread-1-processing-n:127.0.0.1:40378_solr
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.SolrCore
[MoveReplicaHDFSTest_coll_false_shard1_replica_p2] Registered new searcher
Searcher@4fb2eb70[MoveReplicaHDFSTest_coll_false_shard1_replica_p2]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 4817276 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 4817276 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 4817277 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.ZkController
MoveReplicaHDFSTest_coll_false_shard1_replica_p2 starting background
replication from leader
[junit4] 2> 4817277 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ]
o.a.s.c.ReplicateFromLeader Will start replication from leader with poll
interval: 00:00:01
[junit4] 2> 4817279 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.h.ReplicationHandler
Poll scheduled at an interval of 1000ms
[junit4] 2> 4817279 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5
x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000ms.
[junit4] 2> 4817285 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.SolrIndexSearcher
Opening [Searcher@76d745ac[MoveReplicaHDFSTest_coll_false_shard2_replica_n4]
main]
[junit4] 2> 4817286 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 4817287 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 4817287 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher
Opening [Searcher@58ad5b4c[MoveReplicaHDFSTest_coll_false_shard1_replica_n1]
main]
[junit4] 2> 4817287 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000ms.
[junit4] 2> 4817287 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateLog Could
not find max version in index or recent updates, using new clock
1650022835252363264
[junit4] 2> 4817288 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 4817288 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 4817289 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000ms.
[junit4] 2> 4817289 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.UpdateLog Could
not find max version in index or recent updates, using new clock
1650022835254460416
[junit4] 2> 4817290 INFO
(searcherExecutor-9858-thread-1-processing-n:127.0.0.1:39856_solr
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SolrCore
[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] Registered new searcher
Searcher@76d745ac[MoveReplicaHDFSTest_coll_false_shard2_replica_n4]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 4817292 INFO
(searcherExecutor-9859-thread-1-processing-n:127.0.0.1:43582_solr
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.SolrCore
[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] Registered new searcher
Searcher@58ad5b4c[MoveReplicaHDFSTest_coll_false_shard1_replica_n1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 4817294 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.ZkShardTerms
Successful update of terms at
/collections/MoveReplicaHDFSTest_coll_false/terms/shard2 to
Terms{values={core_node7=0}, version=0}
[junit4] 2> 4817294 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.c.ShardLeaderElectionContextBase make sure parent is created
/collections/MoveReplicaHDFSTest_coll_false/leaders/shard2
[junit4] 2> 4817295 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.ZkShardTerms
Successful update of terms at
/collections/MoveReplicaHDFSTest_coll_false/terms/shard1 to
Terms{values={core_node3=0}, version=0}
[junit4] 2> 4817295 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ]
o.a.s.c.ShardLeaderElectionContextBase make sure parent is created
/collections/MoveReplicaHDFSTest_coll_false/leaders/shard1
[junit4] 2> 4817299 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ]
o.a.s.c.ShardLeaderElectionContext Enough replicas found to con
[...truncated too long message...]
ng
[junit4] 2> 4840875 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.h.ContextHandler Stopped
o.e.j.s.ServletContextHandler@c8e4c58{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,UNAVAILABLE}
[junit4] 2> 4840877 WARN (BP-1876136977-127.0.0.1-1573584395591
heartbeating to lucene2-us-west.apache.org/127.0.0.1:43389) [ ]
o.a.h.h.s.d.IncrementalBlockReportManager IncrementalBlockReportManager
interrupted
[junit4] 2> 4840877 WARN (BP-1876136977-127.0.0.1-1573584395591
heartbeating to lucene2-us-west.apache.org/127.0.0.1:43389) [ ]
o.a.h.h.s.d.DataNode Ending block pool service for: Block pool
BP-1876136977-127.0.0.1-1573584395591 (Datanode Uuid
e9ca55f1-537e-47d5-90f3-74ae94ab9c16) service to
lucene2-us-west.apache.org/127.0.0.1:43389
[junit4] 2> 4840885 WARN
(refreshUsed-/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-001/hdfsBaseDir/data/data3/current/BP-1876136977-127.0.0.1-1573584395591)
[ ] o.a.h.f.CachingGetSpaceUsed Thread Interrupted waiting to refresh disk
information: sleep interrupted
[junit4] 2> 4840893 WARN
(refreshUsed-/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-001/hdfsBaseDir/data/data4/current/BP-1876136977-127.0.0.1-1573584395591)
[ ] o.a.h.f.CachingGetSpaceUsed Thread Interrupted waiting to refresh disk
information: sleep interrupted
[junit4] 2> 4840894 WARN
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.a.h.h.s.d.DirectoryScanner DirectoryScanner: shutdown has been called
[junit4] 2> 4840903 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.h.ContextHandler Stopped
o.e.j.w.WebAppContext@647efeec{datanode,/,null,UNAVAILABLE}{/datanode}
[junit4] 2> 4840903 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.AbstractConnector Stopped
ServerConnector@1220cbd4{HTTP/1.1,[http/1.1]}{localhost:0}
[junit4] 2> 4840903 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.session node0 Stopped scavenging
[junit4] 2> 4840903 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.h.ContextHandler Stopped
o.e.j.s.ServletContextHandler@4a4d0606{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,UNAVAILABLE}
[junit4] 2> 4840906 WARN (BP-1876136977-127.0.0.1-1573584395591
heartbeating to lucene2-us-west.apache.org/127.0.0.1:43389) [ ]
o.a.h.h.s.d.IncrementalBlockReportManager IncrementalBlockReportManager
interrupted
[junit4] 2> 4840906 WARN (BP-1876136977-127.0.0.1-1573584395591
heartbeating to lucene2-us-west.apache.org/127.0.0.1:43389) [ ]
o.a.h.h.s.d.DataNode Ending block pool service for: Block pool
BP-1876136977-127.0.0.1-1573584395591 (Datanode Uuid
60b3af80-d51c-46e6-b27a-65908421715b) service to
lucene2-us-west.apache.org/127.0.0.1:43389
[junit4] 2> 4840913 WARN
(refreshUsed-/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-001/hdfsBaseDir/data/data1/current/BP-1876136977-127.0.0.1-1573584395591)
[ ] o.a.h.f.CachingGetSpaceUsed Thread Interrupted waiting to refresh disk
information: sleep interrupted
[junit4] 2> 4840920 WARN
(refreshUsed-/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-001/hdfsBaseDir/data/data2/current/BP-1876136977-127.0.0.1-1573584395591)
[ ] o.a.h.f.CachingGetSpaceUsed Thread Interrupted waiting to refresh disk
information: sleep interrupted
[junit4] 2> 4840928 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.h.ContextHandler Stopped
o.e.j.w.WebAppContext@4fdf947d{hdfs,/,null,UNAVAILABLE}{/hdfs}
[junit4] 2> 4840929 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.AbstractConnector Stopped
ServerConnector@217ee445{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:0}
[junit4] 2> 4840929 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.session node0 Stopped scavenging
[junit4] 2> 4840929 INFO
(SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ]
o.e.j.s.h.ContextHandler Stopped
o.e.j.s.ServletContextHandler@139bab63{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,UNAVAILABLE}
[junit4] 2> NOTE: leaving temporary files on disk at:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001
[junit4] 2> Nov 12, 2019 6:47:03 PM
com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
[junit4] 2> WARNING: Will linger awaiting termination of 32 leaked
thread(s).
[junit4] 2> NOTE: test params are: codec=Asserting(Lucene80):
{_root_=PostingsFormat(name=LuceneVarGapDocFreqInterval),
id=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128)))},
docValues:{_version_=DocValuesFormat(name=Asserting)},
maxPointsInLeafNode=1871, maxMBSortInHeap=6.2570705227755194,
sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@4e364a29),
locale=seh-MZ, timezone=SystemV/YST9
[junit4] 2> NOTE: Linux 4.4.0-112-generic amd64/Oracle Corporation 11.0.4
(64-bit)/cpus=4,threads=5,free=59608232,total=536870912
[junit4] 2> NOTE: All tests run in this JVM: [SolrRrdBackendFactoryTest,
TestJoin, UpdateRequestProcessorFactoryTest, TestJavabinTupleStreamParser,
AnalysisErrorHandlingTest, OverseerModifyCollectionTest, TestBinaryField,
TestSimExecutePlanAction, NodeAddedTriggerIntegrationTest,
TestBM25SimilarityFactory, AsyncCallRequestStatusResponseTest, TestPolicyCloud,
LukeRequestHandlerTest, TestElisionMultitermQuery, TestXmlQParser,
TestLegacyField, ReturnFieldsTest, SolrXmlInZkTest,
HdfsUnloadDistributedZkTest, SimpleFacetsTest,
ManagedSchemaRoundRobinCloudTest, CdcrRequestHandlerTest,
ConfigureRecoveryStrategyTest, TestFaceting, CursorMarkTest,
SolrTestCaseJ4Test, ZkControllerTest, BasicAuthOnSingleNodeTest,
IndexSizeEstimatorTest, TokenizerChainTest, TestSystemIdResolver,
TestPostingsSolrHighlighter, DeleteStatusTest, DeleteNodeTest,
TestStressReorder, BadCopyFieldTest, TestClusterProperties,
TestReplicationHandlerBackup, DistributedIntervalFacetingTest,
TestSortByMinMaxFunction, URLClassifyProcessorTest, TestHashPartitioner,
SpellCheckComponentTest, DistributedSpellCheckComponentTest,
SolrIndexMetricsTest, TestMiniSolrCloudClusterSSL, DocValuesMultiTest,
HLLSerializationTest, AnalyticsMergeStrategyTest, MetricsHistoryHandlerTest,
TestSolrConfigHandlerCloud, RemoteQueryErrorTest, TestHalfAndHalfDocValues,
SolrCoreTest, PhrasesIdentificationComponentTest, ClusterStateTest,
BasicDistributedZkTest, SpellCheckCollatorTest, OrderedExecutorTest,
HighlighterConfigTest, TestNoOpRegenerator, OverseerTaskQueueTest,
TestCoreContainer, JWTAuthPluginTest, TestHttpServletCarrier,
TestSnapshotCloudManager, QueryResultKeyTest, HttpPartitionTest,
TestHttpShardHandlerFactory, BlockJoinFacetDistribTest, TestReplicaProperties,
HdfsDirectoryTest, TestPullReplica, ChangedSchemaMergeTest, SoftAutoCommitTest,
TestShardHandlerFactory, TestScoreJoinQPNoScore, TestDistribDocBasedVersion,
TestSolr4Spatial2, TestLMJelinekMercerSimilarityFactory,
ClassificationUpdateProcessorFactoryTest, TestDynamicLoadingUrl,
PingRequestHandlerTest, ConnectionReuseTest, CustomHighlightComponentTest,
TestCloudRecovery, ScheduledMaintenanceTriggerTest, TestExactSharedStatsCache,
LegacyCloudClusterPropTest, TestSolrJ, TestFieldCacheSort,
ExternalFileFieldSortTest, TestRandomFlRTGCloud, UnloadDistributedZkTest,
TestManagedSchemaThreadSafety, DistributedQueryComponentCustomSortTest,
TestSolrCloudWithHadoopAuthPlugin, SmileWriterTest, OutputWriterTest,
TestStandardQParsers, TriggerIntegrationTest, TestMacroExpander,
TestSchemaVersionResource, CoreAdminRequestStatusTest, TestIndexSearcher,
CircularListTest, TestDocTermOrdsUninvertLimit, TestSubQueryTransformerDistrib,
FieldAnalysisRequestHandlerTest, TestCloudPhrasesIdentificationComponent,
TestCollectionsAPIViaSolrCloudCluster,
IgnoreCommitOptimizeUpdateProcessorFactoryTest, TestExportWriter,
TestTolerantSearch, CollectionsAPIAsyncDistributedZkTest, SliceStateTest,
TestNumericRangeQuery64, TaggingAttributeTest, TestPerFieldSimilarity,
TestSimComputePlanAction, TestFieldResource, CdcrBootstrapTest,
TestStressCloudBlindAtomicUpdates, TestRangeQuery, TimeZoneUtilsTest,
TestSimPolicyCloud, UninvertDocValuesMergePolicyTest, ZkShardTermsTest,
TestComponentsName, DistributedFacetPivotLargeTest, ResponseLogComponentTest,
TestFunctionQuery, TestZkChroot, TestDFRSimilarityFactory, GraphQueryTest,
TestCoreBackup, NodeLostTriggerTest, CustomCollectionTest, TestRecovery,
TestSolrCloudSnapshots, TestSolrQueryResponse, DistributedDebugComponentTest,
NestedAtomicUpdateTest, QueryEqualityTest,
ChaosMonkeySafeLeaderWithPullReplicasTest, ConnectionManagerTest,
CreateRoutedAliasTest, DistribDocExpirationUpdateProcessorTest,
DistribJoinFromCollectionTest, DistributedQueueTest, DocValuesNotIndexedTest,
ForceLeaderTest, LeaderElectionContextKeyTest, LeaderElectionIntegrationTest,
LeaderFailoverAfterPartitionTest, MetricsHistoryWithAuthIntegrationTest,
MigrateRouteKeyTest, MoveReplicaHDFSTest]
[junit4] Completed [502/888 (1!)] on J2 in 30.62s, 4 tests, 1 failure, 2
skipped <<< FAILURES!
[...truncated 54210 lines...]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]