Build: https://builds.apache.org/job/Lucene-Solr-Tests-7.x/869/

1 tests failed.
FAILED:  org.apache.solr.cloud.MoveReplicaHDFSTest.testFailedMove

Error Message:
No live SolrServers available to handle this 
request:[https://127.0.0.1:44949/solr/MoveReplicaHDFSTest_failed_coll_true, 
https://127.0.0.1:40305/solr/MoveReplicaHDFSTest_failed_coll_true]

Stack Trace:
org.apache.solr.client.solrj.SolrServerException: No live SolrServers available 
to handle this 
request:[https://127.0.0.1:44949/solr/MoveReplicaHDFSTest_failed_coll_true, 
https://127.0.0.1:40305/solr/MoveReplicaHDFSTest_failed_coll_true]
        at 
__randomizedtesting.SeedInfo.seed([CCFCCEB35196AFAA:66311D41E6457A7A]:0)
        at 
org.apache.solr.client.solrj.impl.LBHttpSolrClient.request(LBHttpSolrClient.java:462)
        at 
org.apache.solr.client.solrj.impl.CloudSolrClient.sendRequest(CloudSolrClient.java:1107)
        at 
org.apache.solr.client.solrj.impl.CloudSolrClient.requestWithRetryOnStaleState(CloudSolrClient.java:884)
        at 
org.apache.solr.client.solrj.impl.CloudSolrClient.requestWithRetryOnStaleState(CloudSolrClient.java:994)
        at 
org.apache.solr.client.solrj.impl.CloudSolrClient.request(CloudSolrClient.java:817)
        at 
org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:194)
        at org.apache.solr.client.solrj.SolrClient.query(SolrClient.java:942)
        at 
org.apache.solr.cloud.MoveReplicaTest.testFailedMove(MoveReplicaTest.java:289)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1742)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:935)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:971)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:985)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:944)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:830)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:880)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:891)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:748)
Caused by: 
org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error 
from server at 
https://127.0.0.1:44949/solr/MoveReplicaHDFSTest_failed_coll_true: no servers 
hosting shard: shard1
        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:643)
        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:255)
        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:244)
        at 
org.apache.solr.client.solrj.impl.LBHttpSolrClient.doRequest(LBHttpSolrClient.java:483)
        at 
org.apache.solr.client.solrj.impl.LBHttpSolrClient.request(LBHttpSolrClient.java:436)
        ... 46 more




Build Log:
[...truncated 13527 lines...]
   [junit4] Suite: org.apache.solr.cloud.MoveReplicaHDFSTest
   [junit4]   2> Creating dataDir: 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/init-core-data-001
   [junit4]   2> 937996 WARN  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=4 numCloses=4
   [junit4]   2> 937996 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Using TrieFields (NUMERIC_POINTS_SYSPROP=false) 
w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 937997 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (true) via: 
@org.apache.solr.util.RandomizeSSL(reason=, ssl=NaN, value=NaN, clientAuth=NaN)
   [junit4]   2> 937998 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: 
test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 937998 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.s.c.MiniSolrCloudCluster Starting cluster of 4 servers in 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-001
   [junit4]   2> 937999 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 937999 INFO  (Thread-2618) [    ] o.a.s.c.ZkTestServer client 
port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 937999 INFO  (Thread-2618) [    ] o.a.s.c.ZkTestServer 
Starting server
   [junit4]   2> 938000 ERROR (Thread-2618) [    ] o.a.z.s.ZooKeeperServer 
ZKShutdownHandler is not registered, so ZooKeeper server won't take any action 
on ERROR or SHUTDOWN server state changes
   [junit4]   2> 938099 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.s.c.ZkTestServer start zk server on port:40620
   [junit4]   2> 938101 INFO  (zkConnectionManagerCallback-2776-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938104 WARN  (NIOServerCxn.Factory:0.0.0.0/0.0.0.0:0) [    ] 
o.a.z.s.NIOServerCnxn Unable to read additional data from client sessionid 
0x100e2c215380000, likely client has closed socket
   [junit4]   2> 938106 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: 
d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 938106 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: 
d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 938107 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: 
d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 938107 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: 
d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 938108 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 938108 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 938109 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 938109 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@5d7282eb{/solr,null,AVAILABLE}
   [junit4]   2> 938109 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 938109 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 938109 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 938109 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@2c995706{/solr,null,AVAILABLE}
   [junit4]   2> 938111 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@7d2456a{SSL,[ssl, 
http/1.1]}{127.0.0.1:45953}
   [junit4]   2> 938111 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.e.j.s.Server Started @938162ms
   [junit4]   2> 938111 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, 
hostPort=45953}
   [junit4]   2> 938111 ERROR (jetty-launcher-2773-thread-3) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 938111 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 938111 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.5.0
   [junit4]   2> 938111 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 938111 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 938111 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2018-09-09T17:53:32.271Z
   [junit4]   2> 938112 INFO  (zkConnectionManagerCallback-2778-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938113 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 938118 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 938118 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@2940e4c4{SSL,[ssl, 
http/1.1]}{127.0.0.1:40305}
   [junit4]   2> 938118 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.e.j.s.Server Started @938169ms
   [junit4]   2> 938118 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, 
hostPort=40305}
   [junit4]   2> 938118 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 938118 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 938118 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 938118 ERROR (jetty-launcher-2773-thread-2) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 938118 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 938118 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.5.0
   [junit4]   2> 938118 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 938119 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 938119 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@436b048e{/solr,null,AVAILABLE}
   [junit4]   2> 938119 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2018-09-09T17:53:32.279Z
   [junit4]   2> 938119 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@7759f5f8{SSL,[ssl, 
http/1.1]}{127.0.0.1:44949}
   [junit4]   2> 938119 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.e.j.s.Server Started @938170ms
   [junit4]   2> 938119 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, 
hostPort=44949}
   [junit4]   2> 938119 ERROR (jetty-launcher-2773-thread-1) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 938119 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 938119 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.5.0
   [junit4]   2> 938119 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 938119 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 938119 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2018-09-09T17:53:32.279Z
   [junit4]   2> 938118 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 938120 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 938120 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@2ffb2320{/solr,null,AVAILABLE}
   [junit4]   2> 938121 INFO  (zkConnectionManagerCallback-2780-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938121 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@129518f{SSL,[ssl, 
http/1.1]}{127.0.0.1:41876}
   [junit4]   2> 938121 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.e.j.s.Server Started @938172ms
   [junit4]   2> 938121 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, 
hostPort=41876}
   [junit4]   2> 938121 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 938121 ERROR (jetty-launcher-2773-thread-4) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 938121 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 938121 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.5.0
   [junit4]   2> 938121 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 938121 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 938121 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2018-09-09T17:53:32.281Z
   [junit4]   2> 938123 INFO  (zkConnectionManagerCallback-2782-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938125 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 938125 INFO  (zkConnectionManagerCallback-2784-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938128 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 938240 INFO  (jetty-launcher-2773-thread-1) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:40620/solr
   [junit4]   2> 938244 INFO  (zkConnectionManagerCallback-2788-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938245 INFO  (zkConnectionManagerCallback-2790-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938258 INFO  (jetty-launcher-2773-thread-3) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:40620/solr
   [junit4]   2> 938260 INFO  (zkConnectionManagerCallback-2796-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938261 INFO  (zkConnectionManagerCallback-2798-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938270 INFO  (jetty-launcher-2773-thread-3) 
[n:127.0.0.1:45953_solr    ] o.a.s.c.OverseerElectionContext I am going to be 
the leader 127.0.0.1:45953_solr
   [junit4]   2> 938271 INFO  (jetty-launcher-2773-thread-3) 
[n:127.0.0.1:45953_solr    ] o.a.s.c.Overseer Overseer 
(id=72306917245452296-127.0.0.1:45953_solr-n_0000000000) starting
   [junit4]   2> 938276 INFO  (zkConnectionManagerCallback-2805-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938278 INFO  (jetty-launcher-2773-thread-3) 
[n:127.0.0.1:45953_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster 
at 127.0.0.1:40620/solr ready
   [junit4]   2> 938278 INFO  
(OverseerStateUpdate-72306917245452296-127.0.0.1:45953_solr-n_0000000000) 
[n:127.0.0.1:45953_solr    ] o.a.s.c.Overseer Starting to work on the main 
queue : 127.0.0.1:45953_solr
   [junit4]   2> 938280 INFO  (jetty-launcher-2773-thread-3) 
[n:127.0.0.1:45953_solr    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:45953_solr
   [junit4]   2> 938296 INFO  (zkCallback-2797-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 938299 INFO  (zkCallback-2804-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 938304 INFO  (jetty-launcher-2773-thread-3) 
[n:127.0.0.1:45953_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 938310 INFO  (jetty-launcher-2773-thread-2) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:40620/solr
   [junit4]   2> 938312 INFO  (zkConnectionManagerCallback-2810-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938315 INFO  (zkConnectionManagerCallback-2812-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938317 INFO  (jetty-launcher-2773-thread-1) 
[n:127.0.0.1:44949_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (1)
   [junit4]   2> 938322 INFO  (jetty-launcher-2773-thread-1) 
[n:127.0.0.1:44949_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating 
transient cache for 2147483647 transient cores
   [junit4]   2> 938322 INFO  (jetty-launcher-2773-thread-1) 
[n:127.0.0.1:44949_solr    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:44949_solr
   [junit4]   2> 938323 INFO  (zkCallback-2797-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 938323 INFO  (zkCallback-2804-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 938323 INFO  (jetty-launcher-2773-thread-2) 
[n:127.0.0.1:40305_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (2)
   [junit4]   2> 938326 INFO  (zkCallback-2789-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 938327 INFO  (jetty-launcher-2773-thread-3) 
[n:127.0.0.1:45953_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_45953.solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938327 INFO  (jetty-launcher-2773-thread-4) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:40620/solr
   [junit4]   2> 938328 INFO  (zkConnectionManagerCallback-2818-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938331 INFO  (zkConnectionManagerCallback-2820-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938333 INFO  (jetty-launcher-2773-thread-2) 
[n:127.0.0.1:40305_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating 
transient cache for 2147483647 transient cores
   [junit4]   2> 938333 INFO  (jetty-launcher-2773-thread-2) 
[n:127.0.0.1:40305_solr    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:40305_solr
   [junit4]   2> 938334 INFO  (zkCallback-2804-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 938334 INFO  (zkCallback-2797-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 938334 INFO  (zkCallback-2789-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 938334 INFO  (zkCallback-2811-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 938335 INFO  (jetty-launcher-2773-thread-3) 
[n:127.0.0.1:45953_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_45953.solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938335 INFO  (jetty-launcher-2773-thread-3) 
[n:127.0.0.1:45953_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_45953.solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938336 INFO  (jetty-launcher-2773-thread-3) 
[n:127.0.0.1:45953_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-001/node3/.
   [junit4]   2> 938340 INFO  (zkConnectionManagerCallback-2825-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938341 INFO  (jetty-launcher-2773-thread-1) 
[n:127.0.0.1:44949_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (3)
   [junit4]   2> 938346 INFO  (jetty-launcher-2773-thread-1) 
[n:127.0.0.1:44949_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster 
at 127.0.0.1:40620/solr ready
   [junit4]   2> 938348 INFO  (jetty-launcher-2773-thread-4) 
[n:127.0.0.1:41876_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (3)
   [junit4]   2> 938351 INFO  (jetty-launcher-2773-thread-4) 
[n:127.0.0.1:41876_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating 
transient cache for 2147483647 transient cores
   [junit4]   2> 938351 INFO  (jetty-launcher-2773-thread-4) 
[n:127.0.0.1:41876_solr    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:41876_solr
   [junit4]   2> 938352 INFO  (zkCallback-2797-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 938352 INFO  (zkCallback-2804-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 938352 INFO  (zkCallback-2789-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 938353 INFO  (jetty-launcher-2773-thread-1) 
[n:127.0.0.1:44949_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 938353 INFO  (zkCallback-2811-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 938353 INFO  (zkCallback-2819-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 938360 INFO  (zkCallback-2824-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 938361 INFO  (zkConnectionManagerCallback-2833-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938361 INFO  (jetty-launcher-2773-thread-2) 
[n:127.0.0.1:40305_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (4)
   [junit4]   2> 938362 INFO  (jetty-launcher-2773-thread-2) 
[n:127.0.0.1:40305_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster 
at 127.0.0.1:40620/solr ready
   [junit4]   2> 938362 INFO  (jetty-launcher-2773-thread-2) 
[n:127.0.0.1:40305_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 938385 INFO  (zkConnectionManagerCallback-2839-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938386 INFO  (jetty-launcher-2773-thread-4) 
[n:127.0.0.1:41876_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (4)
   [junit4]   2> 938387 INFO  (jetty-launcher-2773-thread-4) 
[n:127.0.0.1:41876_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster 
at 127.0.0.1:40620/solr ready
   [junit4]   2> 938387 INFO  (jetty-launcher-2773-thread-1) 
[n:127.0.0.1:44949_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_44949.solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938388 INFO  (jetty-launcher-2773-thread-2) 
[n:127.0.0.1:40305_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_40305.solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938392 INFO  (jetty-launcher-2773-thread-4) 
[n:127.0.0.1:41876_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 938395 INFO  (jetty-launcher-2773-thread-1) 
[n:127.0.0.1:44949_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_44949.solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938395 INFO  (jetty-launcher-2773-thread-1) 
[n:127.0.0.1:44949_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_44949.solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938396 INFO  (jetty-launcher-2773-thread-2) 
[n:127.0.0.1:40305_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_40305.solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938396 INFO  (jetty-launcher-2773-thread-1) 
[n:127.0.0.1:44949_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-001/node1/.
   [junit4]   2> 938396 INFO  (jetty-launcher-2773-thread-2) 
[n:127.0.0.1:40305_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_40305.solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938397 INFO  (jetty-launcher-2773-thread-2) 
[n:127.0.0.1:40305_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-001/node2/.
   [junit4]   2> 938403 INFO  (jetty-launcher-2773-thread-4) 
[n:127.0.0.1:41876_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_41876.solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938410 INFO  (jetty-launcher-2773-thread-4) 
[n:127.0.0.1:41876_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_41876.solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938410 INFO  (jetty-launcher-2773-thread-4) 
[n:127.0.0.1:41876_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_41876.solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 938420 INFO  (jetty-launcher-2773-thread-4) 
[n:127.0.0.1:41876_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-001/node4/.
   [junit4]   2> 938487 INFO  (zkConnectionManagerCallback-2842-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938490 INFO  (zkConnectionManagerCallback-2847-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 938490 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 938491 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:40620/solr ready
   [junit4]   2> 938516 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr    
] o.a.s.h.a.CollectionsHandler Invoked Collection Action :overseerstatus with 
params action=OVERSEERSTATUS&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 938529 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr    
] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections 
params={action=OVERSEERSTATUS&wt=javabin&version=2} status=0 QTime=13
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 938565 WARN  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.h.m.i.MetricsConfig Cannot locate configuration: tried 
hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 938575 WARN  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 938576 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] o.m.log 
jetty-6.1.26
   [junit4]   2> 938595 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] o.m.log 
Extract 
jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.4-tests.jar!/webapps/hdfs
 to ./temp/Jetty_localhost_36914_hdfs____r35bb7/webapp
   [junit4]   2> 938941 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] o.m.log 
Started HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:36914
   [junit4]   2> 939007 WARN  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 939008 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] o.m.log 
jetty-6.1.26
   [junit4]   2> 939020 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] o.m.log 
Extract 
jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.4-tests.jar!/webapps/datanode
 to ./temp/Jetty_localhost_38639_datanode____41tnge/webapp
   [junit4]   2> 939350 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] o.m.log 
Started HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:38639
   [junit4]   2> 939394 WARN  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 939396 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] o.m.log 
jetty-6.1.26
   [junit4]   2> 939427 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] o.m.log 
Extract 
jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.4-tests.jar!/webapps/datanode
 to ./temp/Jetty_localhost_46093_datanode____eqh4rz/webapp
   [junit4]   2> 939505 ERROR (DataNode: 
[[[DISK]file:/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-002/hdfsBaseDir/data/data1/,
 
[DISK]file:/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-002/hdfsBaseDir/data/data2/]]
  heartbeating to localhost/127.0.0.1:43732) [    ] 
o.a.h.h.s.d.DirectoryScanner 
dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value below 1 
ms/sec. Assuming default value of 1000
   [junit4]   2> 939517 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0xd840e52aabc24: from storage 
DS-60016a67-60d6-4b08-a181-d021b47031e5 node 
DatanodeRegistration(127.0.0.1:42147, 
datanodeUuid=6ccb6367-b5a2-46c6-ab02-14ec992210c8, infoPort=37047, 
infoSecurePort=0, ipcPort=33266, 
storageInfo=lv=-56;cid=testClusterID;nsid=1102441801;c=0), blocks: 0, 
hasStaleStorage: true, processing time: 1 msecs
   [junit4]   2> 939517 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0xd840e52aabc24: from storage 
DS-4356fac7-3b77-4733-87bc-bb5ee431f764 node 
DatanodeRegistration(127.0.0.1:42147, 
datanodeUuid=6ccb6367-b5a2-46c6-ab02-14ec992210c8, infoPort=37047, 
infoSecurePort=0, ipcPort=33266, 
storageInfo=lv=-56;cid=testClusterID;nsid=1102441801;c=0), blocks: 0, 
hasStaleStorage: false, processing time: 0 msecs
   [junit4]   2> 939819 INFO  
(SUITE-MoveReplicaHDFSTest-seed#[CCFCCEB35196AFAA]-worker) [    ] o.m.log 
Started HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:46093
   [junit4]   2> 939961 ERROR (DataNode: 
[[[DISK]file:/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-002/hdfsBaseDir/data/data3/,
 
[DISK]file:/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-002/hdfsBaseDir/data/data4/]]
  heartbeating to localhost/127.0.0.1:43732) [    ] 
o.a.h.h.s.d.DirectoryScanner 
dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value below 1 
ms/sec. Assuming default value of 1000
   [junit4]   2> 939977 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0xd840e6e2007ba: from storage 
DS-0fdbea41-a417-4bfd-a57a-6ce16f001a35 node 
DatanodeRegistration(127.0.0.1:37546, 
datanodeUuid=4b1c621c-4b55-44fe-a910-7bfd710b5a52, infoPort=40353, 
infoSecurePort=0, ipcPort=37175, 
storageInfo=lv=-56;cid=testClusterID;nsid=1102441801;c=0), blocks: 0, 
hasStaleStorage: true, processing time: 0 msecs
   [junit4]   2> 939978 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0xd840e6e2007ba: from storage 
DS-61ea4dfa-2224-4a4e-b6a3-092e7c342593 node 
DatanodeRegistration(127.0.0.1:37546, 
datanodeUuid=4b1c621c-4b55-44fe-a910-7bfd710b5a52, infoPort=40353, 
infoSecurePort=0, ipcPort=37175, 
storageInfo=lv=-56;cid=testClusterID;nsid=1102441801;c=0), blocks: 0, 
hasStaleStorage: false, processing time: 0 msecs
   [junit4]   2> 940274 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.SolrTestCaseJ4 ###Starting testNormalMove
   [junit4]   2> 940275 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 940277 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: 
d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 940278 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 940278 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 940278 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 940278 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@cd5b02{/solr,null,AVAILABLE}
   [junit4]   2> 940279 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@4038844b{SSL,[ssl, 
http/1.1]}{127.0.0.1:42663}
   [junit4]   2> 940279 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.e.j.s.Server Started @940330ms
   [junit4]   2> 940279 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, 
hostPort=42663}
   [junit4]   2> 940279 ERROR 
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 940279 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 940279 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.5.0
   [junit4]   2> 940279 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 940279 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 940280 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2018-09-09T17:53:34.439Z
   [junit4]   2> 940281 INFO  (zkConnectionManagerCallback-2851-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 940281 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 940513 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:40620/solr
   [junit4]   2> 940514 INFO  (zkConnectionManagerCallback-2855-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 940516 INFO  (zkConnectionManagerCallback-2857-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 940520 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) 
[n:127.0.0.1:42663_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (4)
   [junit4]   2> 940522 INFO  
(OverseerCollectionConfigSetProcessor-72306917245452296-127.0.0.1:45953_solr-n_0000000000)
 [n:127.0.0.1:45953_solr    ] o.a.s.c.OverseerTaskQueue Response ZK path: 
/overseer/collection-queue-work/qnr-0000000000 doesn't exist.  Requestor may 
have disconnected from ZooKeeper
   [junit4]   2> 940524 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) 
[n:127.0.0.1:42663_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating 
transient cache for 2147483647 transient cores
   [junit4]   2> 940524 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) 
[n:127.0.0.1:42663_solr    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:42663_solr
   [junit4]   2> 940525 INFO  (zkCallback-2789-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 940525 INFO  (zkCallback-2846-thread-2) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 940525 INFO  (zkCallback-2824-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 940525 INFO  (zkCallback-2832-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 940525 INFO  (zkCallback-2811-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 940525 INFO  (zkCallback-2819-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 940525 INFO  (zkCallback-2797-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 940525 INFO  (zkCallback-2804-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 940526 INFO  (zkCallback-2846-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 940526 INFO  (zkCallback-2838-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 940526 INFO  (zkCallback-2856-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 940544 INFO  (zkConnectionManagerCallback-2864-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 940545 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) 
[n:127.0.0.1:42663_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (5)
   [junit4]   2> 940546 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) 
[n:127.0.0.1:42663_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster 
at 127.0.0.1:40620/solr ready
   [junit4]   2> 940546 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) 
[n:127.0.0.1:42663_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 940562 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) 
[n:127.0.0.1:42663_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_42663.solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 940570 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) 
[n:127.0.0.1:42663_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_42663.solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 940570 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) 
[n:127.0.0.1:42663_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr_42663.solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 940572 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) 
[n:127.0.0.1:42663_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-001/node5/.
   [junit4]   2> 940622 INFO  (zkConnectionManagerCallback-2867-thread-1) [    
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 940623 INFO  
(TEST-MoveReplicaHDFSTest.testNormalMove-seed#[CCFCCEB35196AFAA]) [    ] 
o.a.s.c.MoveReplicaTest total_jettys: 5
   [junit4]   2> 940637 INFO  (qtp110358908-11313) [n:127.0.0.1:40305_solr    ] 
o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params 
collection.configName=conf1&maxShardsPerNode=2&autoAddReplicas=false&name=MoveReplicaHDFSTest_coll_false&nrtReplicas=2&action=CREATE&numShards=2&wt=javabin&version=2
 and sendToOCPQueue=true
   [junit4]   2> 940639 INFO  
(OverseerThreadFactory-5272-thread-2-processing-n:127.0.0.1:45953_solr) 
[n:127.0.0.1:45953_solr    ] o.a.s.c.a.c.CreateCollectionCmd Create collection 
MoveReplicaHDFSTest_coll_false
   [junit4]   2> 940746 INFO  
(OverseerStateUpdate-72306917245452296-127.0.0.1:45953_solr-n_0000000000) 
[n:127.0.0.1:45953_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"MoveReplicaHDFSTest_coll_false",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:40305/solr";,
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 940748 INFO  
(OverseerStateUpdate-72306917245452296-127.0.0.1:45953_solr-n_0000000000) 
[n:127.0.0.1:45953_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"MoveReplicaHDFSTest_coll_false",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_n2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:44949/solr";,
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 940751 INFO  
(OverseerStateUpdate-72306917245452296-127.0.0.1:45953_solr-n_0000000000) 
[n:127.0.0.1:45953_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"MoveReplicaHDFSTest_coll_false",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_n4",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:45953/solr";,
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 940753 INFO  
(OverseerStateUpdate-72306917245452296-127.0.0.1:45953_solr-n_0000000000) 
[n:127.0.0.1:45953_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"MoveReplicaHDFSTest_coll_false",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_n6",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:42663/solr";,
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 940965 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr    
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.h.a.CoreAdminOperation core create command 
qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_n1&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 940968 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr    
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] 
o.a.s.h.a.CoreAdminOperation core create command 
qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_n4&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 940969 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr    
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] 
o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 
transient cores
   [junit4]   2> 940995 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr    
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] 
o.a.s.h.a.CoreAdminOperation core create command 
qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_n2&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 940999 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr    
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.h.a.CoreAdminOperation core create command 
qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_n6&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 941994 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.5.0
   [junit4]   2> 942009 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.5.0
   [junit4]   2> 942017 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.5.0
   [junit4]   2> 942019 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.5.0
   [junit4]   2> 942021 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.s.IndexSchema 
[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] Schema name=minimal
   [junit4]   2> 942023 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.s.IndexSchema 
[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] Schema name=minimal
   [junit4]   2> 942024 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.s.IndexSchema 
[MoveReplicaHDFSTest_coll_false_shard1_replica_n2] Schema name=minimal
   [junit4]   2> 942025 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.s.IndexSchema 
[MoveReplicaHDFSTest_coll_false_shard2_replica_n6] Schema name=minimal
   [junit4]   2> 942025 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.s.IndexSchema Loaded 
schema minimal/1.1 with uniqueid field id
   [junit4]   2> 942025 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.CoreContainer 
Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_n1' using 
configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
   [junit4]   2> 942025 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.s.IndexSchema Loaded 
schema minimal/1.1 with uniqueid field id
   [junit4]   2> 942026 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.CoreContainer 
Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_n2' using 
configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
   [junit4]   2> 942026 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.m.r.SolrJmxReporter 
JMX monitoring for 
'solr_44949.solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n2' 
(registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n2') enabled 
at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 942026 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter 
JMX monitoring for 
'solr_40305.solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1' 
(registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1') enabled 
at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 942026 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:43732/data
   [junit4]   2> 942026 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 942026 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.SolrCore 
[[MoveReplicaHDFSTest_coll_false_shard1_replica_n2] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-001/node1/MoveReplicaHDFSTest_coll_false_shard1_replica_n2],
 
dataDir=[hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node5/data/]
   [junit4]   2> 942028 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.s.IndexSchema Loaded 
schema minimal/1.1 with uniqueid field id
   [junit4]   2> 942028 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node5/data/snapshot_metadata
   [junit4]   2> 942028 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.CoreContainer 
Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_n4' using 
configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
   [junit4]   2> 942028 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.m.r.SolrJmxReporter 
JMX monitoring for 
'solr_45953.solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4' 
(registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4') enabled 
at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 942028 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:43732/data
   [junit4]   2> 942028 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 942028 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.SolrCore 
[[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-001/node3/MoveReplicaHDFSTest_coll_false_shard2_replica_n4],
 
dataDir=[hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node7/data/]
   [junit4]   2> 942029 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node7/data/snapshot_metadata
   [junit4]   2> 942026 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.s.IndexSchema Loaded 
schema minimal/1.1 with uniqueid field id
   [junit4]   2> 942032 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.CoreContainer 
Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_n6' using 
configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
   [junit4]   2> 942032 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.m.r.SolrJmxReporter 
JMX monitoring for 
'solr_42663.solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n6' 
(registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n6') enabled 
at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a98e004
   [junit4]   2> 942033 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:43732/data
   [junit4]   2> 942034 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 942034 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.SolrCore 
[[MoveReplicaHDFSTest_coll_false_shard2_replica_n6] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-001/node5/MoveReplicaHDFSTest_coll_false_shard2_replica_n6],
 
dataDir=[hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node8/data/]
   [junit4]   2> 942037 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:43732/data
   [junit4]   2> 942037 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 942037 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.SolrCore 
[[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_CCFCCEB35196AFAA-001/tempDir-001/node2/MoveReplicaHDFSTest_coll_false_shard1_replica_n1],
 
dataDir=[hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node3/data/]
   [junit4]   2> 942039 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node3/data/snapshot_metadata
   [junit4]   2> 942046 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node8/data/snapshot_metadata
   [junit4]   2> 942088 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node5/data
   [junit4]   2> 942088 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node3/data
   [junit4]   2> 942092 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node8/data
   [junit4]   2> 942094 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node7/data
   [junit4]   2> 942120 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node5/data/index
   [junit4]   2> 942157 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node8/data/index
   [junit4]   2> 942166 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node3/data/index
   [junit4]   2> 942166 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:43732/data/MoveReplicaHDFSTest_coll_false/core_node7/data/index
   [junit4]   2> 942338 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:37546 is added to 
blk_1073741827_1003{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-60016a67-60d6-4b08-a181-d021b47031e5:NORMAL:127.0.0.1:42147|RBW],
 
ReplicaUC[[DISK]DS-0fdbea41-a417-4bfd-a57a-6ce16f001a35:NORMAL:127.0.0.1:37546|FINALIZED]]}
 size 0
   [junit4]   2> 942339 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:42147 is added to 
blk_1073741825_1001{UCState=COMMITTED, truncateBlock=null, primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-60016a67-60d6-4b08-a181-d021b47031e5:NORMAL:127.0.0.1:42147|RBW],
 
ReplicaUC[[DISK]DS-61ea4dfa-2224-4a4e-b6a3-092e7c342593:NORMAL:127.0.0.1:37546|RBW]]}
 size 69
   [junit4]   2> 942344 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:42147 is added to 
blk_1073741827_1003 size 69
   [junit4]   2> 942344 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:37546 is added to 
blk_1073741825_1001 size 69
   [junit4]   2> 942348 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:42147 is added to 
blk_1073741828_1004{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-61ea4dfa-2224-4a4e-b6a3-092e7c342593:NORMAL:127.0.0.1:37546|RBW],
 
ReplicaUC[[DISK]DS-4356fac7-3b77-4733-87bc-bb5ee431f764:NORMAL:127.0.0.1:42147|RBW]]}
 size 0
   [junit4]   2> 942350 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:37546 is added to 
blk_1073741826_1002{UCState=COMMITTED, truncateBlock=null, primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-60016a67-60d6-4b08-a181-d021b47031e5:NORMAL:127.0.0.1:42147|RBW],
 
ReplicaUC[[DISK]DS-0fdbea41-a417-4bfd-a57a-6ce16f001a35:NORMAL:127.0.0.1:37546|RBW]]}
 size 69
   [junit4]   2> 942358 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:42147 is added to 
blk_1073741826_1002 size 69
   [junit4]   2> 942358 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:37546 is added to 
blk_1073741828_1004 size 69
   [junit4]   2> 942404 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 942404 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH 
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 942404 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.HdfsUpdateLog 
Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 942414 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 942414 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 942450 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 942450 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH 
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 942450 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.HdfsUpdateLog 
Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 942457 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@53c993de[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
main]
   [junit4]   2> 942459 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 942459 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 942460 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.h.ReplicationHandler 
Commits will be reserved for 10000ms.
   [junit4]   2> 942460 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1611153399212933120
   [junit4]   2> 942462 INFO  
(searcherExecutor-5304-thread-1-processing-n:127.0.0.1:40305_solr 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.SolrCore 
[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] Registered new searcher 
Searcher@53c993de[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 942463 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 942463 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 942480 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.ZkShardTerms 
Successful update of terms at 
/collections/MoveReplicaHDFSTest_coll_false/terms/shard1 to 
Terms{values={core_node3=0}, version=0}
   [junit4]   2> 942495 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for 
shard shard1: total=2 found=1 timeoutin=9997ms
   [junit4]   2> 942504 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@74c1c581[MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
main]
   [junit4]   2> 942505 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 942506 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 942507 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.h.ReplicationHandler 
Commits will be reserved for 10000ms.
   [junit4]   2> 942508 INFO  
(searcherExecutor-5307-thread-1-processing-n:127.0.0.1:42663_solr 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.SolrCore 
[MoveReplicaHDFSTest_coll_false_shard2_replica_n6] Registered new searcher 
Searcher@74c1c581[MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 942508 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1611153399263264768
   [junit4]   2> 942514 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.ZkShardTerms 
Successful update of terms at 
/collections/MoveReplicaHDFSTest_coll_false/terms/shard2 to 
Terms{values={core_node8=0}, version=0}
   [junit4]   2> 942523 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for 
shard shard2: total=2 found=1 timeoutin=9999ms
   [junit4]   2> 942798 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 942798 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH 
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 942798 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.HdfsUpdateLog 
Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 942800 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 942800 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH 
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 942800 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.HdfsUpdateLog 
Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 942809 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 942809 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 942810 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 942810 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 942829 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@31fd5526[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] 
main]
   [junit4]   2> 942830 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@54b02a32[MoveReplicaHDFSTest_coll_false_shard1_replica_n2] 
main]
   [junit4]   2> 942831 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 942831 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 942831 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 942832 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 942832 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.h.ReplicationHandler 
Commits will be reserved for 10000ms.
   [junit4]   2> 942832 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.h.ReplicationHandler 
Commits will be reserved for 10000ms.
   [junit4]   2> 942833 INFO  
(searcherExecutor-5306-thread-1-processing-n:127.0.0.1:45953_solr 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.SolrCore 
[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] Registered new searcher 
Searcher@31fd5526[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 942833 INFO  
(searcherExecutor-5305-thread-1-processing-n:127.0.0.1:44949_solr 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.SolrCore 
[MoveReplicaHDFSTest_coll_false_shard1_replica_n2] Registered new searcher 
Searcher@54b02a32[MoveReplicaHDFSTest_coll_false_shard1_replica_n2] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 942833 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1611153399604051968
   [junit4]   2> 942833 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1611153399604051968
   [junit4]   2> 942837 INFO  (qtp915805795-11315) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.ZkShardTerms 
Successful update of terms at 
/collections/MoveReplicaHDFSTest_coll_false/terms/shard1 to 
Terms{values={core_node3=0, core_node5=0}, version=1}
   [junit4]   2> 942838 INFO  (qtp2112083181-11323) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.ZkShardTerms 
Successful update of terms at 
/collections/MoveReplicaHDFSTest_coll_false/terms/shard2 to 
Terms{values={core_node7=0, core_node8=0}, version=1}
   [junit4]   2> 942999 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 942999 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 942999 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.SyncStrategy Sync 
replicas to 
https://127.0.0.1:40305/solr/MoveReplicaHDFSTest_coll_false_shard1_replica_n1/
   [junit4]   2> 942999 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.PeerSync PeerSync: 
core=MoveReplicaHDFSTest_coll_false_shard1_replica_n1 
url=https://127.0.0.1:40305/solr START 
replicas=[https://127.0.0.1:44949/solr/MoveReplicaHDFSTest_coll_false_shard1_replica_n2/]
 nUpdates=100
   [junit4]   2> 943000 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.PeerSync PeerSync: 
core=MoveReplicaHDFSTest_coll_false_shard1_replica_n1 
url=https://127.0.0.1:40305/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 943004 INFO  (qtp915805795-11336) [n:127.0.0.1:44949_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.S.Request 
[MoveReplicaHDFSTest_coll_false_shard1_replica_n2]  webapp=/solr path=/get 
params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2}
 status=0 QTime=0
   [junit4]   2> 943005 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.SyncStrategy 
Leader's attempt to sync with shard failed, moving to the next candidate
   [junit4]   2> 943005 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we 
can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 943005 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext Found all replicas participating in 
election, clear LIR
   [junit4]   2> 943007 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext I am the new leader: 
https://127.0.0.1:40305/solr/MoveReplicaHDFSTest_coll_false_shard1_replica_n1/ 
shard1
   [junit4]   2> 943024 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 943024 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 943024 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.SyncStrategy Sync 
replicas to 
https://127.0.0.1:42663/solr/MoveReplicaHDFSTest_coll_false_shard2_replica_n6/
   [junit4]   2> 943024 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.PeerSync PeerSync: 
core=MoveReplicaHDFSTest_coll_false_shard2_replica_n6 
url=https://127.0.0.1:42663/solr START 
replicas=[https://127.0.0.1:45953/solr/MoveReplicaHDFSTest_coll_false_shard2_replica_n4/]
 nUpdates=100
   [junit4]   2> 943025 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.PeerSync PeerSync: 
core=MoveReplicaHDFSTest_coll_false_shard2_replica_n6 
url=https://127.0.0.1:42663/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 943027 INFO  (qtp2112083181-11321) [n:127.0.0.1:45953_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.S.Request 
[MoveReplicaHDFSTest_coll_false_shard2_replica_n4]  webapp=/solr path=/get 
params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2}
 status=0 QTime=0
   [junit4]   2> 943028 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.SyncStrategy 
Leader's attempt to sync with shard failed, moving to the next candidate
   [junit4]   2> 943028 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we 
can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 943028 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.c.ShardLeaderElectionContext Found all replicas participating in 
election, clear LIR
   [junit4]   2> 943031 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] 
o.a.s.c.ShardLeaderElectionContext I am the new leader: 
https://127.0.0.1:42663/solr/MoveReplicaHDFSTest_coll_false_shard2_replica_n6/ 
shard2
   [junit4]   2> 943133 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.ZkController I am 
the leader, no recovery necessary
   [junit4]   2> 943137 INFO  (qtp410285370-11610) [n:127.0.0.1:42663_solr 
c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 
x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.s.HttpSolrCall 
[admin] webapp=null path=/admin/cores 
params={qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_n6&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=NRT}
 status=0 QTime=2138
   [junit4]   2> 943160 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.ZkController I am 
the leader, no recovery necessary
   [junit4]   2> 943161 INFO  (qtp110358908-11317) [n:127.0.0.1:40305_solr 
c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 
x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.s.HttpSolrCall 
[admin] webapp=null path=/admin/cores 
params={qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_n1&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=NRT}
 status=0 QTime=2196
   [junit4]   2> 943262 INFO  (zkCallback-2856-thread-1) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/MoveReplicaHDFSTest_coll_false/state.json] for collection 
[MoveReplicaHDFSTest_coll_false] has occurred - updating... (live nodes size: 
[5])
   [junit4]   2> 943262 INFO  (zkCallback-2811-thread-1) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncCon

[...truncated too long message...]

heck:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/null1410140276
     [copy] Copying 239 files to 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/null1410140276
   [delete] Deleting directory 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/null1410140276

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: 
org.eclipse.jgit#org.eclipse.jgit-caller;working
[ivy:cachepath]         confs: [default]
[ivy:cachepath]         found 
org.eclipse.jgit#org.eclipse.jgit;4.6.0.201612231935-r in public
[ivy:cachepath]         found com.jcraft#jsch;0.1.53 in public
[ivy:cachepath]         found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath]         found org.apache.httpcomponents#httpclient;4.3.6 in 
public
[ivy:cachepath]         found org.apache.httpcomponents#httpcore;4.3.3 in public
[ivy:cachepath]         found commons-logging#commons-logging;1.1.3 in public
[ivy:cachepath]         found commons-codec#commons-codec;1.6 in public
[ivy:cachepath]         found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 53ms :: artifacts dl 5ms
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      default     |   8   |   0   |   0   |   0   ||   8   |   0   |
        ---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
[wc-checker] SLF4J: Defaulting to no-operation (NOP) logger implementation
[wc-checker] SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for 
further details.
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 86 minutes 9 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
        at hudson.FilePath$34.hasMatch(FilePath.java:2678)
        at hudson.FilePath$34.invoke(FilePath.java:2557)
        at hudson.FilePath$34.invoke(FilePath.java:2547)
        at hudson.FilePath$FileCallableWrapper.call(FilePath.java:2918)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene
                at 
hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
                at 
hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
                at hudson.remoting.Channel.call(Channel.java:955)
                at hudson.FilePath.act(FilePath.java:1036)
                at hudson.FilePath.act(FilePath.java:1025)
                at hudson.FilePath.validateAntFileMask(FilePath.java:2547)
                at 
hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
                at 
hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
                at 
hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
                at hudson.model.Build$BuildExecution.post2(Build.java:186)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
                at hudson.model.Run.execute(Run.java:1819)
                at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
                at 
hudson.model.ResourceController.execute(ResourceController.java:97)
                at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
        at hudson.FilePath$FileCallableWrapper.call(FilePath.java:2920)
        at hudson.remoting.UserRequest.perform(UserRequest.java:212)
        at hudson.remoting.UserRequest.perform(UserRequest.java:54)
        at hudson.remoting.Request$2.run(Request.java:369)
        at 
hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no 
matches found within 10000
        at hudson.FilePath.act(FilePath.java:1038)
        at hudson.FilePath.act(FilePath.java:1025)
        at hudson.FilePath.validateAntFileMask(FilePath.java:2547)
        at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
        at 
hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
        at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
        at hudson.model.Build$BuildExecution.post2(Build.java:186)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
        at hudson.model.Run.execute(Run.java:1819)
        at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
        at hudson.model.ResourceController.execute(ResourceController.java:97)
        at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern 
"**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
