Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1858/

1 test failed.
FAILED:  org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest.testSimple

Error Message:
Waiting for collection testSimple2
Timeout waiting to see state for collection=testSimple2
:DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node5":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"down"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node8":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"down"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
Live Nodes: [127.0.0.1:41913_solr, 127.0.0.1:43581_solr]
Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node5":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"down"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node8":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"down"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}

Stack Trace:
java.lang.AssertionError: Waiting for collection testSimple2
Timeout waiting to see state for collection=testSimple2 
:DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node5":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"down"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node8":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"down"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
Live Nodes: [127.0.0.1:41913_solr, 127.0.0.1:43581_solr]
Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node5":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"down"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node8":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"down"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
        at __randomizedtesting.SeedInfo.seed([361D0D430A63287:3BD2F42A1755E656]:0)
        at org.junit.Assert.fail(Assert.java:88)
        at org.apache.solr.cloud.SolrCloudTestCase.waitForState(SolrCloudTestCase.java:310)
        at org.apache.solr.cloud.autoscaling.AutoAddReplicasIntegrationTest.testSimple(AutoAddReplicasIntegrationTest.java:169)
        at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.base/java.lang.reflect.Method.invoke(Method.java:566)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.base/java.lang.Thread.run(Thread.java:834)
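
[Editor's note] The frame at SolrCloudTestCase.waitForState(SolrCloudTestCase.java:310) is where the test polls the testSimple2 cluster state until a predicate matches or the timeout elapses; on timeout it fails with the expected/last-available DocCollection dump shown above. Below is a minimal, illustrative sketch of that waiting pattern, not the literal AutoAddReplicasIntegrationTest code: the class name and the inline predicate are hypothetical, and the real test may use a different predicate and timeout.

    import java.util.Set;
    import org.apache.solr.cloud.SolrCloudTestCase;
    import org.apache.solr.common.cloud.Slice;
    import org.junit.Test;

    // Sketch only: cluster setup (configureCluster(...)) is omitted.
    public class WaitForStateSketch extends SolrCloudTestCase {

      @Test
      public void waitForTestSimple2Recovery() throws Exception {
        // waitForState(message, collection, predicate) re-checks the predicate
        // against the ZooKeeper cluster state and fails (as above) if it never
        // becomes true within the timeout.
        waitForState("Waiting for collection testSimple2", "testSimple2",
            (Set<String> liveNodes, org.apache.solr.common.cloud.DocCollection coll) -> {
              if (coll == null || coll.getSlices().size() != 2) {
                return false;
              }
              // autoAddReplicas should eventually leave each of the 2 shards with
              // 2 replicas ACTIVE on live nodes; in the failure above core_node5
              // and core_node8 were still "down" when time ran out.
              for (Slice shard : coll.getSlices()) {
                long active = shard.getReplicas().stream()
                    .filter(r -> r.isActive(liveNodes))
                    .count();
                if (active != 2) {
                  return false;
                }
              }
              return true;
            });
      }
    }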




Build Log:
[...truncated 14756 lines...]
   [junit4] Suite: 
org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest
   [junit4]   2> 2980352 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: 
test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> Creating dataDir: 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/init-core-data-001
   [junit4]   2> 2980353 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) 
w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 2980354 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: 
@org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0, 
clientAuth=0.0/0.0)
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 2980419 WARN  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.a.h.m.i.MetricsConfig Cannot locate configuration: tried 
hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 2980435 WARN  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 2980438 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2980439 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2980439 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2980439 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2980440 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@6d01064b{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 2980668 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.w.WebAppContext@27cf48f3{hdfs,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-35532-hdfs-_-any-8530677796192721066.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 2980669 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@1187843d{HTTP/1.1,[http/1.1]}{localhost:35532}
   [junit4]   2> 2980669 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.Server Started @2980734ms
   [junit4]   2> 2980748 WARN  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 2980750 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2980751 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2980751 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2980751 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2980751 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@bda7ed7{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 2980926 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.w.WebAppContext@4c12ea3{datanode,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-35336-datanode-_-any-4716850712572889762.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 2980926 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@112e080c{HTTP/1.1,[http/1.1]}{localhost:35336}
   [junit4]   2> 2980926 INFO  
(SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] 
o.e.j.s.Server Started @2980992ms
   [junit4]   2> 2981101 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x43539fc59a27abe7: Processing first storage report for 
DS-040cda49-d6f2-494a-8530-00b6a7a01fd6 from datanode 
256d0d71-b1d4-4ba5-908b-e9c8fa080cc3
   [junit4]   2> 2981102 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x43539fc59a27abe7: from storage 
DS-040cda49-d6f2-494a-8530-00b6a7a01fd6 node 
DatanodeRegistration(127.0.0.1:40143, 
datanodeUuid=256d0d71-b1d4-4ba5-908b-e9c8fa080cc3, infoPort=34049, 
infoSecurePort=0, ipcPort=41564, 
storageInfo=lv=-57;cid=testClusterID;nsid=1842625238;c=1559264232657), blocks: 
0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 2981102 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x43539fc59a27abe7: Processing first storage report for 
DS-9502dc70-d4c3-4373-aa7e-2a0c83d266fd from datanode 
256d0d71-b1d4-4ba5-908b-e9c8fa080cc3
   [junit4]   2> 2981102 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x43539fc59a27abe7: from storage 
DS-9502dc70-d4c3-4373-aa7e-2a0c83d266fd node 
DatanodeRegistration(127.0.0.1:40143, 
datanodeUuid=256d0d71-b1d4-4ba5-908b-e9c8fa080cc3, infoPort=34049, 
infoSecurePort=0, ipcPort=41564, 
storageInfo=lv=-57;cid=testClusterID;nsid=1842625238;c=1559264232657), blocks: 
0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 2981136 INFO  
(TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [   
 ] o.a.s.SolrTestCaseJ4 ###Starting testSimple
   [junit4]   2> 2981137 INFO  
(TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [   
 ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 3 servers in 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002
   [junit4]   2> 2981137 INFO  
(TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [   
 ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 2981138 INFO  (ZkTestServer Run Thread) [    ] 
o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 2981138 INFO  (ZkTestServer Run Thread) [    ] 
o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 2981238 INFO  
(TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [   
 ] o.a.s.c.ZkTestServer start zk server on port:33581
   [junit4]   2> 2981238 INFO  
(TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [   
 ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:33581
   [junit4]   2> 2981238 INFO  
(TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [   
 ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 33581
   [junit4]   2> 2981248 INFO  (zkConnectionManagerCallback-14872-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981255 INFO  (zkConnectionManagerCallback-14874-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981257 INFO  (zkConnectionManagerCallback-14876-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981261 WARN  (jetty-launcher-14877-thread-2) [    ] 
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 2981261 WARN  (jetty-launcher-14877-thread-3) [    ] 
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 2981261 WARN  (jetty-launcher-14877-thread-1) [    ] 
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 2981261 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 2981261 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2981261 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 2981261 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2981262 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2981262 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2981262 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 2981262 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2981262 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@4e5a0d38{/solr,null,AVAILABLE}
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@5ca0d666{/solr,null,AVAILABLE}
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@28bbc790{HTTP/1.1,[http/1.1, 
h2c]}{127.0.0.1:41913}
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.e.j.s.Server Started @2981331ms
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, 
hostPort=41913}
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@3e5111be{HTTP/1.1,[http/1.1, 
h2c]}{127.0.0.1:43581}
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.e.j.s.Server Started @2981332ms
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, 
hostPort=43581}
   [junit4]   2> 2981266 ERROR (jetty-launcher-14877-thread-1) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
9.0.0
   [junit4]   2> 2981266 ERROR (jetty-launcher-14877-thread-3) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2019-05-31T00:57:13.529769Z
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
9.0.0
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2019-05-31T00:57:13.529889Z
   [junit4]   2> 2981267 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2981267 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2981267 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2981268 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@15132aad{/solr,null,AVAILABLE}
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@635b805f{HTTP/1.1,[http/1.1, 
h2c]}{127.0.0.1:46515}
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.e.j.s.Server Started @2981334ms
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, 
hostPort=46515}
   [junit4]   2> 2981269 ERROR (jetty-launcher-14877-thread-2) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
9.0.0
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2019-05-31T00:57:13.532829Z
   [junit4]   2> 2981270 INFO  (zkConnectionManagerCallback-14881-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981271 INFO  (zkConnectionManagerCallback-14879-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981271 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 2981271 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 2981272 INFO  (zkConnectionManagerCallback-14883-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981272 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 2981275 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay 
is ignored
   [junit4]   2> 2981275 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter 
autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2981275 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay 
is ignored
   [junit4]   2> 2981275 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter 
autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2981277 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.c.SolrXmlConfig MBean server found: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c, but no JMX reporters were 
configured - adding default JMX reporter.
   [junit4]   2> 2981277 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.c.SolrXmlConfig MBean server found: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c, but no JMX reporters were 
configured - adding default JMX reporter.
   [junit4]   2> 2981280 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay 
is ignored
   [junit4]   2> 2981280 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter 
autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2981282 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.c.SolrXmlConfig MBean server found: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c, but no JMX reporters were 
configured - adding default JMX reporter.
   [junit4]   2> 2981384 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: 
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 2981385 WARN  (jetty-launcher-14877-thread-3) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@5cdf813a[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981389 WARN  (jetty-launcher-14877-thread-3) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@73dca498[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981390 INFO  (jetty-launcher-14877-thread-3) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33581/solr
   [junit4]   2> 2981393 INFO  (zkConnectionManagerCallback-14892-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981396 INFO  (zkConnectionManagerCallback-14894-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981500 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: 
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 2981501 WARN  (jetty-launcher-14877-thread-2) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@68c21517[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981504 WARN  (jetty-launcher-14877-thread-2) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@48a3eb44[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981506 INFO  (jetty-launcher-14877-thread-2) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33581/solr
   [junit4]   2> 2981507 INFO  (zkConnectionManagerCallback-14902-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981509 INFO  (jetty-launcher-14877-thread-3) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.OverseerElectionContext I am going to be 
the leader 127.0.0.1:43581_solr
   [junit4]   2> 2981510 INFO  (zkConnectionManagerCallback-14904-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981510 INFO  (jetty-launcher-14877-thread-3) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.Overseer Overseer 
(id=73394461771694087-127.0.0.1:43581_solr-n_0000000000) starting
   [junit4]   2> 2981521 INFO  (zkConnectionManagerCallback-14911-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981524 INFO  (jetty-launcher-14877-thread-3) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster 
at 127.0.0.1:33581/solr ready
   [junit4]   2> 2981530 INFO  (jetty-launcher-14877-thread-2) 
[n:127.0.0.1:46515_solr    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:46515_solr
   [junit4]   2> 2981542 INFO  
(OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.Overseer Starting to work on the main 
queue : 127.0.0.1:43581_solr
   [junit4]   2> 2981543 INFO  
(OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (1)
   [junit4]   2> 2981546 INFO  (jetty-launcher-14877-thread-3) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.ZkController Publish 
node=127.0.0.1:43581_solr as DOWN
   [junit4]   2> 2981548 INFO  (jetty-launcher-14877-thread-3) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating 
transient cache for 4 transient cores
   [junit4]   2> 2981548 INFO  (jetty-launcher-14877-thread-3) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:43581_solr
   [junit4]   2> 2981548 INFO  (zkCallback-14910-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2981552 INFO  (zkCallback-14910-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2981566 INFO  (zkCallback-14903-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 2981574 INFO  (zkCallback-14893-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2981582 INFO  (jetty-launcher-14877-thread-3) 
[n:127.0.0.1:43581_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 2981584 INFO  (zkConnectionManagerCallback-14916-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981587 INFO  (jetty-launcher-14877-thread-2) 
[n:127.0.0.1:46515_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (2)
   [junit4]   2> 2981588 INFO  (jetty-launcher-14877-thread-2) 
[n:127.0.0.1:46515_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster 
at 127.0.0.1:33581/solr ready
   [junit4]   2> 2981641 INFO  (jetty-launcher-14877-thread-2) 
[n:127.0.0.1:46515_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 2981644 INFO  (jetty-launcher-14877-thread-3) 
[n:127.0.0.1:43581_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981683 INFO  (jetty-launcher-14877-thread-3) 
[n:127.0.0.1:43581_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981684 INFO  (jetty-launcher-14877-thread-3) 
[n:127.0.0.1:43581_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981688 INFO  (jetty-launcher-14877-thread-3) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node3/.
   [junit4]   2> 2981697 INFO  (jetty-launcher-14877-thread-2) 
[n:127.0.0.1:46515_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981704 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: 
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 2981706 WARN  (jetty-launcher-14877-thread-1) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@439a77a7[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981709 WARN  (jetty-launcher-14877-thread-1) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@3df98575[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981710 INFO  (jetty-launcher-14877-thread-1) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33581/solr
   [junit4]   2> 2981712 INFO  (zkConnectionManagerCallback-14924-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981716 INFO  (zkConnectionManagerCallback-14926-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981718 INFO  (jetty-launcher-14877-thread-2) 
[n:127.0.0.1:46515_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981718 INFO  (jetty-launcher-14877-thread-2) 
[n:127.0.0.1:46515_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981720 INFO  (jetty-launcher-14877-thread-2) 
[n:127.0.0.1:46515_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node2/.
   [junit4]   2> 2981723 INFO  (jetty-launcher-14877-thread-1) 
[n:127.0.0.1:41913_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (2)
   [junit4]   2> 2981731 INFO  (jetty-launcher-14877-thread-1) 
[n:127.0.0.1:41913_solr    ] o.a.s.c.ZkController Publish 
node=127.0.0.1:41913_solr as DOWN
   [junit4]   2> 2981732 INFO  (jetty-launcher-14877-thread-1) 
[n:127.0.0.1:41913_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating 
transient cache for 4 transient cores
   [junit4]   2> 2981732 INFO  (jetty-launcher-14877-thread-1) 
[n:127.0.0.1:41913_solr    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:41913_solr
   [junit4]   2> 2981734 INFO  (zkCallback-14903-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2981734 INFO  (zkCallback-14910-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2981734 INFO  (zkCallback-14893-thread-2) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2981734 INFO  (zkCallback-14915-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2981736 INFO  (zkCallback-14925-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2981739 INFO  (zkConnectionManagerCallback-14933-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981741 INFO  (jetty-launcher-14877-thread-1) 
[n:127.0.0.1:41913_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (3)
   [junit4]   2> 2981742 INFO  (jetty-launcher-14877-thread-1) 
[n:127.0.0.1:41913_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster 
at 127.0.0.1:33581/solr ready
   [junit4]   2> 2981763 INFO  (jetty-launcher-14877-thread-1) 
[n:127.0.0.1:41913_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 2981795 INFO  (jetty-launcher-14877-thread-1) 
[n:127.0.0.1:41913_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981816 INFO  (jetty-launcher-14877-thread-1) 
[n:127.0.0.1:41913_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981816 INFO  (jetty-launcher-14877-thread-1) 
[n:127.0.0.1:41913_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981818 INFO  (jetty-launcher-14877-thread-1) 
[n:127.0.0.1:41913_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node1/.
   [junit4]   2> 2981896 INFO  (zkConnectionManagerCallback-14939-thread-1) [   
 ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981898 INFO  
(TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [   
 ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 2981900 INFO  
(TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [   
 ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33581/solr 
ready
   [junit4]   2> 2981907 INFO  (qtp1194898800-43151) [n:127.0.0.1:41913_solr    
] o.a.s.s.HttpSolrCall [admin] webapp=null path=/cluster 
params={wt=javabin&version=2} status=0 QTime=2
   [junit4]   2> 2981910 INFO  (qtp502828306-43156) [n:127.0.0.1:46515_solr    
] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params 
collection.configName=conf&maxShardsPerNode=2&autoAddReplicas=true&name=testSimple1&nrtReplicas=2&action=CREATE&numShards=2&createNodeSet=127.0.0.1:43581_solr,127.0.0.1:46515_solr&wt=javabin&version=2
 and sendToOCPQueue=true
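
[Editor's note] The CollectionsHandler entry just above logs the Collections API CREATE parameters for testSimple1 (configset conf, 2 shards, nrtReplicas=2, maxShardsPerNode=2, autoAddReplicas=true, createNodeSet limited to two of the three nodes). A hedged SolrJ equivalent is sketched below for readers following along; it is an assumption-level illustration, not the literal test code, and the helper class/method names are invented for the example.

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;

    // Illustrative sketch of an equivalent SolrJ CREATE call for the request
    // logged above. "client" is assumed to point at the test cluster.
    class CreateTestSimple1Sketch {
      static void create(SolrClient client) throws Exception {
        CollectionAdminRequest
            .createCollection("testSimple1", "conf", 2, 2) // name, configset, shards, NRT replicas
            .setMaxShardsPerNode(2)
            .setAutoAddReplicas(true)
            .setCreateNodeSet("127.0.0.1:43581_solr,127.0.0.1:46515_solr")
            .process(client);
      }
    }
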
   [junit4]   2> 2981914 INFO  
(OverseerThreadFactory-12067-thread-1-processing-n:127.0.0.1:43581_solr) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.a.c.CreateCollectionCmd Create collection 
testSimple1
   [junit4]   2> 2982024 INFO  
(OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:43581/solr";,
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 2982027 INFO  
(OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:46515/solr";,
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 2982031 INFO  
(OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n4",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:43581/solr";,
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 2982033 INFO  
(OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) 
[n:127.0.0.1:43581_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n6",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:46515/solr";,
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 2982237 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr    
x:testSimple1_shard1_replica_n2] o.a.s.h.a.CoreAdminOperation core create 
command 
qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n2&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 2982237 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr    
x:testSimple1_shard1_replica_n2] o.a.s.c.TransientSolrCoreCacheDefault 
Allocating transient cache for 4 transient cores
   [junit4]   2> 2982238 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr    
x:testSimple1_shard2_replica_n6] o.a.s.h.a.CoreAdminOperation core create 
command 
qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n6&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 2982238 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr    
x:testSimple1_shard2_replica_n4] o.a.s.h.a.CoreAdminOperation core create 
command 
qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n4&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 2982238 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr    
x:testSimple1_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create 
command 
qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n1&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 2983252 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2983252 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2983252 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2983280 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2983289 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.s.IndexSchema [testSimple1_shard1_replica_n1] Schema name=minimal
   [junit4]   2> 2983289 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.s.IndexSchema [testSimple1_shard2_replica_n6] Schema name=minimal
   [junit4]   2> 2983289 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.s.IndexSchema [testSimple1_shard1_replica_n2] Schema name=minimal
   [junit4]   2> 2983291 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.s.IndexSchema [testSimple1_shard2_replica_n4] Schema name=minimal
   [junit4]   2> 2983291 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 2983291 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 2983291 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n6' using 
configuration from collection testSimple1, trusted=true
   [junit4]   2> 2983291 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n2' using 
configuration from collection testSimple1, trusted=true
   [junit4]   2> 2983291 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 2983292 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n1' using 
configuration from collection testSimple1, trusted=true
   [junit4]   2> 2983292 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.core.testSimple1.shard2.replica_n6' (registry 
'solr.core.testSimple1.shard2.replica_n6') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2983292 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.core.testSimple1.shard1.replica_n2' (registry 
'solr.core.testSimple1.shard1.replica_n2') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2983292 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.core.testSimple1.shard1.replica_n1' (registry 
'solr.core.testSimple1.shard1.replica_n1') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2983293 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 2983293 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n4' using 
configuration from collection testSimple1, trusted=true
   [junit4]   2> 2983294 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.core.testSimple1.shard2.replica_n4' (registry 
'solr.core.testSimple1.shard2.replica_n4') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2983295 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:37562/solr_hdfs_home
   [junit4]   2> 2983295 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2983295 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.SolrCore [[testSimple1_shard2_replica_n6] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node2/testSimple1_shard2_replica_n6],
 dataDir=[hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node8/data/]
   [junit4]   2> 2983296 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:37562/solr_hdfs_home
   [junit4]   2> 2983296 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2983296 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.SolrCore [[testSimple1_shard1_replica_n1] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node3/testSimple1_shard1_replica_n1],
 dataDir=[hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node3/data/]
   [junit4]   2> 2983296 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:37562/solr_hdfs_home
   [junit4]   2> 2983296 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2983296 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.SolrCore [[testSimple1_shard1_replica_n2] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node2/testSimple1_shard1_replica_n2],
 dataDir=[hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node5/data/]
   [junit4]   2> 2983296 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:37562/solr_hdfs_home
   [junit4]   2> 2983297 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2983297 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.SolrCore [[testSimple1_shard2_replica_n4] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node3/testSimple1_shard2_replica_n4],
 dataDir=[hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node7/data/]
   [junit4]   2> 2983297 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node8/data/snapshot_metadata
   [junit4]   2> 2983306 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node5/data/snapshot_metadata
   [junit4]   2> 2983306 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node7/data/snapshot_metadata
   [junit4]   2> 2983306 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node3/data/snapshot_metadata
   [junit4]   2> 2983313 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2983313 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983313 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983314 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2983314 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983314 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983314 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2983314 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983314 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983321 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2983321 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983321 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983327 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983328 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983329 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node7/data
   [junit4]   2> 2983331 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node5/data
   [junit4]   2> 2983340 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983342 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node3/data
   [junit4]   2> 2983353 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node7/data/index
   [junit4]   2> 2983356 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node5/data/index
   [junit4]   2> 2983361 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2983361 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983361 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983363 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node3/data/index
   [junit4]   2> 2983363 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2983363 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983363 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983367 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983371 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983376 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2983376 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983376 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983385 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983473 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983474 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node8/data
   [junit4]   2> 2983526 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node8/data/index
   [junit4]   2> 2983536 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 2983536 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH 
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2983536 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 2983545 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 2983545 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH 
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2983545 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 2983546 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2983547 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983547 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983549 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2983549 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2983558 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983564 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2983564 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2983582 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 2983582 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH 
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2983582 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 2983582 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.s.SolrIndexSearcher Opening 
[Searcher@1800cf53[testSimple1_shard1_replica_n1] main]
   [junit4]   2> 2983583 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.s.SolrIndexSearcher Opening 
[Searcher@584223c2[testSimple1_shard1_replica_n2] main]
   [junit4]   2> 2983584 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf
   [junit4]   2> 2983585 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf
   [junit4]   2> 2983586 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 2983586 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2983587 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 2983587 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.u.UpdateLog Could not find max version in index or recent updates, using 
new clock 1635007055370649600
   [junit4]   2> 2983590 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2983591 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.u.UpdateLog Could not find max version in index or recent updates, using 
new clock 1635007055374843904
   [junit4]   2> 2983593 INFO  
(searcherExecutor-12084-thread-1-processing-n:127.0.0.1:46515_solr 
x:testSimple1_shard1_replica_n2 c:testSimple1 s:shard1 r:core_node5) 
[n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 
x:testSimple1_shard1_replica_n2] o.a.s.c.SolrCore 
[testSimple1_shard1_replica_n2] Registered new searcher 
Searcher@584223c2[testSimple1_shard1_replica_n2] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2983594 INFO  
(searcherExecutor-12085-thread-1-processing-n:127.0.0.1:43581_solr 
x:testSimple1_shard1_replica_n1 c:testSimple1 s:shard1 r:core_node3) 
[n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 
x:testSimple1_shard1_replica_n1] o.a.s.c.SolrCore 
[testSimple1_shard1_replica_n1] Registered new searcher 
Searcher@1800cf53[testSimple1_shard1_replica_n1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2983599 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.ZkShardTerms Successful update of terms at 
/collections/testSimple1/terms/shard1 to Terms{values={core_node3=0}, version=0}
   [junit4]   2> 2983600 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContextBase make sure parent is created 
/collections/testSimple1/leaders/shard1
   [junit4]   2> 2983600 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.ZkShardTerms Successful update of terms at 
/collections/testSimple1/terms/shard1 to Terms{values={core_node3=0, 
core_node5=0}, version=1}
   [junit4]   2> 2983601 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.ShardLeaderElectionContextBase make sure parent is created 
/collections/testSimple1/leaders/shard1
   [junit4]   2> 2983605 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2983605 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2983606 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 2983606 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 2983606 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.SyncStrategy Sync replicas to 
http://127.0.0.1:43581/solr/testSimple1_shard1_replica_n1/
   [junit4]   2> 2983607 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.u.PeerSync PeerSync: core=testSimple1_shard1_replica_n1 
url=http://127.0.0.1:43581/solr START 
replicas=[http://127.0.0.1:46515/solr/testSimple1_shard1_replica_n2/] 
nUpdates=100
   [junit4]   2> 2983609 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.u.PeerSync PeerSync: core=testSimple1_shard1_replica_n1 
url=http://127.0.0.1:43581/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 2983613 INFO  (qtp502828306-43164) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.c.S.Request [testSimple1_shard1_replica_n2]  webapp=/solr path=/get 
params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2}
 status=0 QTime=1
   [junit4]   2> 2983613 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.SyncStrategy Leader's attempt to sync with shard failed, moving to the 
next candidate
   [junit4]   2> 2983613 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we 
can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 2983613 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node 
/collections/testSimple1/leaders/shard1/leader after winning as 
/collections/testSimple1/leader_elect/shard1/election/73394461771694087-core_node3-n_0000000000
   [junit4]   2> 2983616 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext I am the new leader: 
http://127.0.0.1:43581/solr/testSimple1_shard1_replica_n1/ shard1
   [junit4]   2> 2983622 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.s.SolrIndexSearcher Opening 
[Searcher@433c2d97[testSimple1_shard2_replica_n4] main]
   [junit4]   2> 2983623 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf
   [junit4]   2> 2983624 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 2983625 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2983625 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.u.UpdateLog Could not find max version in index or recent updates, using 
new clock 1635007055410495488
   [junit4]   2> 2983628 INFO  
(searcherExecutor-12086-thread-1-processing-n:127.0.0.1:43581_solr 
x:testSimple1_shard2_replica_n4 c:testSimple1 s:shard2 r:core_node7) 
[n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 
x:testSimple1_shard2_replica_n4] o.a.s.c.SolrCore 
[testSimple1_shard2_replica_n4] Registered new searcher 
Searcher@433c2d97[testSimple1_shard2_replica_n4] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2983630 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.ZkShardTerms Successful update of terms at 
/collections/testSimple1/terms/shard2 to Terms{values={core_node7=0}, version=0}
   [junit4]   2> 2983630 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.ShardLeaderElectionContextBase make sure parent is created 
/collections/testSimple1/leaders/shard2
   [junit4]   2> 2983636 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for 
shard shard2: total=2 found=1 timeoutin=14999ms
   [junit4]   2> 2983737 INFO  (zkCallback-14893-thread-2) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/testSimple1/state.json] for collection [testSimple1] has 
occurred - updating... (live nodes size: [3])
   [junit4]   2> 2983737 INFO  (zkCallback-14893-thread-1) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/testSimple1/state.json] for collection [testSimple1] has 
occurred - updating... (live nodes size: [3])
   [junit4]   2> 2983739 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 2983742 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n1&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT}
 status=0 QTime=1503
   [junit4]   2> 2983842 INFO  (zkCallback-14893-thread-1) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/testSimple1/state.json] for collection [testSimple1] has 
occurred - updating... (live nodes size: [3])
   [junit4]   2> 2983842 INFO  (zkCallback-14893-thread-2) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/testSimple1/state.json] for collection [testSimple1] has 
occurred - updating... (live nodes size: [3])
   [junit4]   2> 2983842 INFO  (zkCallback-14893-thread-3) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/testSimple1/state.json] for collection [testSimple1] has 
occurred - updating... (live nodes size: [3])
   [junit4]   2> 2984108 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 2984108 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH 
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2984108 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 2984121 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2984121 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2984134 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.s.SolrIndexSearcher Opening 
[Searcher@787559aa[testSimple1_shard2_replica_n6] main]
   [junit4]   2> 2984135 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf
   [junit4]   2> 2984136 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 2984136 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2984137 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.u.UpdateLog Could not find max version in index or recent updates, using 
new clock 1635007055947366400
   [junit4]   2> 2984141 INFO  
(searcherExecutor-12083-thread-1-processing-n:127.0.0.1:46515_solr 
x:testSimple1_shard2_replica_n6 c:testSimple1 s:shard2 r:core_node8) 
[n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 
x:testSimple1_shard2_replica_n6] o.a.s.c.SolrCore 
[testSimple1_shard2_replica_n6] Registered new searcher 
Searcher@787559aa[testSimple1_shard2_replica_n6] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2984144 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.ZkShardTerms Successful update of terms at 
/collections/testSimple1/terms/shard2 to Terms{values={core_node7=0, 
core_node8=0}, version=1}
   [junit4]   2> 2984144 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] 
o.a.s.c.ShardLeaderElectionContextBase make sure parent is created 
/collections/testSimple1/leaders/shard2
   [junit4]   2> 2984612 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n2&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT}
 status=0 QTime=2375
   [junit4]   2> 2984637 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 2984637 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 2984637 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.c.SyncStrategy Sync replicas to 
http://127.0.0.1:43581/solr/testSimple1_shard2_replica_n4/
   [junit4]   2> 2984637 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n4 
url=http://127.0.0.1:43581/solr START 
replicas=[http://127.0.0.1:46515/solr/testSimple1_shard2_replica_n6/] 
nUpdates=100
   [junit4]   2> 2984638 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr 
c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] 
o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n4 
url=http://127.0.0.1:43581/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 2984639 INFO  (qtp502828306-43160) [n:127.0.0.1:46515_solr 
c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a

[...truncated too long message...]

ail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null666246468
     [copy] Copying 240 files to 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null666246468
   [delete] Deleting directory 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null666246468

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: #;working@lucene1-us-west
[ivy:cachepath]         confs: [default]
[ivy:cachepath]         found 
org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
[ivy:cachepath]         found com.jcraft#jsch;0.1.54 in public
[ivy:cachepath]         found com.jcraft#jzlib;1.1.1 in public
[ivy:cachepath]         found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath]         found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath]         found org.bouncycastle#bcpg-jdk15on;1.60 in public
[ivy:cachepath]         found org.bouncycastle#bcprov-jdk15on;1.60 in public
[ivy:cachepath]         found org.bouncycastle#bcpkix-jdk15on;1.60 in public
[ivy:cachepath]         found org.slf4j#slf4j-nop;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 53ms :: artifacts dl 4ms
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
        ---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 327 minutes 44 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
        at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
        at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
        at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
        at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene
                at 
hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
                at 
hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
                at hudson.remoting.Channel.call(Channel.java:955)
                at hudson.FilePath.act(FilePath.java:1072)
                at hudson.FilePath.act(FilePath.java:1061)
                at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
                at 
hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
                at 
hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
                at 
hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
                at hudson.model.Build$BuildExecution.post2(Build.java:186)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
                at hudson.model.Run.execute(Run.java:1835)
                at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
                at 
hudson.model.ResourceController.execute(ResourceController.java:97)
                at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
        at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
        at hudson.remoting.UserRequest.perform(UserRequest.java:212)
        at hudson.remoting.UserRequest.perform(UserRequest.java:54)
        at hudson.remoting.Request$2.run(Request.java:369)
        at 
hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no 
matches found within 10000
        at hudson.FilePath.act(FilePath.java:1074)
        at hudson.FilePath.act(FilePath.java:1061)
        at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
        at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
        at 
hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
        at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
        at hudson.model.Build$BuildExecution.post2(Build.java:186)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
        at hudson.model.Run.execute(Run.java:1835)
        at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
        at hudson.model.ResourceController.execute(ResourceController.java:97)
        at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern 
"**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]