Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-6.x/301/

2 tests failed.
FAILED:  org.apache.solr.cloud.hdfs.StressHdfsTest.test

Error Message:
Timeout occurred while waiting response from server at: http://127.0.0.1:46021

Stack Trace:
org.apache.solr.client.solrj.SolrServerException: Timeout occurred while waiting 
response from server at: http://127.0.0.1:46021
        at 
__randomizedtesting.SeedInfo.seed([BE76C68615415A1:83B353B2CFA87859]:0)
        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:621)
        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:279)
        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:268)
        at 
org.apache.solr.client.solrj.impl.LBHttpSolrClient.doRequest(LBHttpSolrClient.java:435)
        at 
org.apache.solr.client.solrj.impl.LBHttpSolrClient.request(LBHttpSolrClient.java:387)
        at 
org.apache.solr.client.solrj.impl.CloudSolrClient.sendRequest(CloudSolrClient.java:1361)
        at 
org.apache.solr.client.solrj.impl.CloudSolrClient.requestWithRetryOnStaleState(CloudSolrClient.java:1112)
        at 
org.apache.solr.client.solrj.impl.CloudSolrClient.request(CloudSolrClient.java:1042)
        at org.apache.solr.client.solrj.SolrClient.request(SolrClient.java:1219)
        at 
org.apache.solr.cloud.hdfs.StressHdfsTest.createAndDeleteCollection(StressHdfsTest.java:220)
        at 
org.apache.solr.cloud.hdfs.StressHdfsTest.test(StressHdfsTest.java:103)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:992)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:967)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.net.SocketTimeoutException: Read timed out
        at java.net.SocketInputStream.socketRead0(Native Method)
        at java.net.SocketInputStream.socketRead(SocketInputStream.java:116)
        at java.net.SocketInputStream.read(SocketInputStream.java:171)
        at java.net.SocketInputStream.read(SocketInputStream.java:141)
        at 
org.apache.http.impl.io.AbstractSessionInputBuffer.fillBuffer(AbstractSessionInputBuffer.java:160)
        at 
org.apache.http.impl.io.SocketInputBuffer.fillBuffer(SocketInputBuffer.java:84)
        at 
org.apache.http.impl.io.AbstractSessionInputBuffer.readLine(AbstractSessionInputBuffer.java:273)
        at 
org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:140)
        at 
org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:57)
        at 
org.apache.http.impl.io.AbstractMessageParser.parse(AbstractMessageParser.java:261)
        at 
org.apache.http.impl.AbstractHttpClientConnection.receiveResponseHeader(AbstractHttpClientConnection.java:283)
        at 
org.apache.http.impl.conn.DefaultClientConnection.receiveResponseHeader(DefaultClientConnection.java:251)
        at 
org.apache.http.impl.conn.ManagedClientConnectionImpl.receiveResponseHeader(ManagedClientConnectionImpl.java:197)
        at 
org.apache.http.protocol.HttpRequestExecutor.doReceiveResponse(HttpRequestExecutor.java:272)
        at 
org.apache.http.protocol.HttpRequestExecutor.execute(HttpRequestExecutor.java:124)
        at 
org.apache.http.impl.client.DefaultRequestDirector.tryExecute(DefaultRequestDirector.java:685)
        at 
org.apache.http.impl.client.DefaultRequestDirector.execute(DefaultRequestDirector.java:487)
        at 
org.apache.http.impl.client.AbstractHttpClient.doExecute(AbstractHttpClient.java:882)
        at 
org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:82)
        at 
org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:55)
        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:515)
        ... 51 more


FAILED:  org.apache.solr.update.TestInPlaceUpdatesDistrib.test

Error Message:
'sanitycheck' results against client: 
org.apache.solr.client.solrj.impl.HttpSolrClient@1637f00 (not leader) wrong 
[docid] for SolrDocument{id=94, 
id_field_copy_that_does_not_support_in_place_update_s=94, title_s=title94, 
id_i=94, inplace_updatable_float=101.0, _version_=1561066829263142912, 
inplace_updatable_int_with_default=666, 
inplace_updatable_float_with_default=42.0, [docid]=5586} expected:<6049> but 
was:<5586>

Stack Trace:
java.lang.AssertionError: 'sanitycheck' results against client: 
org.apache.solr.client.solrj.impl.HttpSolrClient@1637f00 (not leader) wrong 
[docid] for SolrDocument{id=94, 
id_field_copy_that_does_not_support_in_place_update_s=94, title_s=title94, 
id_i=94, inplace_updatable_float=101.0, _version_=1561066829263142912, 
inplace_updatable_int_with_default=666, 
inplace_updatable_float_with_default=42.0, [docid]=5586} expected:<6049> but 
was:<5586>
        at 
__randomizedtesting.SeedInfo.seed([BE76C68615415A1:83B353B2CFA87859]:0)
        at org.junit.Assert.fail(Assert.java:93)
        at org.junit.Assert.failNotEquals(Assert.java:647)
        at org.junit.Assert.assertEquals(Assert.java:128)
        at 
org.apache.solr.update.TestInPlaceUpdatesDistrib.assertDocIdsAndValuesInResults(TestInPlaceUpdatesDistrib.java:442)
        at 
org.apache.solr.update.TestInPlaceUpdatesDistrib.assertDocIdsAndValuesAgainstAllClients(TestInPlaceUpdatesDistrib.java:413)
        at 
org.apache.solr.update.TestInPlaceUpdatesDistrib.docValuesUpdateTest(TestInPlaceUpdatesDistrib.java:321)
        at 
org.apache.solr.update.TestInPlaceUpdatesDistrib.test(TestInPlaceUpdatesDistrib.java:140)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:992)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:967)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:745)




Build Log:
[...truncated 11756 lines...]
   [junit4] Suite: org.apache.solr.cloud.hdfs.StressHdfsTest
   [junit4]   2> Creating dataDir: 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/init-core-data-001
   [junit4]   2> 476282 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Using PointFields
   [junit4]   2> 476283 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: 
@org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl=https://issues.apache.org/jira/browse/SOLR-5776)
   [junit4]   2> 476284 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] 
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 476346 WARN  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] 
o.a.h.m.i.MetricsConfig Cannot locate configuration: tried 
hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 476366 WARN  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 476373 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] o.m.log jetty-6.1.26
   [junit4]   2> 476387 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] o.m.log Extract 
jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.2-tests.jar!/webapps/hdfs
 to ./temp/Jetty_localhost_58762_hdfs____.hfsome/webapp
   [junit4]   2> 476978 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] o.m.log Started 
HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:58762
   [junit4]   2> 477214 WARN  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 477215 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] o.m.log jetty-6.1.26
   [junit4]   2> 477233 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] o.m.log Extract 
jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.2-tests.jar!/webapps/datanode
 to ./temp/Jetty_localhost_52885_datanode____.f4brfl/webapp
   [junit4]   2> 477954 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] o.m.log Started 
HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:52885
   [junit4]   2> 478254 WARN  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 478255 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] o.m.log jetty-6.1.26
   [junit4]   2> 478286 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] o.m.log Extract 
jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.2-tests.jar!/webapps/datanode
 to ./temp/Jetty_localhost_32906_datanode____r32f53/webapp
   [junit4]   2> 478414 INFO  (IPC Server handler 3 on 40339) [    ] 
BlockStateChange BLOCK* processReport: from storage 
DS-cdbd1df7-e984-4973-8109-00b1cda7d4a2 node 
DatanodeRegistration(127.0.0.1:46374, 
datanodeUuid=c65671ff-97a4-4872-b2ad-fb320550635c, infoPort=57278, 
infoSecurePort=0, ipcPort=49173, 
storageInfo=lv=-56;cid=testClusterID;nsid=690169270;c=0), blocks: 0, 
hasStaleStorage: true, processing time: 4 msecs
   [junit4]   2> 478414 INFO  (IPC Server handler 3 on 40339) [    ] 
BlockStateChange BLOCK* processReport: from storage 
DS-85f4f4c2-430e-49c7-b25c-5921ebe6b6e5 node 
DatanodeRegistration(127.0.0.1:46374, 
datanodeUuid=c65671ff-97a4-4872-b2ad-fb320550635c, infoPort=57278, 
infoSecurePort=0, ipcPort=49173, 
storageInfo=lv=-56;cid=testClusterID;nsid=690169270;c=0), blocks: 0, 
hasStaleStorage: false, processing time: 0 msecs
   [junit4]   2> 478887 INFO  
(SUITE-StressHdfsTest-seed#[BE76C68615415A1]-worker) [    ] o.m.log Started 
HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:32906
   [junit4]   2> 479075 INFO  (IPC Server handler 9 on 40339) [    ] 
BlockStateChange BLOCK* processReport: from storage 
DS-503133e2-debd-4d14-bb2c-4a78c882f9a2 node 
DatanodeRegistration(127.0.0.1:52639, 
datanodeUuid=1807bb09-d803-4fde-b184-c701cd927e6f, infoPort=57861, 
infoSecurePort=0, ipcPort=32907, 
storageInfo=lv=-56;cid=testClusterID;nsid=690169270;c=0), blocks: 0, 
hasStaleStorage: true, processing time: 0 msecs
   [junit4]   2> 479075 INFO  (IPC Server handler 9 on 40339) [    ] 
BlockStateChange BLOCK* processReport: from storage 
DS-c9f84a73-d657-4274-99bd-979f052250a3 node 
DatanodeRegistration(127.0.0.1:52639, 
datanodeUuid=1807bb09-d803-4fde-b184-c701cd927e6f, infoPort=57861, 
infoSecurePort=0, ipcPort=32907, 
storageInfo=lv=-56;cid=testClusterID;nsid=690169270;c=0), blocks: 0, 
hasStaleStorage: false, processing time: 0 msecs
   [junit4]   2> 479239 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 479239 INFO  (Thread-25887) [    ] o.a.s.c.ZkTestServer client 
port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 479239 INFO  (Thread-25887) [    ] o.a.s.c.ZkTestServer 
Starting server
   [junit4]   2> 479339 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.ZkTestServer start zk server on port:34658
   [junit4]   2> 479378 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
 to /configs/conf1/solrconfig.xml
   [junit4]   2> 479379 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/core/src/test-files/solr/collection1/conf/schema.xml
 to /configs/conf1/schema.xml
   [junit4]   2> 479380 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
 to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 479381 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
 to /configs/conf1/stopwords.txt
   [junit4]   2> 479382 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/core/src/test-files/solr/collection1/conf/protwords.txt
 to /configs/conf1/protwords.txt
   [junit4]   2> 479384 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/core/src/test-files/solr/collection1/conf/currency.xml
 to /configs/conf1/currency.xml
   [junit4]   2> 479388 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
 to /configs/conf1/enumsConfig.xml
   [junit4]   2> 479391 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
 to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 479396 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
 to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 479397 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
 to /configs/conf1/old_synonyms.txt
   [junit4]   2> 479402 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
 to /configs/conf1/synonyms.txt
   [junit4]   2> 479486 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/control-001/cores/collection1
   [junit4]   2> 479487 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 479490 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@14b8158e{/,null,AVAILABLE}
   [junit4]   2> 479490 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.AbstractConnector Started 
ServerConnector@92c88b8{HTTP/1.1,[http/1.1]}{127.0.0.1:58831}
   [junit4]   2> 479490 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.Server Started @482951ms
   [junit4]   2> 479490 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://localhost:40339/hdfs__localhost_40339__x1_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-6.x_checkout_solr_build_solr-core_test_J2_temp_solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001_tempDir-002_control_data,
 hostContext=/, hostPort=58831, 
coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/control-001/cores}
   [junit4]   2> 479491 ERROR (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging 
may be missing or incomplete.
   [junit4]   2> 479491 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ 
version 6.5.0
   [junit4]   2> 479491 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on 
port null
   [junit4]   2> 479491 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 479491 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-03-05T21:21:58.837Z
   [junit4]   2> 479509 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 479509 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.SolrXmlConfig Loading container configuration from 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/control-001/solr.xml
   [junit4]   2> 479518 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with 
params: socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 479519 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34658/solr
   [junit4]   2> 479551 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:58831_    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 479552 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:58831_    ] o.a.s.c.OverseerElectionContext I am going to be the 
leader 127.0.0.1:58831_
   [junit4]   2> 479554 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:58831_    ] o.a.s.c.Overseer Overseer 
(id=97566649128845316-127.0.0.1:58831_-n_0000000000) starting
   [junit4]   2> 479582 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:58831_    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:58831_
   [junit4]   2> 479589 INFO  
(zkCallback-132-thread-1-processing-n:127.0.0.1:58831_) [n:127.0.0.1:58831_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 479837 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:58831_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions 
underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/control-001/cores
   [junit4]   2> 479837 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:58831_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 479851 INFO  
(OverseerStateUpdate-97566649128845316-127.0.0.1:58831_-n_0000000000) 
[n:127.0.0.1:58831_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 479867 WARN  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.c.Config 
Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> 
instead.
   [junit4]   2> 479868 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.c.SolrConfig 
Using Lucene MatchVersion: 6.5.0
   [junit4]   2> 479894 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 479960 WARN  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 479964 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 479983 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection 
control_collection
   [junit4]   2> 479983 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:40339/solr_hdfs_home
   [junit4]   2> 479983 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 479983 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/control-001/cores/collection1],
 
dataDir=[hdfs://localhost:40339/solr_hdfs_home/control_collection/core_node1/data/]
   [junit4]   2> 479984 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@548b6ccb
   [junit4]   2> 479984 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/control_collection/core_node1/data/snapshot_metadata
   [junit4]   2> 479990 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 479990 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[8388608] will allocate [1] slabs and use ~[8388608] bytes
   [junit4]   2> 479990 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Creating new global HDFS BlockCache
   [junit4]   2> 480007 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 480008 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/control_collection/core_node1/data
   [junit4]   2> 480031 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/control_collection/core_node1/data/index
   [junit4]   2> 480036 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 480036 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[8388608] will allocate [1] slabs and use ~[8388608] bytes
   [junit4]   2> 480040 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 480040 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.MockRandomMergePolicy: 
org.apache.lucene.index.MockRandomMergePolicy@61690264
   [junit4]   2> 480073 INFO  (IPC Server handler 5 on 40339) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:46374 is 
added to blk_1073741825_1001{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-c9f84a73-d657-4274-99bd-979f052250a3:NORMAL:127.0.0.1:52639|RBW],
 
ReplicaUC[[DISK]DS-cdbd1df7-e984-4973-8109-00b1cda7d4a2:NORMAL:127.0.0.1:46374|RBW]]}
 size 0
   [junit4]   2> 480074 INFO  (IPC Server handler 6 on 40339) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:52639 is 
added to blk_1073741825_1001{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-cdbd1df7-e984-4973-8109-00b1cda7d4a2:NORMAL:127.0.0.1:46374|RBW],
 
ReplicaUC[[DISK]DS-503133e2-debd-4d14-bb2c-4a78c882f9a2:NORMAL:127.0.0.1:52639|FINALIZED]]}
 size 0
   [junit4]   2> 480097 WARN  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 480126 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.u.UpdateHandler 
Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 480126 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 480126 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.u.HdfsUpdateLog 
Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 480135 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.u.CommitTracker 
Hard AutoCommit: disabled
   [junit4]   2> 480136 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.u.CommitTracker 
Soft AutoCommit: disabled
   [junit4]   2> 480136 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=2, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.24878073041265872]
   [junit4]   2> 480175 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@6ff6d3da[collection1] main]
   [junit4]   2> 480176 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 480176 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 480176 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 480178 INFO  
(searcherExecutor-403-thread-1-processing-n:127.0.0.1:58831_ x:collection1 
c:control_collection) [n:127.0.0.1:58831_ c:control_collection   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@6ff6d3da[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 480179 INFO  
(coreLoadExecutor-402-thread-1-processing-n:127.0.0.1:58831_) 
[n:127.0.0.1:58831_ c:control_collection   x:collection1] o.a.s.u.UpdateLog 
Could not find max version in index or recent updates, using new clock 
1561066387039846400
   [junit4]   2> 480199 INFO  
(coreZkRegister-395-thread-1-processing-n:127.0.0.1:58831_ x:collection1 
c:control_collection) [n:127.0.0.1:58831_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas 
found to continue.
   [junit4]   2> 480199 INFO  
(coreZkRegister-395-thread-1-processing-n:127.0.0.1:58831_ x:collection1 
c:control_collection) [n:127.0.0.1:58831_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new 
leader - try and sync
   [junit4]   2> 480199 INFO  
(coreZkRegister-395-thread-1-processing-n:127.0.0.1:58831_ x:collection1 
c:control_collection) [n:127.0.0.1:58831_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
http://127.0.0.1:58831/collection1/
   [junit4]   2> 480199 INFO  
(coreZkRegister-395-thread-1-processing-n:127.0.0.1:58831_ x:collection1 
c:control_collection) [n:127.0.0.1:58831_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync 
replicas to me
   [junit4]   2> 480199 INFO  
(coreZkRegister-395-thread-1-processing-n:127.0.0.1:58831_ x:collection1 
c:control_collection) [n:127.0.0.1:58831_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
http://127.0.0.1:58831/collection1/ has no replicas
   [junit4]   2> 480199 INFO  
(coreZkRegister-395-thread-1-processing-n:127.0.0.1:58831_ x:collection1 
c:control_collection) [n:127.0.0.1:58831_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Found all 
replicas participating in election, clear LIR
   [junit4]   2> 480204 INFO  
(coreZkRegister-395-thread-1-processing-n:127.0.0.1:58831_ x:collection1 
c:control_collection) [n:127.0.0.1:58831_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new 
leader: http://127.0.0.1:58831/collection1/ shard1
   [junit4]   2> 480356 INFO  
(coreZkRegister-395-thread-1-processing-n:127.0.0.1:58831_ x:collection1 
c:control_collection) [n:127.0.0.1:58831_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery 
necessary
   [junit4]   2> 480406 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 480407 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34658/solr 
ready
   [junit4]   2> 480407 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause 
connection loss:false
   [junit4]   2> 480481 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-1-001/cores/collection1
   [junit4]   2> 480481 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-1-001
   [junit4]   2> 480482 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 480483 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@f745933{/,null,AVAILABLE}
   [junit4]   2> 480484 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.AbstractConnector Started 
ServerConnector@d4b7009{HTTP/1.1,[http/1.1]}{127.0.0.1:42962}
   [junit4]   2> 480484 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.Server Started @483944ms
   [junit4]   2> 480484 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://localhost:40339/hdfs__localhost_40339__x1_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-6.x_checkout_solr_build_solr-core_test_J2_temp_solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001_tempDir-002_jetty1,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=42962, 
coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-1-001/cores}
   [junit4]   2> 480484 ERROR (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging 
may be missing or incomplete.
   [junit4]   2> 480485 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ 
version 6.5.0
   [junit4]   2> 480485 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on 
port null
   [junit4]   2> 480485 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 480485 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-03-05T21:21:59.831Z
   [junit4]   2> 480492 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 480492 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.SolrXmlConfig Loading container configuration from 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-1-001/solr.xml
   [junit4]   2> 480497 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with 
params: socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 480497 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34658/solr
   [junit4]   2> 480523 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:42962_    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (1)
   [junit4]   2> 480525 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:42962_    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 480530 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:42962_    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:42962_
   [junit4]   2> 480535 INFO  (zkCallback-136-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 480535 INFO  
(zkCallback-132-thread-2-processing-n:127.0.0.1:58831_) [n:127.0.0.1:58831_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 480536 INFO  
(zkCallback-141-thread-1-processing-n:127.0.0.1:42962_) [n:127.0.0.1:42962_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 480592 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:42962_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions 
underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-1-001/cores
   [junit4]   2> 480592 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:42962_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 480603 INFO  
(OverseerStateUpdate-97566649128845316-127.0.0.1:58831_-n_0000000000) 
[n:127.0.0.1:58831_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 481623 WARN  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 481624 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 6.5.0
   [junit4]   2> 481635 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 481712 WARN  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 481726 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 481734 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 481734 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:40339/solr_hdfs_home
   [junit4]   2> 481734 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Solr Kerberos Authentication disabled
   [junit4]   2> 481734 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-1-001/cores/collection1],
 dataDir=[hdfs://localhost:40339/solr_hdfs_home/collection1/core_node1/data/]
   [junit4]   2> 481734 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@548b6ccb
   [junit4]   2> 481734 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/collection1/core_node1/data/snapshot_metadata
   [junit4]   2> 481740 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 481740 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Block cache target memory usage, slab size of [8388608] will allocate [1] slabs 
and use ~[8388608] bytes
   [junit4]   2> 481743 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.s.b.BlockDirectory 
Block cache on write is disabled
   [junit4]   2> 481743 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/collection1/core_node1/data
   [junit4]   2> 481755 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/collection1/core_node1/data/index
   [junit4]   2> 481760 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 481760 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Block cache target memory usage, slab size of [8388608] will allocate [1] slabs 
and use ~[8388608] bytes
   [junit4]   2> 481762 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.s.b.BlockDirectory 
Block cache on write is disabled
   [junit4]   2> 481762 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.MockRandomMergePolicy: 
org.apache.lucene.index.MockRandomMergePolicy@75fe9df8
   [junit4]   2> 481774 INFO  (IPC Server handler 3 on 40339) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:52639 is 
added to blk_1073741826_1002{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-cdbd1df7-e984-4973-8109-00b1cda7d4a2:NORMAL:127.0.0.1:46374|RBW],
 
ReplicaUC[[DISK]DS-c9f84a73-d657-4274-99bd-979f052250a3:NORMAL:127.0.0.1:52639|FINALIZED]]}
 size 0
   [junit4]   2> 481775 INFO  (IPC Server handler 4 on 40339) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:46374 is 
added to blk_1073741826_1002{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-c9f84a73-d657-4274-99bd-979f052250a3:NORMAL:127.0.0.1:52639|FINALIZED],
 
ReplicaUC[[DISK]DS-85f4f4c2-430e-49c7-b25c-5921ebe6b6e5:NORMAL:127.0.0.1:46374|FINALIZED]]}
 size 0
   [junit4]   2> 481781 WARN  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 481811 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 481811 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 481811 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.u.HdfsUpdateLog 
Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 481823 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 481823 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 481824 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=2, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.24878073041265872]
   [junit4]   2> 481834 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@452c82f6[collection1] main]
   [junit4]   2> 481835 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 481835 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 481835 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 481836 INFO  
(searcherExecutor-414-thread-1-processing-n:127.0.0.1:42962_ x:collection1 
c:collection1) [n:127.0.0.1:42962_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@452c82f6[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 481839 INFO  
(coreLoadExecutor-413-thread-1-processing-n:127.0.0.1:42962_) 
[n:127.0.0.1:42962_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1561066388780482560
   [junit4]   2> 481849 INFO  
(coreZkRegister-408-thread-1-processing-n:127.0.0.1:42962_ x:collection1 
c:collection1) [n:127.0.0.1:42962_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to 
continue.
   [junit4]   2> 481849 INFO  
(coreZkRegister-408-thread-1-processing-n:127.0.0.1:42962_ x:collection1 
c:collection1) [n:127.0.0.1:42962_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try 
and sync
   [junit4]   2> 481849 INFO  
(coreZkRegister-408-thread-1-processing-n:127.0.0.1:42962_ x:collection1 
c:collection1) [n:127.0.0.1:42962_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
http://127.0.0.1:42962/collection1/
   [junit4]   2> 481849 INFO  
(coreZkRegister-408-thread-1-processing-n:127.0.0.1:42962_ x:collection1 
c:collection1) [n:127.0.0.1:42962_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 481849 INFO  
(coreZkRegister-408-thread-1-processing-n:127.0.0.1:42962_ x:collection1 
c:collection1) [n:127.0.0.1:42962_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:42962/collection1/ has no 
replicas
   [junit4]   2> 481849 INFO  
(coreZkRegister-408-thread-1-processing-n:127.0.0.1:42962_ x:collection1 
c:collection1) [n:127.0.0.1:42962_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext Found all replicas 
participating in election, clear LIR
   [junit4]   2> 481853 INFO  
(coreZkRegister-408-thread-1-processing-n:127.0.0.1:42962_ x:collection1 
c:collection1) [n:127.0.0.1:42962_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: 
http://127.0.0.1:42962/collection1/ shard1
   [junit4]   2> 482003 INFO  
(coreZkRegister-408-thread-1-processing-n:127.0.0.1:42962_ x:collection1 
c:collection1) [n:127.0.0.1:42962_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 482207 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-2-001/cores/collection1
   [junit4]   2> 482208 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-2-001
   [junit4]   2> 482209 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 482210 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@29e74a4c{/,null,AVAILABLE}
   [junit4]   2> 482210 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.AbstractConnector Started 
ServerConnector@5a7ebf2c{HTTP/1.1,[http/1.1]}{127.0.0.1:60249}
   [junit4]   2> 482210 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.Server Started @485671ms
   [junit4]   2> 482210 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://localhost:40339/hdfs__localhost_40339__x1_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-6.x_checkout_solr_build_solr-core_test_J2_temp_solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001_tempDir-002_jetty2,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=60249, 
coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-2-001/cores}
   [junit4]   2> 482210 ERROR (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging 
may be missing or incomplete.
   [junit4]   2> 482211 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ 
version 6.5.0
   [junit4]   2> 482211 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on 
port null
   [junit4]   2> 482211 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 482211 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-03-05T21:22:01.557Z
   [junit4]   2> 482218 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 482218 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.SolrXmlConfig Loading container configuration from 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-2-001/solr.xml
   [junit4]   2> 482226 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with 
params: socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 482226 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34658/solr
   [junit4]   2> 482253 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:60249_    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (2)
   [junit4]   2> 482254 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:60249_    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 482256 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:60249_    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:60249_
   [junit4]   2> 482257 INFO  
(zkCallback-141-thread-1-processing-n:127.0.0.1:42962_) [n:127.0.0.1:42962_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 482257 INFO  (zkCallback-136-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 482257 INFO  
(zkCallback-132-thread-1-processing-n:127.0.0.1:58831_) [n:127.0.0.1:58831_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 482266 INFO  
(zkCallback-147-thread-1-processing-n:127.0.0.1:60249_) [n:127.0.0.1:60249_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 482347 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:60249_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions 
underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-2-001/cores
   [junit4]   2> 482347 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:60249_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 482351 INFO  
(OverseerStateUpdate-97566649128845316-127.0.0.1:58831_-n_0000000000) 
[n:127.0.0.1:58831_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 483391 WARN  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 483392 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 6.5.0
   [junit4]   2> 483403 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 483874 WARN  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 483876 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 483905 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 483907 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:40339/solr_hdfs_home
   [junit4]   2> 483907 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Solr Kerberos Authentication disabled
   [junit4]   2> 483907 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-2-001/cores/collection1],
 dataDir=[hdfs://localhost:40339/solr_hdfs_home/collection1/core_node2/data/]
   [junit4]   2> 483907 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@548b6ccb
   [junit4]   2> 483907 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/collection1/core_node2/data/snapshot_metadata
   [junit4]   2> 483913 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 483913 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Block cache target memory usage, slab size of [8388608] will allocate [1] slabs 
and use ~[8388608] bytes
   [junit4]   2> 483931 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.s.b.BlockDirectory 
Block cache on write is disabled
   [junit4]   2> 483931 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/collection1/core_node2/data
   [junit4]   2> 483957 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/collection1/core_node2/data/index
   [junit4]   2> 483973 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 483973 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Block cache target memory usage, slab size of [8388608] will allocate [1] slabs 
and use ~[8388608] bytes
   [junit4]   2> 483975 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.s.b.BlockDirectory 
Block cache on write is disabled
   [junit4]   2> 483976 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.MockRandomMergePolicy: 
org.apache.lucene.index.MockRandomMergePolicy@12dc84bb
   [junit4]   2> 484063 INFO  (IPC Server handler 4 on 40339) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:46374 is 
added to blk_1073741827_1003{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-c9f84a73-d657-4274-99bd-979f052250a3:NORMAL:127.0.0.1:52639|RBW],
 
ReplicaUC[[DISK]DS-cdbd1df7-e984-4973-8109-00b1cda7d4a2:NORMAL:127.0.0.1:46374|RBW]]}
 size 0
   [junit4]   2> 484064 INFO  (IPC Server handler 3 on 40339) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:52639 is 
added to blk_1073741827_1003{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-cdbd1df7-e984-4973-8109-00b1cda7d4a2:NORMAL:127.0.0.1:46374|RBW],
 
ReplicaUC[[DISK]DS-503133e2-debd-4d14-bb2c-4a78c882f9a2:NORMAL:127.0.0.1:52639|FINALIZED]]}
 size 0
   [junit4]   2> 484085 WARN  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 484166 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 484166 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 484166 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.u.HdfsUpdateLog 
Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 484176 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 484176 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 484182 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=2, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.24878073041265872]
   [junit4]   2> 484189 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@721d19bc[collection1] main]
   [junit4]   2> 484190 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 484191 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 484191 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 484192 INFO  
(searcherExecutor-425-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
c:collection1) [n:127.0.0.1:60249_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@721d19bc[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 484194 INFO  
(coreLoadExecutor-424-thread-1-processing-n:127.0.0.1:60249_) 
[n:127.0.0.1:60249_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1561066391249879040
   [junit4]   2> 484198 INFO  
(coreZkRegister-419-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
c:collection1) [n:127.0.0.1:60249_ c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 484199 INFO  
(updateExecutor-144-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 484199 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. 
recoveringAfterStartup=true
   [junit4]   2> 484199 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 484199 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. 
core=[collection1]
   [junit4]   2> 484199 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. 
HDFSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 484199 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core 
[collection1] as recovering, leader is [http://127.0.0.1:42962/collection1/] 
and I am [http://127.0.0.1:60249/collection1/]
   [junit4]   2> 484208 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery 
command to [http://127.0.0.1:42962]; [WaitForState: 
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:60249_&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 484240 INFO  (qtp1331384785-26631) [n:127.0.0.1:42962_    ] 
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state: 
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 484240 INFO  (qtp1331384785-26631) [n:127.0.0.1:42962_    ] 
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 
(shard1 of collection1) have state: recovering
   [junit4]   2> 484240 INFO  (qtp1331384785-26631) [n:127.0.0.1:42962_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=recovering, localState=active, 
nodeName=127.0.0.1:60249_, coreNodeName=core_node2, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node2:{"core":"collection1","dataDir":"hdfs://localhost:40339/solr_hdfs_home/collection1/core_node2/data/","base_url":"http://127.0.0.1:60249","node_name":"127.0.0.1:60249_","state":"recovering","ulogDir":"hdfs://localhost:40339/solr_hdfs_home/collection1/core_node2/data/tlog"}
   [junit4]   2> 484240 INFO  (qtp1331384785-26631) [n:127.0.0.1:42962_    ] 
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering, 
checkLive: true, onlyIfLeader: true for: 0 seconds.
   [junit4]   2> 484241 INFO  (qtp1331384785-26631) [n:127.0.0.1:42962_    ] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={nodeName=127.0.0.1:60249_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=0 QTime=1
   [junit4]   2> 484476 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-3-001/cores/collection1
   [junit4]   2> 484477 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-3-001
   [junit4]   2> 484477 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 484478 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@506e46b4{/,null,AVAILABLE}
   [junit4]   2> 484479 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.AbstractConnector Started 
ServerConnector@6519f9c6{HTTP/1.1,[http/1.1]}{127.0.0.1:36361}
   [junit4]   2> 484479 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.e.j.s.Server Started @487939ms
   [junit4]   2> 484479 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://localhost:40339/hdfs__localhost_40339__x1_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-6.x_checkout_solr_build_solr-core_test_J2_temp_solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001_tempDir-002_jetty3,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=36361, 
coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-3-001/cores}
   [junit4]   2> 484479 ERROR (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging 
may be missing or incomplete.
   [junit4]   2> 484480 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ 
version 6.5.0
   [junit4]   2> 484480 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on 
port null
   [junit4]   2> 484480 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 484480 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-03-05T21:22:03.826Z
   [junit4]   2> 484494 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 484494 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.SolrXmlConfig Loading container configuration from 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-3-001/solr.xml
   [junit4]   2> 484499 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with 
params: socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 484500 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34658/solr
   [junit4]   2> 484516 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:36361_    ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (3)
   [junit4]   2> 484517 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:36361_    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 484519 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:36361_    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:36361_
   [junit4]   2> 484520 INFO  (zkCallback-136-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 484521 INFO  
(zkCallback-132-thread-3-processing-n:127.0.0.1:58831_) [n:127.0.0.1:58831_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 484520 INFO  
(zkCallback-147-thread-1-processing-n:127.0.0.1:60249_) [n:127.0.0.1:60249_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 484520 INFO  
(zkCallback-141-thread-1-processing-n:127.0.0.1:42962_) [n:127.0.0.1:42962_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 484522 INFO  
(zkCallback-154-thread-1-processing-n:127.0.0.1:36361_) [n:127.0.0.1:36361_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 484578 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:36361_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions 
underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-3-001/cores
   [junit4]   2> 484578 INFO  (TEST-StressHdfsTest.test-seed#[BE76C68615415A1]) 
[n:127.0.0.1:36361_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 484586 INFO  
(OverseerStateUpdate-97566649128845316-127.0.0.1:58831_-n_0000000000) 
[n:127.0.0.1:58831_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 484749 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync 
from [http://127.0.0.1:42962/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 484749 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 
url=http://127.0.0.1:60249 START replicas=[http://127.0.0.1:42962/collection1/] 
nUpdates=100
   [junit4]   2> 484763 INFO  (qtp1331384785-26629) [n:127.0.0.1:42962_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:9.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 484763 INFO  (qtp1331384785-26629) [n:127.0.0.1:42962_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/get 
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
 status=0 QTime=10
   [junit4]   2> 484768 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint 
millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 484768 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to 
do a PeerSync 
   [junit4]   2> 484768 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 484768 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted 
changes. Skipping IW.commit.
   [junit4]   2> 484769 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 484769 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery 
was successful.
   [junit4]   2> 484769 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered 
during PeerSync.
   [junit4]   2> 484769 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 484769 INFO  
(recoveryExecutor-145-thread-1-processing-n:127.0.0.1:60249_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:60249_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active 
after recovery.
   [junit4]   2> 485603 WARN  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 485603 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 6.5.0
   [junit4]   2> 485614 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 485689 WARN  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 485691 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 485701 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 485701 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:40339/solr_hdfs_home
   [junit4]   2> 485701 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Solr Kerberos Authentication disabled
   [junit4]   2> 485701 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.StressHdfsTest_BE76C68615415A1-001/shard-3-001/cores/collection1],
 dataDir=[hdfs://localhost:40339/solr_hdfs_home/collection1/core_node3/data/]
   [junit4]   2> 485701 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@548b6ccb
   [junit4]   2> 485702 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/collection1/core_node3/data/snapshot_metadata
   [junit4]   2> 485708 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 485708 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Block cache target memory usage, slab size of [8388608] will allocate [1] slabs 
and use ~[8388608] bytes
   [junit4]   2> 485710 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.s.b.BlockDirectory 
Block cache on write is disabled
   [junit4]   2> 485711 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/collection1/core_node3/data
   [junit4]   2> 485723 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
creating directory factory for path 
hdfs://localhost:40339/solr_hdfs_home/collection1/core_node3/data/index
   [junit4]   2> 485727 INFO  
(coreLoadExecutor-435-thread-1-processing-n:127.0.0.1:36361_) 
[n:127.0.0.1:36361_ c:collection1   x:collection1] o.a.s.c.HdfsDirectoryFactory 
Number of slabs of block cache [1] with direct memory allocation set to [tru

[...truncated too long message...]

Fcu    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot 
talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 954112 INFO  
(zkCallback-1009-thread-2-processing-n:127.0.0.1:36118_ne_%2Fcu) 
[n:127.0.0.1:36118_ne_%2Fcu    ] o.a.s.c.OverseerElectionContext I am going to 
be the leader 127.0.0.1:36118_ne_%2Fcu
   [junit4]   2> 954112 INFO  
(TEST-TestInPlaceUpdatesDistrib.test-seed#[BE76C68615415A1]) [    ] 
o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 954116 INFO  
(TEST-TestInPlaceUpdatesDistrib.test-seed#[BE76C68615415A1]) [    ] 
o.e.j.s.AbstractConnector Stopped 
ServerConnector@30657da6{HTTP/1.1,[http/1.1]}{127.0.0.1:0}
   [junit4]   2> 954116 INFO  
(TEST-TestInPlaceUpdatesDistrib.test-seed#[BE76C68615415A1]) [    ] 
o.e.j.s.h.ContextHandler Stopped 
o.e.j.s.ServletContextHandler@4e7ead78{/ne_/cu,null,UNAVAILABLE}
   [junit4]   2> 954117 INFO  
(zkCallback-1009-thread-3-processing-n:127.0.0.1:36118_ne_%2Fcu) 
[n:127.0.0.1:36118_ne_%2Fcu c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 954117 INFO  
(zkCallback-1009-thread-4-processing-n:127.0.0.1:36118_ne_%2Fcu) 
[n:127.0.0.1:36118_ne_%2Fcu    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (2) -> (1)
   [junit4]   2> 954118 INFO  
(zkCallback-1009-thread-2-processing-n:127.0.0.1:36118_ne_%2Fcu) 
[n:127.0.0.1:36118_ne_%2Fcu    ] o.a.s.c.Overseer Overseer 
(id=97566674662326286-127.0.0.1:36118_ne_%2Fcu-n_0000000003) starting
   [junit4]   2> 954126 INFO  
(TEST-TestInPlaceUpdatesDistrib.test-seed#[BE76C68615415A1]) [    ] 
o.a.s.c.ChaosMonkey monkey: stop shard! 36118
   [junit4]   2> 954126 INFO  
(TEST-TestInPlaceUpdatesDistrib.test-seed#[BE76C68615415A1]) [    ] 
o.a.s.c.CoreContainer Shutting down CoreContainer instance=1136818412
   [junit4]   2> 954136 INFO  
(TEST-TestInPlaceUpdatesDistrib.test-seed#[BE76C68615415A1]) [    ] 
o.a.s.c.Overseer Overseer 
(id=97566674662326286-127.0.0.1:36118_ne_%2Fcu-n_0000000003) closing
   [junit4]   2> 954136 INFO  
(OverseerStateUpdate-97566674662326286-127.0.0.1:36118_ne_%2Fcu-n_0000000003) 
[n:127.0.0.1:36118_ne_%2Fcu    ] o.a.s.c.Overseer Overseer Loop exiting : 
127.0.0.1:36118_ne_%2Fcu
   [junit4]   2> 954137 WARN  
(zkCallback-1009-thread-6-processing-n:127.0.0.1:36118_ne_%2Fcu) 
[n:127.0.0.1:36118_ne_%2Fcu    ] o.a.s.c.c.ZkStateReader ZooKeeper watch 
triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for 
/live_nodes]
   [junit4]   2> 956617 WARN  
(zkCallback-1009-thread-3-processing-n:127.0.0.1:36118_ne_%2Fcu) 
[n:127.0.0.1:36118_ne_%2Fcu c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.c.SyncStrategy Closed, skipping sync up.
   [junit4]   2> 956617 INFO  
(zkCallback-1009-thread-3-processing-n:127.0.0.1:36118_ne_%2Fcu) 
[n:127.0.0.1:36118_ne_%2Fcu c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.c.SolrCore [collection1]  CLOSING SolrCore 
org.apache.solr.core.SolrCore@6c4f8360
   [junit4]   2> 956643 INFO  
(zkCallback-1009-thread-3-processing-n:127.0.0.1:36118_ne_%2Fcu) 
[n:127.0.0.1:36118_ne_%2Fcu c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.collection1
   [junit4]   2> 956726 INFO  
(TEST-TestInPlaceUpdatesDistrib.test-seed#[BE76C68615415A1]) [    ] 
o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 956733 INFO  
(TEST-TestInPlaceUpdatesDistrib.test-seed#[BE76C68615415A1]) [    ] 
o.e.j.s.AbstractConnector Stopped 
ServerConnector@52062d5c{HTTP/1.1,[http/1.1]}{127.0.0.1:0}
   [junit4]   2> 956733 INFO  
(TEST-TestInPlaceUpdatesDistrib.test-seed#[BE76C68615415A1]) [    ] 
o.e.j.s.h.ContextHandler Stopped 
o.e.j.s.ServletContextHandler@78ed4fb5{/ne_/cu,null,UNAVAILABLE}
   [junit4]   2> 956742 INFO  
(TEST-TestInPlaceUpdatesDistrib.test-seed#[BE76C68615415A1]) [    ] 
o.a.s.c.ZkTestServer connecting to 127.0.0.1:33249 33249
   [junit4]   2> 956829 INFO  (Thread-1181) [    ] o.a.s.c.ZkTestServer 
connecting to 127.0.0.1:33249 33249
   [junit4]   2> 956829 WARN  (Thread-1181) [    ] o.a.s.c.ZkTestServer Watch 
limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2>        5       /solr/aliases.json
   [junit4]   2>        5       /solr/clusterprops.json
   [junit4]   2>        4       /solr/security.json
   [junit4]   2>        4       /solr/configs/conf1
   [junit4]   2>        3       /solr/collections/collection1/state.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2>        5       /solr/clusterstate.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2>        88      /solr/overseer/collection-queue-work
   [junit4]   2>        27      /solr/overseer/queue
   [junit4]   2>        5       /solr/live_nodes
   [junit4]   2>        5       /solr/collections
   [junit4]   2>        4       /solr/overseer/queue-work
   [junit4]   2> 
   [junit4]   2> NOTE: download the large Jenkins line-docs file by running 
'ant get-jenkins-line-docs' in the lucene directory.
   [junit4]   2> NOTE: reproduce with: ant test  
-Dtestcase=TestInPlaceUpdatesDistrib -Dtests.method=test 
-Dtests.seed=BE76C68615415A1 -Dtests.multiplier=2 -Dtests.nightly=true 
-Dtests.slow=true 
-Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/test-data/enwiki.random.lines.txt
 -Dtests.locale=da-DK -Dtests.timezone=Africa/Nairobi -Dtests.asserts=true 
-Dtests.file.encoding=UTF-8
   [junit4] FAILURE 88.4s J0 | TestInPlaceUpdatesDistrib.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: 'sanitycheck' results 
against client: org.apache.solr.client.solrj.impl.HttpSolrClient@1637f00 (not 
leader) wrong [docid] for SolrDocument{id=94, 
id_field_copy_that_does_not_support_in_place_update_s=94, title_s=title94, 
id_i=94, inplace_updatable_float=101.0, _version_=1561066829263142912, 
inplace_updatable_int_with_default=666, 
inplace_updatable_float_with_default=42.0, [docid]=5586} expected:<6049> but 
was:<5586>
   [junit4]    >        at 
__randomizedtesting.SeedInfo.seed([BE76C68615415A1:83B353B2CFA87859]:0)
   [junit4]    >        at 
org.apache.solr.update.TestInPlaceUpdatesDistrib.assertDocIdsAndValuesInResults(TestInPlaceUpdatesDistrib.java:442)
   [junit4]    >        at 
org.apache.solr.update.TestInPlaceUpdatesDistrib.assertDocIdsAndValuesAgainstAllClients(TestInPlaceUpdatesDistrib.java:413)
   [junit4]    >        at 
org.apache.solr.update.TestInPlaceUpdatesDistrib.docValuesUpdateTest(TestInPlaceUpdatesDistrib.java:321)
   [junit4]    >        at 
org.apache.solr.update.TestInPlaceUpdatesDistrib.test(TestInPlaceUpdatesDistrib.java:140)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:992)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:967)
   [junit4]    >        at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 956832 INFO  
(SUITE-TestInPlaceUpdatesDistrib-seed#[BE76C68615415A1]-worker) [    ] 
o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> 956832 INFO  
(SUITE-TestInPlaceUpdatesDistrib-seed#[BE76C68615415A1]-worker) [    ] 
o.a.s.c.CoreContainer Shutting down CoreContainer instance=1367349377
   [junit4]   2> 956832 INFO  (coreCloseExecutor-2471-thread-1) [    
x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore 
org.apache.solr.core.SolrCore@40e96d42
   [junit4]   2> 956844 INFO  (coreCloseExecutor-2471-thread-1) [    
x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for: 
solr.core.collection1
   [junit4]   2> 956846 INFO  
(SUITE-TestInPlaceUpdatesDistrib-seed#[BE76C68615415A1]-worker) [    ] 
o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> NOTE: leaving temporary files on disk at: 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.x/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_BE76C68615415A1-001
   [junit4]   2> Mar 05, 2017 9:29:56 PM 
com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
   [junit4]   2> WARNING: Will linger awaiting termination of 1 leaked 
thread(s).
   [junit4]   2> NOTE: test params are: codec=Lucene62, 
sim=RandomSimilarity(queryNorm=false,coord=no): {}, locale=da-DK, 
timezone=Africa/Nairobi
   [junit4]   2> NOTE: Linux 3.13.0-85-generic amd64/Oracle Corporation 
1.8.0_121 (64-bit)/cpus=4,threads=1,free=171107024,total=527433728
   [junit4]   2> NOTE: All tests run in this JVM: [TestFastLRUCache, 
TestFaceting, TestSizeLimitedDistributedMap, TestCustomDocTransformer, 
TestReRankQParserPlugin, LeaderElectionIntegrationTest, 
CoreMergeIndexesAdminHandlerTest, TestCloudSchemaless, 
DisMaxRequestHandlerTest, TestDynamicFieldResource, DeleteStatusTest, 
ResponseHeaderTest, TestJmxIntegration, TestComplexPhraseLeadingWildcard, 
TestJoin, TestHighlightDedupGrouping, CacheHeaderTest, 
TestObjectReleaseTracker, FastVectorHighlighterTest, 
XsltUpdateRequestHandlerTest, TestOmitPositions, NotRequiredUniqueKeyTest, 
DeleteNodeTest, TestDynamicLoading, TestFieldCacheSanityChecker, 
TestReplicaProperties, PeerSyncTest, 
OverriddenZkACLAndCredentialsProvidersTest, VersionInfoTest, TestConfigSets, 
TestRestManager, TestSolr4Spatial, TestDocTermOrds, ConfigSetsAPITest, 
TestIntervalFaceting, ShufflingReplicaListTransformerTest, 
TestUniqueKeyFieldResource, UUIDUpdateProcessorFallbackTest, 
DistributedFacetPivotSmallAdvancedTest, UUIDFieldTest, 
DistributedFacetExistsSmallTest, TestJsonFacets, 
PKIAuthenticationIntegrationTest, SuggestComponentTest, 
TestDFISimilarityFactory, MissingSegmentRecoveryTest, CursorPagingTest, 
ShardSplitTest, SecurityConfHandlerTest, TestDistribIDF, 
TestSolrCoreProperties, TestScoreJoinQPNoScore, TestInPlaceUpdatesDistrib]
   [junit4] Completed [125/697 (2!)] on J0 in 89.11s, 1 test, 1 failure <<< 
FAILURES!

[...truncated 64472 lines...]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to