Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-6.5/8/

2 tests failed.
FAILED:  org.apache.solr.cloud.hdfs.StressHdfsTest.test

Error Message:
Timeout occured while waiting response from server at: http://127.0.0.1:40813/v_ghc/jp

Stack Trace:
org.apache.solr.client.solrj.SolrServerException: Timeout occured while waiting response from server at: http://127.0.0.1:40813/v_ghc/jp
        at __randomizedtesting.SeedInfo.seed([4A8D6E82FB94F27:8CFCE932814522DF]:0)
        at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:621)
        at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:279)
        at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:268)
        at org.apache.solr.client.solrj.impl.LBHttpSolrClient.doRequest(LBHttpSolrClient.java:435)
        at org.apache.solr.client.solrj.impl.LBHttpSolrClient.request(LBHttpSolrClient.java:387)
        at org.apache.solr.client.solrj.impl.CloudSolrClient.sendRequest(CloudSolrClient.java:1376)
        at org.apache.solr.client.solrj.impl.CloudSolrClient.requestWithRetryOnStaleState(CloudSolrClient.java:1127)
        at org.apache.solr.client.solrj.impl.CloudSolrClient.request(CloudSolrClient.java:1057)
        at org.apache.solr.client.solrj.SolrClient.request(SolrClient.java:1219)
        at org.apache.solr.cloud.hdfs.StressHdfsTest.createAndDeleteCollection(StressHdfsTest.java:220)
        at org.apache.solr.cloud.hdfs.StressHdfsTest.test(StressHdfsTest.java:103)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
        at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:992)
        at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:967)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.net.SocketTimeoutException: Read timed out
        at java.net.SocketInputStream.socketRead0(Native Method)
        at java.net.SocketInputStream.socketRead(SocketInputStream.java:116)
        at java.net.SocketInputStream.read(SocketInputStream.java:171)
        at java.net.SocketInputStream.read(SocketInputStream.java:141)
        at org.apache.http.impl.io.AbstractSessionInputBuffer.fillBuffer(AbstractSessionInputBuffer.java:160)
        at org.apache.http.impl.io.SocketInputBuffer.fillBuffer(SocketInputBuffer.java:84)
        at org.apache.http.impl.io.AbstractSessionInputBuffer.readLine(AbstractSessionInputBuffer.java:273)
        at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:140)
        at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:57)
        at org.apache.http.impl.io.AbstractMessageParser.parse(AbstractMessageParser.java:261)
        at org.apache.http.impl.AbstractHttpClientConnection.receiveResponseHeader(AbstractHttpClientConnection.java:283)
        at org.apache.http.impl.conn.DefaultClientConnection.receiveResponseHeader(DefaultClientConnection.java:251)
        at org.apache.http.impl.conn.ManagedClientConnectionImpl.receiveResponseHeader(ManagedClientConnectionImpl.java:197)
        at org.apache.http.protocol.HttpRequestExecutor.doReceiveResponse(HttpRequestExecutor.java:272)
        at org.apache.http.protocol.HttpRequestExecutor.execute(HttpRequestExecutor.java:124)
        at org.apache.http.impl.client.DefaultRequestDirector.tryExecute(DefaultRequestDirector.java:685)
        at org.apache.http.impl.client.DefaultRequestDirector.execute(DefaultRequestDirector.java:487)
        at org.apache.http.impl.client.AbstractHttpClient.doExecute(AbstractHttpClient.java:882)
        at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:82)
        at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:55)
        at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:515)
        ... 51 more
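
The root cause here is a plain client-side read timeout: the SolrJ request made from StressHdfsTest.createAndDeleteCollection waited longer than the configured socket timeout for the Jetty node at 127.0.0.1:40813 to answer. As a point of reference only, a minimal sketch of where those client-side timeouts live in the SolrJ 6.x API follows (this is not the test's code; the class name is hypothetical, and the values mirror the socketTimeout=340000&connTimeout=45000 that UpdateShardHandler prints further down in the build log):

    // Hedged sketch, SolrJ 6.x: the "Read timed out" above fires when the socket
    // (read) timeout elapses before the server returns its response headers.
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    public class TimeoutSketch {                      // hypothetical class name
      public static void main(String[] args) throws Exception {
        try (HttpSolrClient client =
                 new HttpSolrClient.Builder("http://127.0.0.1:40813/v_ghc/jp").build()) {
          client.setSoTimeout(340000);         // socket (read) timeout, ms
          client.setConnectionTimeout(45000);  // connect timeout, ms
          client.query("collection1", new SolrQuery("*:*"));
        }
      }
    }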


FAILED:  org.apache.solr.handler.component.TestDistributedStatsComponentCardinality.test

Error Message:
Captured an uncaught exception in thread: Thread[id=30575, name=Thread-10783, state=RUNNABLE, group=TGRP-TestDistributedStatsComponentCardinality]

Stack Trace:
com.carrotsearch.randomizedtesting.UncaughtExceptionError: Captured an uncaught exception in thread: Thread[id=30575, name=Thread-10783, state=RUNNABLE, group=TGRP-TestDistributedStatsComponentCardinality]
        at __randomizedtesting.SeedInfo.seed([4A8D6E82FB94F27:8CFCE932814522DF]:0)
Caused by: org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error from server at http://127.0.0.1:53396/_/collection1: org.apache.solr.client.solrj.SolrServerException: IOException occured when talking to server at: http://127.0.0.1:53285/_/collection1
        at __randomizedtesting.SeedInfo.seed([4A8D6E82FB94F27]:0)
        at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:610)
        at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:279)
        at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:268)
        at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:160)
        at org.apache.solr.client.solrj.SolrClient.query(SolrClient.java:942)
        at org.apache.solr.client.solrj.SolrClient.query(SolrClient.java:957)
        at org.apache.solr.BaseDistributedSearchTestCase$5.run(BaseDistributedSearchTestCase.java:627)
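
The second failure is a knock-on network error rather than an assertion failure: the background query thread started by BaseDistributedSearchTestCase got a RemoteSolrException because the node it queried (127.0.0.1:53396) hit an IOException while talking to 127.0.0.1:53285, and since nothing in that thread catches the exception, the randomizedtesting runner reports it as an UncaughtExceptionError. A minimal sketch of that shape (hypothetical class name, SolrJ 6.x API; not the test's actual code):

    // Hedged sketch: a background query thread dying on an unhandled exception is
    // what the runner surfaces as "Captured an uncaught exception in thread ...".
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    public class BackgroundQuerySketch {              // hypothetical class name
      public static void main(String[] args) {
        Thread worker = new Thread(() -> {
          try (HttpSolrClient client =
                   new HttpSolrClient.Builder("http://127.0.0.1:53396/_/collection1").build()) {
            client.query(new SolrQuery("*:*"));       // distributed query; a remote shard may fail
          } catch (Exception e) {
            // Rethrown unchecked and left uncaught, this kills the thread; that is
            // the condition the test runner captures in the failure above.
            throw new RuntimeException(e);
          }
        }, "query-worker");
        worker.start();
      }
    }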




Build Log:
[...truncated 12702 lines...]
   [junit4] Suite: org.apache.solr.cloud.hdfs.StressHdfsTest
   [junit4]   2> Creating dataDir: 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/init-core-data-001
   [junit4]   2> 2962939 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Using PointFields
   [junit4]   2> 2962940 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: 
@org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl=https://issues.apache.org/jira/browse/SOLR-5776)
   [junit4]   2> 2962940 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] 
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: 
/v_ghc/jp
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 2963014 WARN  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] 
o.a.h.m.i.MetricsConfig Cannot locate configuration: tried 
hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 2963048 WARN  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 2963054 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] o.m.log jetty-6.1.26
   [junit4]   2> 2963070 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] o.m.log Extract 
jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.2-tests.jar!/webapps/hdfs
 to ./temp/Jetty_localhost_35998_hdfs____lg2www/webapp
   [junit4]   2> 2963474 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] o.m.log Started 
HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:35998
   [junit4]   2> 2963546 WARN  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 2963548 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] o.m.log jetty-6.1.26
   [junit4]   2> 2963562 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] o.m.log Extract 
jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.2-tests.jar!/webapps/datanode
 to ./temp/Jetty_localhost_47776_datanode____.u3z9gm/webapp
   [junit4]   2> 2963950 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] o.m.log Started 
HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:47776
   [junit4]   2> 2964073 WARN  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 2964074 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] o.m.log jetty-6.1.26
   [junit4]   2> 2964100 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] o.m.log Extract 
jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.2-tests.jar!/webapps/datanode
 to ./temp/Jetty_localhost_52359_datanode____7yd269/webapp
   [junit4]   2> 2964274 INFO  (IPC Server handler 3 on 33948) [    ] 
BlockStateChange BLOCK* processReport: from storage 
DS-bc19ce75-7917-4bc6-a91d-4bcc8a220c79 node 
DatanodeRegistration(127.0.0.1:54396, 
datanodeUuid=60b8cd62-ff88-4133-8041-689e1ff82b99, infoPort=46011, 
infoSecurePort=0, ipcPort=47780, 
storageInfo=lv=-56;cid=testClusterID;nsid=1570676144;c=0), blocks: 0, 
hasStaleStorage: true, processing time: 1 msecs
   [junit4]   2> 2964274 INFO  (IPC Server handler 3 on 33948) [    ] 
BlockStateChange BLOCK* processReport: from storage 
DS-132806c9-e06b-4ef6-a40d-44501a5dca87 node 
DatanodeRegistration(127.0.0.1:54396, 
datanodeUuid=60b8cd62-ff88-4133-8041-689e1ff82b99, infoPort=46011, 
infoSecurePort=0, ipcPort=47780, 
storageInfo=lv=-56;cid=testClusterID;nsid=1570676144;c=0), blocks: 0, 
hasStaleStorage: false, processing time: 0 msecs
   [junit4]   2> 2964625 INFO  
(SUITE-StressHdfsTest-seed#[4A8D6E82FB94F27]-worker) [    ] o.m.log Started 
HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:52359
   [junit4]   2> 2964760 INFO  (IPC Server handler 4 on 33948) [    ] 
BlockStateChange BLOCK* processReport: from storage 
DS-4ca215dc-b111-4c33-ad88-2a749a87cf1f node 
DatanodeRegistration(127.0.0.1:53188, 
datanodeUuid=0d68c4e8-3a01-48fd-abdd-434390817b70, infoPort=49396, 
infoSecurePort=0, ipcPort=50396, 
storageInfo=lv=-56;cid=testClusterID;nsid=1570676144;c=0), blocks: 0, 
hasStaleStorage: true, processing time: 0 msecs
   [junit4]   2> 2964760 INFO  (IPC Server handler 4 on 33948) [    ] 
BlockStateChange BLOCK* processReport: from storage 
DS-e283116e-33a6-4176-a964-41b8053df33a node 
DatanodeRegistration(127.0.0.1:53188, 
datanodeUuid=0d68c4e8-3a01-48fd-abdd-434390817b70, infoPort=49396, 
infoSecurePort=0, ipcPort=50396, 
storageInfo=lv=-56;cid=testClusterID;nsid=1570676144;c=0), blocks: 0, 
hasStaleStorage: false, processing time: 0 msecs
   [junit4]   2> 2964997 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.c.ZkTestServer 
STARTING ZK TEST SERVER
   [junit4]   2> 2964997 INFO  (Thread-51363) [    ] o.a.s.c.ZkTestServer 
client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 2964997 INFO  (Thread-51363) [    ] o.a.s.c.ZkTestServer 
Starting server
   [junit4]   2> 2965097 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.c.ZkTestServer 
start zk server on port:44301
   [junit4]   2> 2965115 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
 to /configs/conf1/solrconfig.xml
   [junit4]   2> 2965117 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/core/src/test-files/solr/collection1/conf/schema.xml
 to /configs/conf1/schema.xml
   [junit4]   2> 2965119 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
 to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 2965121 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
 to /configs/conf1/stopwords.txt
   [junit4]   2> 2965126 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/core/src/test-files/solr/collection1/conf/protwords.txt
 to /configs/conf1/protwords.txt
   [junit4]   2> 2965127 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/core/src/test-files/solr/collection1/conf/currency.xml
 to /configs/conf1/currency.xml
   [junit4]   2> 2965129 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
 to /configs/conf1/enumsConfig.xml
   [junit4]   2> 2965130 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
 to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 2965131 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
 to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 2965133 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
 to /configs/conf1/old_synonyms.txt
   [junit4]   2> 2965134 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
 to /configs/conf1/synonyms.txt
   [junit4]   2> 2965253 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.SolrTestCaseJ4 
Writing core.properties file to 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/control-001/cores/collection1
   [junit4]   2> 2965255 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.e.j.s.Server 
jetty-9.3.14.v20161028
   [junit4]   2> 2965256 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@54c46106{/v_ghc/jp,null,AVAILABLE}
   [junit4]   2> 2965257 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@44640ec5{HTTP/1.1,[http/1.1]}{127.0.0.1:40813}
   [junit4]   2> 2965257 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.e.j.s.Server Started 
@2971736ms
   [junit4]   2> 2965257 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://localhost:33948/hdfs__localhost_33948__x1_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-6.5_checkout_solr_build_solr-core_test_J0_temp_solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001_tempDir-002_control_data,
 hostContext=/v_ghc/jp, hostPort=40813, 
coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/control-001/cores}
   [junit4]   2> 2965257 ERROR 
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 2965257 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
6.5.0
   [junit4]   2> 2965257 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2965257 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2965257 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-04-06T12:48:06.204Z
   [junit4]   2> 2965262 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 2965262 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.c.SolrXmlConfig 
Loading container configuration from 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/control-001/solr.xml
   [junit4]   2> 2965269 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: 
socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 2965270 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.c.ZkContainer 
Zookeeper client=127.0.0.1:44301/solr
   [junit4]   2> 2965292 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:40813_v_ghc%2Fjp 
   ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 2965293 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:40813_v_ghc%2Fjp 
   ] o.a.s.c.OverseerElectionContext I am going to be the leader 
127.0.0.1:40813_v_ghc%2Fjp
   [junit4]   2> 2965294 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:40813_v_ghc%2Fjp 
   ] o.a.s.c.Overseer Overseer 
(id=97745822413357060-127.0.0.1:40813_v_ghc%2Fjp-n_0000000000) starting
   [junit4]   2> 2965301 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:40813_v_ghc%2Fjp 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:40813_v_ghc%2Fjp
   [junit4]   2> 2965308 INFO  
(zkCallback-2015-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (0) -> (1)
   [junit4]   2> 2965379 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:40813_v_ghc%2Fjp 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/control-001/cores
   [junit4]   2> 2965379 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:40813_v_ghc%2Fjp 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2965390 INFO  
(OverseerStateUpdate-97745822413357060-127.0.0.1:40813_v_ghc%2Fjp-n_0000000000) 
[n:127.0.0.1:40813_v_ghc%2Fjp    ] o.a.s.c.o.ReplicaMutator Assigning new node 
to shard shard=shard1
   [junit4]   2> 2966400 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.SolrConfig Using Lucene MatchVersion: 6.5.0
   [junit4]   2> 2966415 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2966529 WARN  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.s.IndexSchema [collection1] default search field in schema is text. 
WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 2966532 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 2966545 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from 
collection control_collection
   [junit4]   2> 2966545 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:33948/solr_hdfs_home
   [junit4]   2> 2966546 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2966546 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 2966546 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/control-001/cores/collection1],
 
dataDir=[hdfs://localhost:33948/solr_hdfs_home/control_collection/core_node1/data/]
   [junit4]   2> 2966547 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@558a952a
   [junit4]   2> 2966547 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/control_collection/core_node1/data/snapshot_metadata
   [junit4]   2> 2966563 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2966563 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[8388608] will allocate [1] slabs and use ~[8388608] bytes
   [junit4]   2> 2966576 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2966584 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/control_collection/core_node1/data
   [junit4]   2> 2966609 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/control_collection/core_node1/data/index
   [junit4]   2> 2966618 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2966618 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[8388608] will allocate [1] slabs and use ~[8388608] bytes
   [junit4]   2> 2966626 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2966626 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: 
maxMergeAtOnce=4, maxMergeAtOnceExplicit=2, maxMergedSegmentMB=1.8095703125, 
floorSegmentMB=1.1337890625, forceMergeDeletesPctAllowed=23.087638926508667, 
segmentsPerTier=23.0, maxCFSSegmentSizeMB=2.0625, noCFSRatio=0.7244014515235152
   [junit4]   2> 2966657 INFO  (IPC Server handler 9 on 33948) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:53188 is 
added to blk_1073741825_1001{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-132806c9-e06b-4ef6-a40d-44501a5dca87:NORMAL:127.0.0.1:54396|RBW],
 
ReplicaUC[[DISK]DS-4ca215dc-b111-4c33-ad88-2a749a87cf1f:NORMAL:127.0.0.1:53188|FINALIZED]]}
 size 0
   [junit4]   2> 2966660 INFO  (IPC Server handler 1 on 33948) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:54396 is 
added to blk_1073741825_1001 size 71
   [junit4]   2> 2966672 WARN  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 2966748 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 2966748 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH 
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2966748 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 2966760 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2966760 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2966763 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=20, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=0.6474609375, noCFSRatio=0.0]
   [junit4]   2> 2966795 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@160677d0[collection1] main]
   [junit4]   2> 2966797 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 2966798 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2966798 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 2966800 INFO  
(searcherExecutor-4943-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp 
x:collection1 c:control_collection) [n:127.0.0.1:40813_v_ghc%2Fjp 
c:control_collection   x:collection1] o.a.s.c.SolrCore [collection1] Registered 
new searcher Searcher@160677d0[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2966803 INFO  
(coreLoadExecutor-4942-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp c:control_collection   x:collection1] 
o.a.s.u.UpdateLog Could not find max version in index or recent updates, using 
new clock 1563933160505344000
   [junit4]   2> 2966822 INFO  
(coreZkRegister-4935-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp 
x:collection1 c:control_collection) [n:127.0.0.1:40813_v_ghc%2Fjp 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 2966823 INFO  
(coreZkRegister-4935-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp 
x:collection1 c:control_collection) [n:127.0.0.1:40813_v_ghc%2Fjp 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 2966823 INFO  
(coreZkRegister-4935-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp 
x:collection1 c:control_collection) [n:127.0.0.1:40813_v_ghc%2Fjp 
c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
Sync replicas to http://127.0.0.1:40813/v_ghc/jp/collection1/
   [junit4]   2> 2966823 INFO  
(coreZkRegister-4935-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp 
x:collection1 c:control_collection) [n:127.0.0.1:40813_v_ghc%2Fjp 
c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
Sync Success - now sync replicas to me
   [junit4]   2> 2966823 INFO  
(coreZkRegister-4935-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp 
x:collection1 c:control_collection) [n:127.0.0.1:40813_v_ghc%2Fjp 
c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
http://127.0.0.1:40813/v_ghc/jp/collection1/ has no replicas
   [junit4]   2> 2966823 INFO  
(coreZkRegister-4935-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp 
x:collection1 c:control_collection) [n:127.0.0.1:40813_v_ghc%2Fjp 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.c.ShardLeaderElectionContext Found all replicas participating in 
election, clear LIR
   [junit4]   2> 2966828 INFO  
(coreZkRegister-4935-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp 
x:collection1 c:control_collection) [n:127.0.0.1:40813_v_ghc%2Fjp 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.c.ShardLeaderElectionContext I am the new leader: 
http://127.0.0.1:40813/v_ghc/jp/collection1/ shard1
   [junit4]   2> 2966979 INFO  
(coreZkRegister-4935-thread-1-processing-n:127.0.0.1:40813_v_ghc%2Fjp 
x:collection1 c:control_collection) [n:127.0.0.1:40813_v_ghc%2Fjp 
c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController 
I am the leader, no recovery necessary
   [junit4]   2> 2967225 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2967226 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:44301/solr ready
   [junit4]   2> 2967227 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.c.ChaosMonkey 
monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 2967343 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.SolrTestCaseJ4 
Writing core.properties file to 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-1-001/cores/collection1
   [junit4]   2> 2967344 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-1-001
   [junit4]   2> 2967345 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.e.j.s.Server 
jetty-9.3.14.v20161028
   [junit4]   2> 2967346 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@6580348{/v_ghc/jp,null,AVAILABLE}
   [junit4]   2> 2967346 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@5bac08e{HTTP/1.1,[http/1.1]}{127.0.0.1:59032}
   [junit4]   2> 2967346 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.e.j.s.Server Started 
@2973826ms
   [junit4]   2> 2967347 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://localhost:33948/hdfs__localhost_33948__x1_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-6.5_checkout_solr_build_solr-core_test_J0_temp_solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001_tempDir-002_jetty1,
 solrconfig=solrconfig.xml, hostContext=/v_ghc/jp, hostPort=59032, 
coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-1-001/cores}
   [junit4]   2> 2967347 ERROR 
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 2967354 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
6.5.0
   [junit4]   2> 2967354 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2967354 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2967354 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-04-06T12:48:08.301Z
   [junit4]   2> 2967357 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 2967357 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.c.SolrXmlConfig 
Loading container configuration from 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-1-001/solr.xml
   [junit4]   2> 2967365 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: 
socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 2967366 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.c.ZkContainer 
Zookeeper client=127.0.0.1:44301/solr
   [junit4]   2> 2967376 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:59032_v_ghc%2Fjp 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2967379 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:59032_v_ghc%2Fjp 
   ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 2967381 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:59032_v_ghc%2Fjp 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:59032_v_ghc%2Fjp
   [junit4]   2> 2967390 INFO  
(zkCallback-2015-thread-3-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (1) -> (2)
   [junit4]   2> 2967390 INFO  (zkCallback-2019-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2967392 INFO  
(zkCallback-2024-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (1) -> (2)
   [junit4]   2> 2967476 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:59032_v_ghc%2Fjp 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-1-001/cores
   [junit4]   2> 2967476 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:59032_v_ghc%2Fjp 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2967489 INFO  
(OverseerStateUpdate-97745822413357060-127.0.0.1:40813_v_ghc%2Fjp-n_0000000000) 
[n:127.0.0.1:40813_v_ghc%2Fjp    ] o.a.s.c.o.ReplicaMutator Assigning new node 
to shard shard=shard1
   [junit4]   2> 2968505 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.c.SolrConfig 
Using Lucene MatchVersion: 6.5.0
   [junit4]   2> 2968520 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2968638 WARN  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.IndexSchema [collection1] default search field in schema is text. 
WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 2968641 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 2968654 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from 
collection collection1
   [junit4]   2> 2968655 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:33948/solr_hdfs_home
   [junit4]   2> 2968655 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2968655 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.c.SolrCore 
solr.RecoveryStrategy.Builder
   [junit4]   2> 2968656 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-1-001/cores/collection1],
 dataDir=[hdfs://localhost:33948/solr_hdfs_home/collection1/core_node1/data/]
   [junit4]   2> 2968656 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@558a952a
   [junit4]   2> 2968656 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/collection1/core_node1/data/snapshot_metadata
   [junit4]   2> 2968667 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2968667 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[8388608] will allocate [1] slabs and use ~[8388608] bytes
   [junit4]   2> 2968674 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2968675 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/collection1/core_node1/data
   [junit4]   2> 2968698 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/collection1/core_node1/data/index
   [junit4]   2> 2968706 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2968706 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[8388608] will allocate [1] slabs and use ~[8388608] bytes
   [junit4]   2> 2968721 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2968722 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: 
maxMergeAtOnce=4, maxMergeAtOnceExplicit=2, maxMergedSegmentMB=1.8095703125, 
floorSegmentMB=1.1337890625, forceMergeDeletesPctAllowed=23.087638926508667, 
segmentsPerTier=23.0, maxCFSSegmentSizeMB=2.0625, noCFSRatio=0.7244014515235152
   [junit4]   2> 2968791 INFO  (IPC Server handler 7 on 33948) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:54396 is 
added to blk_1073741826_1002{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-4ca215dc-b111-4c33-ad88-2a749a87cf1f:NORMAL:127.0.0.1:53188|RBW],
 
ReplicaUC[[DISK]DS-bc19ce75-7917-4bc6-a91d-4bcc8a220c79:NORMAL:127.0.0.1:54396|FINALIZED]]}
 size 0
   [junit4]   2> 2968792 INFO  (IPC Server handler 8 on 33948) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:53188 is 
added to blk_1073741826_1002 size 71
   [junit4]   2> 2968804 WARN  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 2968886 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 2968886 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2968886 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 2968899 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2968899 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2968902 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=20, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=0.6474609375, noCFSRatio=0.0]
   [junit4]   2> 2968916 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@5ddd1e71[collection1] main]
   [junit4]   2> 2968918 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 2968918 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2968919 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 2968920 INFO  
(searcherExecutor-4954-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp 
x:collection1 c:collection1) [n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   
x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@5ddd1e71[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2968924 INFO  
(coreLoadExecutor-4953-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.u.UpdateLog 
Could not find max version in index or recent updates, using new clock 
1563933162729373696
   [junit4]   2> 2968936 INFO  
(coreZkRegister-4948-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp 
x:collection1 c:collection1) [n:127.0.0.1:59032_v_ghc%2Fjp c:collection1 
s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough 
replicas found to continue.
   [junit4]   2> 2968936 INFO  
(coreZkRegister-4948-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp 
x:collection1 c:collection1) [n:127.0.0.1:59032_v_ghc%2Fjp c:collection1 
s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may 
be the new leader - try and sync
   [junit4]   2> 2968936 INFO  
(coreZkRegister-4948-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp 
x:collection1 c:collection1) [n:127.0.0.1:59032_v_ghc%2Fjp c:collection1 
s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
http://127.0.0.1:59032/v_ghc/jp/collection1/
   [junit4]   2> 2968936 INFO  
(coreZkRegister-4948-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp 
x:collection1 c:collection1) [n:127.0.0.1:59032_v_ghc%2Fjp c:collection1 
s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now 
sync replicas to me
   [junit4]   2> 2968937 INFO  
(coreZkRegister-4948-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp 
x:collection1 c:collection1) [n:127.0.0.1:59032_v_ghc%2Fjp c:collection1 
s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
http://127.0.0.1:59032/v_ghc/jp/collection1/ has no replicas
   [junit4]   2> 2968937 INFO  
(coreZkRegister-4948-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp 
x:collection1 c:collection1) [n:127.0.0.1:59032_v_ghc%2Fjp c:collection1 
s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Found 
all replicas participating in election, clear LIR
   [junit4]   2> 2968945 INFO  
(coreZkRegister-4948-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp 
x:collection1 c:collection1) [n:127.0.0.1:59032_v_ghc%2Fjp c:collection1 
s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am 
the new leader: http://127.0.0.1:59032/v_ghc/jp/collection1/ shard1
   [junit4]   2> 2969096 INFO  
(coreZkRegister-4948-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp 
x:collection1 c:collection1) [n:127.0.0.1:59032_v_ghc%2Fjp c:collection1 
s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no 
recovery necessary
   [junit4]   2> 2969425 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.SolrTestCaseJ4 
Writing core.properties file to 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-2-001/cores/collection1
   [junit4]   2> 2969426 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-2-001
   [junit4]   2> 2969426 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.e.j.s.Server 
jetty-9.3.14.v20161028
   [junit4]   2> 2969428 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@29ec3bbc{/v_ghc/jp,null,AVAILABLE}
   [junit4]   2> 2969428 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@4110bb01{HTTP/1.1,[http/1.1]}{127.0.0.1:52718}
   [junit4]   2> 2969428 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.e.j.s.Server Started 
@2975908ms
   [junit4]   2> 2969428 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://localhost:33948/hdfs__localhost_33948__x1_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-6.5_checkout_solr_build_solr-core_test_J0_temp_solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001_tempDir-002_jetty2,
 solrconfig=solrconfig.xml, hostContext=/v_ghc/jp, hostPort=52718, 
coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-2-001/cores}
   [junit4]   2> 2969429 ERROR 
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 2969430 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
6.5.0
   [junit4]   2> 2969430 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2969430 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2969430 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-04-06T12:48:10.377Z
   [junit4]   2> 2969433 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 2969433 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.c.SolrXmlConfig 
Loading container configuration from 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-2-001/solr.xml
   [junit4]   2> 2969440 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: 
socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 2969441 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.c.ZkContainer 
Zookeeper client=127.0.0.1:44301/solr
   [junit4]   2> 2969452 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:52718_v_ghc%2Fjp 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 2969455 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:52718_v_ghc%2Fjp 
   ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 2969457 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:52718_v_ghc%2Fjp 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:52718_v_ghc%2Fjp
   [junit4]   2> 2969460 INFO  
(zkCallback-2024-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (2) -> (3)
   [junit4]   2> 2969460 INFO  (zkCallback-2019-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2969460 INFO  
(zkCallback-2015-thread-3-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (2) -> (3)
   [junit4]   2> 2969466 INFO  
(zkCallback-2030-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (2) -> (3)
   [junit4]   2> 2969531 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:52718_v_ghc%2Fjp 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-2-001/cores
   [junit4]   2> 2969531 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:52718_v_ghc%2Fjp 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2969537 INFO  
(OverseerStateUpdate-97745822413357060-127.0.0.1:40813_v_ghc%2Fjp-n_0000000000) 
[n:127.0.0.1:40813_v_ghc%2Fjp    ] o.a.s.c.o.ReplicaMutator Assigning new node 
to shard shard=shard1
   [junit4]   2> 2970550 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.c.SolrConfig 
Using Lucene MatchVersion: 6.5.0
   [junit4]   2> 2970568 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2970692 WARN  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.IndexSchema [collection1] default search field in schema is text. 
WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 2970695 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 2970707 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from 
collection collection1
   [junit4]   2> 2970707 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:33948/solr_hdfs_home
   [junit4]   2> 2970707 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2970707 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.c.SolrCore 
solr.RecoveryStrategy.Builder
   [junit4]   2> 2970708 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-2-001/cores/collection1],
 dataDir=[hdfs://localhost:33948/solr_hdfs_home/collection1/core_node2/data/]
   [junit4]   2> 2970708 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@558a952a
   [junit4]   2> 2970709 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/collection1/core_node2/data/snapshot_metadata
   [junit4]   2> 2970718 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2970718 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[8388608] will allocate [1] slabs and use ~[8388608] bytes
   [junit4]   2> 2970725 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2970725 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/collection1/core_node2/data
   [junit4]   2> 2970746 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/collection1/core_node2/data/index
   [junit4]   2> 2970754 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2970754 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[8388608] will allocate [1] slabs and use ~[8388608] bytes
   [junit4]   2> 2970757 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2970758 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: 
maxMergeAtOnce=4, maxMergeAtOnceExplicit=2, maxMergedSegmentMB=1.8095703125, 
floorSegmentMB=1.1337890625, forceMergeDeletesPctAllowed=23.087638926508667, 
segmentsPerTier=23.0, maxCFSSegmentSizeMB=2.0625, noCFSRatio=0.7244014515235152
   [junit4]   2> 2970784 INFO  (IPC Server handler 8 on 33948) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:54396 is 
added to blk_1073741827_1003{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-4ca215dc-b111-4c33-ad88-2a749a87cf1f:NORMAL:127.0.0.1:53188|RBW],
 
ReplicaUC[[DISK]DS-132806c9-e06b-4ef6-a40d-44501a5dca87:NORMAL:127.0.0.1:54396|FINALIZED]]}
 size 0
   [junit4]   2> 2970786 INFO  (IPC Server handler 9 on 33948) [    ] 
BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:53188 is 
added to blk_1073741827_1003{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-4ca215dc-b111-4c33-ad88-2a749a87cf1f:NORMAL:127.0.0.1:53188|RBW],
 
ReplicaUC[[DISK]DS-132806c9-e06b-4ef6-a40d-44501a5dca87:NORMAL:127.0.0.1:54396|FINALIZED]]}
 size 0
   [junit4]   2> 2970796 WARN  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 2970862 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 2970862 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2970862 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 2970876 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2970876 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2970878 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=20, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=0.6474609375, noCFSRatio=0.0]
   [junit4]   2> 2970892 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@fe5d3a4[collection1] main]
   [junit4]   2> 2970893 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 2970894 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2970894 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 2970895 INFO  
(searcherExecutor-4965-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 c:collection1) [n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   
x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@fe5d3a4[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2970909 INFO  
(coreLoadExecutor-4964-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.u.UpdateLog 
Could not find max version in index or recent updates, using new clock 
1563933164810797056
   [junit4]   2> 2970924 INFO  
(coreZkRegister-4959-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 c:collection1) [n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 
s:shard1 r:core_node2 x:collection1] o.a.s.c.ZkController Core needs to 
recover:collection1
   [junit4]   2> 2970928 INFO  
(updateExecutor-2027-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 2970937 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. 
recoveringAfterStartup=true
   [junit4]   2> 2970937 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 2970937 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. 
core=[collection1]
   [junit4]   2> 2970937 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. 
HDFSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 2970938 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] 
as recovering, leader is [http://127.0.0.1:59032/v_ghc/jp/collection1/] and I 
am [http://127.0.0.1:52718/v_ghc/jp/collection1/]
   [junit4]   2> 2970957 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to 
[http://127.0.0.1:59032/v_ghc/jp]; [WaitForState: 
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:52718_v_ghc%252Fjp&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 2970976 INFO  (qtp750731099-60625) 
[n:127.0.0.1:59032_v_ghc%2Fjp    ] o.a.s.h.a.PrepRecoveryOp Going to wait for 
coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: 
true, onlyIfLeaderActive: true
   [junit4]   2> 2970980 INFO  (qtp750731099-60625) 
[n:127.0.0.1:59032_v_ghc%2Fjp    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 
183 seconds to see collection1 (shard1 of collection1) have state: recovering
   [junit4]   2> 2970980 INFO  (qtp750731099-60625) 
[n:127.0.0.1:59032_v_ghc%2Fjp    ] o.a.s.h.a.PrepRecoveryOp In 
WaitForState(recovering): collection=collection1, shard=shard1, 
thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, 
live=true, checkLive=true, currentState=down, localState=active, 
nodeName=127.0.0.1:52718_v_ghc%2Fjp, coreNodeName=core_node2, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node2:{"core":"collection1","base_url":"http://127.0.0.1:52718/v_ghc/jp","node_name":"127.0.0.1:52718_v_ghc%2Fjp","state":"down"}
   [junit4]   2> 2971742 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.SolrTestCaseJ4 
Writing core.properties file to 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-3-001/cores/collection1
   [junit4]   2> 2971743 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-3-001
   [junit4]   2> 2971746 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.e.j.s.Server 
jetty-9.3.14.v20161028
   [junit4]   2> 2971796 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@59583bab{/v_ghc/jp,null,AVAILABLE}
   [junit4]   2> 2971797 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@4f87b75f{HTTP/1.1,[http/1.1]}{127.0.0.1:49093}
   [junit4]   2> 2971797 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.e.j.s.Server Started 
@2978277ms
   [junit4]   2> 2971797 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://localhost:33948/hdfs__localhost_33948__x1_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-6.5_checkout_solr_build_solr-core_test_J0_temp_solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001_tempDir-002_jetty3,
 solrconfig=solrconfig.xml, hostContext=/v_ghc/jp, hostPort=49093, 
coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-3-001/cores}
   [junit4]   2> 2971797 ERROR 
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 2971810 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
6.5.0
   [junit4]   2> 2971810 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2971810 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2971810 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-04-06T12:48:12.757Z
   [junit4]   2> 2971831 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 2971832 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.c.SolrXmlConfig 
Loading container configuration from 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-3-001/solr.xml
   [junit4]   2> 2971844 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] 
o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: 
socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 2971846 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [    ] o.a.s.c.ZkContainer 
Zookeeper client=127.0.0.1:44301/solr
   [junit4]   2> 2971885 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:49093_v_ghc%2Fjp 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 2971888 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:49093_v_ghc%2Fjp 
   ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 2971901 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:49093_v_ghc%2Fjp 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:49093_v_ghc%2Fjp
   [junit4]   2> 2971903 INFO  
(zkCallback-2024-thread-1-processing-n:127.0.0.1:59032_v_ghc%2Fjp) 
[n:127.0.0.1:59032_v_ghc%2Fjp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (3) -> (4)
   [junit4]   2> 2971903 INFO  
(zkCallback-2030-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp) 
[n:127.0.0.1:52718_v_ghc%2Fjp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (3) -> (4)
   [junit4]   2> 2971903 INFO  (zkCallback-2019-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 2971903 INFO  
(zkCallback-2015-thread-3-processing-n:127.0.0.1:40813_v_ghc%2Fjp) 
[n:127.0.0.1:40813_v_ghc%2Fjp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (3) -> (4)
   [junit4]   2> 2971918 INFO  
(zkCallback-2037-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (3) -> (4)
   [junit4]   2> 2971980 INFO  (qtp750731099-60625) 
[n:127.0.0.1:59032_v_ghc%2Fjp    ] o.a.s.h.a.PrepRecoveryOp In 
WaitForState(recovering): collection=collection1, shard=shard1, 
thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, 
live=true, checkLive=true, currentState=recovering, localState=active, 
nodeName=127.0.0.1:52718_v_ghc%2Fjp, coreNodeName=core_node2, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node2:{"core":"collection1","dataDir":"hdfs://localhost:33948/solr_hdfs_home/collection1/core_node2/data/","base_url":"http://127.0.0.1:52718/v_ghc/jp","node_name":"127.0.0.1:52718_v_ghc%2Fjp","state":"recovering","ulogDir":"hdfs://localhost:33948/solr_hdfs_home/collection1/core_node2/data/tlog"}
   [junit4]   2> 2971981 INFO  (qtp750731099-60625) 
[n:127.0.0.1:59032_v_ghc%2Fjp    ] o.a.s.h.a.PrepRecoveryOp Waited 
coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: 
true for: 1 seconds.
   [junit4]   2> 2971981 INFO  (qtp750731099-60625) 
[n:127.0.0.1:59032_v_ghc%2Fjp    ] o.a.s.s.HttpSolrCall [admin] webapp=null 
path=/admin/cores 
params={nodeName=127.0.0.1:52718_v_ghc%252Fjp&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=0 QTime=1004
   [junit4]   2> 2972481 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from 
[http://127.0.0.1:59032/v_ghc/jp/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 2972482 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 
url=http://127.0.0.1:52718/v_ghc/jp START 
replicas=[http://127.0.0.1:59032/v_ghc/jp/collection1/] nUpdates=100
   [junit4]   2> 2972489 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:49093_v_ghc%2Fjp 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-3-001/cores
   [junit4]   2> 2972489 INFO  
(TEST-StressHdfsTest.test-seed#[4A8D6E82FB94F27]) [n:127.0.0.1:49093_v_ghc%2Fjp 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2972534 INFO  
(OverseerStateUpdate-97745822413357060-127.0.0.1:40813_v_ghc%2Fjp-n_0000000000) 
[n:127.0.0.1:40813_v_ghc%2Fjp    ] o.a.s.c.o.ReplicaMutator Assigning new node 
to shard shard=shard1
   [junit4]   2> 2972570 INFO  (qtp750731099-60622) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:32.0 
result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, 
maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 2972570 INFO  (qtp750731099-60622) 
[n:127.0.0.1:59032_v_ghc%2Fjp c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.S.Request [collection1]  webapp=/v_ghc/jp path=/get 
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
 status=0 QTime=33
   [junit4]   2> 2972574 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:3.0 
result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, 
maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 2972574 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a 
PeerSync 
   [junit4]   2> 2972574 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2972574 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping 
IW.commit.
   [junit4]   2> 2972575 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2972575 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was 
successful.
   [junit4]   2> 2972575 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during 
PeerSync.
   [junit4]   2> 2972575 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 2972575 INFO  
(recoveryExecutor-2028-thread-1-processing-n:127.0.0.1:52718_v_ghc%2Fjp 
x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:52718_v_ghc%2Fjp c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
   [junit4]   2> 2973567 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.c.SolrConfig 
Using Lucene MatchVersion: 6.5.0
   [junit4]   2> 2973585 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2973742 WARN  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.IndexSchema [collection1] default search field in schema is text. 
WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 2973745 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 2973778 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from 
collection collection1
   [junit4]   2> 2973778 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://localhost:33948/solr_hdfs_home
   [junit4]   2> 2973778 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2973778 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.c.SolrCore 
solr.RecoveryStrategy.Builder
   [junit4]   2> 2973779 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.StressHdfsTest_4A8D6E82FB94F27-001/shard-3-001/cores/collection1],
 dataDir=[hdfs://localhost:33948/solr_hdfs_home/collection1/core_node3/data/]
   [junit4]   2> 2973779 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@558a952a
   [junit4]   2> 2973779 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/collection1/core_node3/data/snapshot_metadata
   [junit4]   2> 2973801 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 2973801 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[8388608] will allocate [1] slabs and use ~[8388608] bytes
   [junit4]   2> 2973811 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2973812 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/collection1/core_node3/data
   [junit4]   2> 2973852 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://localhost:33948/solr_hdfs_home/collection1/core_node3/data/index
   [junit4]   2> 2973867 INFO  
(coreLoadExecutor-4975-thread-1-processing-n:127.0.0.1:49093_v_ghc%2Fjp) 
[n:127.0.0.1:49093_v_ghc%2Fjp c:collection1   x:collection1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation

[...truncated too long message...]

r.node
   [junit4]   2> 6190708 INFO  
(TEST-TestDistributedStatsComponentCardinality.test-seed#[4A8D6E82FB94F27]) [   
 ] o.e.j.s.h.ContextHandler Stopped 
o.e.j.s.ServletContextHandler@5a47afff{/_,null,UNAVAILABLE}
   [junit4]   2> 6190718 INFO  
(TEST-TestDistributedStatsComponentCardinality.test-seed#[4A8D6E82FB94F27]) [   
 ] o.e.j.s.AbstractConnector Stopped 
ServerConnector@390d44d7{HTTP/1.1,[http/1.1]}{127.0.0.1:0}
   [junit4]   2> 6190718 INFO  
(TEST-TestDistributedStatsComponentCardinality.test-seed#[4A8D6E82FB94F27]) [   
 ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=1455731169
   [junit4]   2> 6190719 INFO  (coreCloseExecutor-9972-thread-1) [    
x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore 
org.apache.solr.core.SolrCore@103fae7
   [junit4]   2> 6190751 INFO  (coreCloseExecutor-9972-thread-1) [    
x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for: 
solr.core.collection1
   [junit4]   2> 6190752 INFO  
(TEST-TestDistributedStatsComponentCardinality.test-seed#[4A8D6E82FB94F27]) [   
 ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 6190753 INFO  
(TEST-TestDistributedStatsComponentCardinality.test-seed#[4A8D6E82FB94F27]) [   
 ] o.e.j.s.h.ContextHandler Stopped 
o.e.j.s.ServletContextHandler@3f71ac13{/_,null,UNAVAILABLE}
   [junit4]   2> NOTE: download the large Jenkins line-docs file by running 
'ant get-jenkins-line-docs' in the lucene directory.
   [junit4]   2> NOTE: reproduce with: ant test  
-Dtestcase=TestDistributedStatsComponentCardinality -Dtests.method=test 
-Dtests.seed=4A8D6E82FB94F27 -Dtests.multiplier=2 -Dtests.nightly=true 
-Dtests.slow=true 
-Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-6.5/test-data/enwiki.random.lines.txt
 -Dtests.locale=sr-Latn-BA -Dtests.timezone=Europe/Vienna -Dtests.asserts=true 
-Dtests.file.encoding=UTF-8
   [junit4] ERROR    462s J2 | TestDistributedStatsComponentCardinality.test <<<
   [junit4]    > Throwable #1: 
com.carrotsearch.randomizedtesting.UncaughtExceptionError: Captured an uncaught 
exception in thread: Thread[id=30575, name=Thread-10783, state=RUNNABLE, 
group=TGRP-TestDistributedStatsComponentCardinality]
   [junit4]    >        at 
__randomizedtesting.SeedInfo.seed([4A8D6E82FB94F27:8CFCE932814522DF]:0)
   [junit4]    > Caused by: 
org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error 
from server at http://127.0.0.1:53396/_/collection1: 
org.apache.solr.client.solrj.SolrServerException: IOException occured when 
talking to server at: http://127.0.0.1:53285/_/collection1
   [junit4]    >        at 
__randomizedtesting.SeedInfo.seed([4A8D6E82FB94F27]:0)
   [junit4]    >        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:610)
   [junit4]    >        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:279)
   [junit4]    >        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:268)
   [junit4]    >        at 
org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:160)
   [junit4]    >        at 
org.apache.solr.client.solrj.SolrClient.query(SolrClient.java:942)
   [junit4]    >        at 
org.apache.solr.client.solrj.SolrClient.query(SolrClient.java:957)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$5.run(BaseDistributedSearchTestCase.java:627)
   [junit4]   2> 6190757 INFO  
(SUITE-TestDistributedStatsComponentCardinality-seed#[4A8D6E82FB94F27]-worker) 
[    ] o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> 6190757 INFO  
(SUITE-TestDistributedStatsComponentCardinality-seed#[4A8D6E82FB94F27]-worker) 
[    ] o.a.s.SolrTestCaseJ4 
------------------------------------------------------- Done waiting for 
tracked resources to be released
   [junit4]   2> NOTE: test params are: codec=CheapBastard, 
sim=RandomSimilarity(queryNorm=false,coord=yes): {}, locale=sr-Latn-BA, 
timezone=Europe/Vienna
   [junit4]   2> NOTE: Linux 3.13.0-85-generic amd64/Oracle Corporation 
1.8.0_121 (64-bit)/cpus=4,threads=1,free=286324456,total=531103744
   [junit4]   2> NOTE: All tests run in this JVM: [DeleteShardTest, 
TestConfigSetsAPIExclusivity, MetricUtilsTest, DocValuesMultiTest, 
BadComponentTest, OpenExchangeRatesOrgProviderTest, TestFieldCacheSort, 
JSONWriterTest, DistributedQueryComponentCustomSortTest, StatsComponentTest, 
UnloadDistributedZkTest, TestLockTree, TestStressCloudBlindAtomicUpdates, 
TestDFRSimilarityFactory, BadCopyFieldTest, TestShardHandlerFactory, 
TestRTimerTree, TestConfigSets, TestRestManager, AutoCommitTest, 
SimpleMLTQParserTest, DirectUpdateHandlerTest, TestPivotHelperCode, 
LukeRequestHandlerTest, TestSegmentSorting, TestTolerantSearch, 
TestSolrQueryParser, TestUniqueKeyFieldResource, TestUtils, 
TestSolrDeletionPolicy1, CreateCollectionCleanupTest, 
AddSchemaFieldsUpdateProcessorFactoryTest, SolrGraphiteReporterTest, 
TestAnalyzedSuggestions, FacetPivotSmallTest, DirectUpdateHandlerOptimizeTest, 
BadIndexSchemaTest, RuleEngineTest, RecoveryAfterSoftCommitTest, 
PreAnalyzedFieldTest, DistributedVersionInfoTest, BlockJoinFacetSimpleTest, 
TestDistributedMissingSort, TestNoOpRegenerator, 
ConcurrentDeleteAndCreateCollectionTest, SuggesterTest, TestGroupingSearch, 
BasicDistributedZk2Test, TestNamedUpdateProcessors, TestLFUCache, 
TestGraphTermsQParserPlugin, TestExactSharedStatsCache, TestApiFramework, 
TimeZoneUtilsTest, TestChildDocTransformer, TestDynamicFieldResource, 
DistributedFacetExistsSmallTest, TestZkChroot, DistanceUnitsTest, 
DateRangeFieldTest, XmlUpdateRequestHandlerTest, 
TestExclusionRuleCollectionAccess, HighlighterMaxOffsetTest, 
V2ApiIntegrationTest, SolrCLIZkUtilsTest, 
StatelessScriptUpdateProcessorFactoryTest, NoCacheHeaderTest, 
TestCustomDocTransformer, JvmMetricsTest, FullSolrCloudDistribCmdsTest, 
TestFieldTypeCollectionResource, TestCustomSort, TestRequestForwarding, 
SuggesterWFSTTest, ClassificationUpdateProcessorIntegrationTest, 
SecurityConfHandlerTest, ZkStateReaderTest, TestReversedWildcardFilterFactory, 
UtilsToolTest, FieldAnalysisRequestHandlerTest, CircularListTest, 
TestCollapseQParserPlugin, CoreMergeIndexesAdminHandlerTest, 
FieldMutatingUpdateProcessorTest, TestRandomRequestDistribution, 
SolrPluginUtilsTest, TestCoreDiscovery, TestMiniSolrCloudClusterSSL, 
TestSerializedLuceneMatchVersion, DistributedFacetPivotSmallAdvancedTest, 
TestFieldCache, ReplicationFactorTest, TestSchemaManager, TestWriterPerf, 
CollectionTooManyReplicasTest, HardAutoCommitTest, SOLR749Test, 
PrimitiveFieldTypeTest, RequestLoggingTest, TestSolrCloudWithHadoopAuthPlugin, 
CloudExitableDirectoryReaderTest, TestMinMaxOnMultiValuedField, 
MinimalSchemaTest, TestJmxMonitoredMap, TestSolrJ, 
DeleteLastCustomShardedReplicaTest, TestSort, 
TermVectorComponentDistributedTest, TestTolerantUpdateProcessorCloud, 
TestCloudSchemaless, TestConfig, TestInPlaceUpdatesStandalone, 
TestSha256AuthenticationProvider, TestSchemaResource, 
CollectionsAPIDistributedZkTest, OverseerTest, LeaderElectionIntegrationTest, 
BasicZkTest, RecoveryZkTest, TestRandomFaceting, LeaderElectionTest, 
ShardRoutingCustomTest, TestDistributedGrouping, TestRecovery, 
TestStressReorder, TestJoin, TestReload, DistributedTermsComponentTest, 
TestRangeQuery, HighlighterTest, ShowFileRequestHandlerTest, 
CurrencyFieldXmlFileTest, SolrIndexSplitterTest, SimplePostToolTest, 
TestExtendedDismaxParser, CoreAdminHandlerTest, SuggesterTSTTest, 
SpatialFilterTest, PolyFieldTest, WordBreakSolrSpellCheckerTest, 
SchemaVersionSpecificBehaviorTest, SolrCoreCheckLockOnStartupTest, 
TestPseudoReturnFields, SortByFunctionTest, TestRemoteStreaming, 
DistanceFunctionTest, SolrInfoMBeanTest, XsltUpdateRequestHandlerTest, 
CacheHeaderTest, TestQueryTypes, TestValueSourceCache, TestIndexingPerformance, 
IndexSchemaRuntimeFieldTest, LoggingHandlerTest, TestCollationField, 
ReturnFieldsTest, JsonLoaderTest, TestPartialUpdateDeduplication, 
PingRequestHandlerTest, TestMergePolicyConfig, TestSolrDeletionPolicy2, 
MultiTermTest, SampleTest, TestDocSet, TestBinaryField, 
TestElisionMultitermQuery, TestFuzzyAnalyzedSuggestions, 
ExternalFileFieldSortTest, TestSolrCoreProperties, TestPostingsSolrHighlighter, 
DirectSolrConnectionTest, NotRequiredUniqueKeyTest, TestLuceneMatchVersion, 
TestPhraseSuggestions, SpellPossibilityIteratorTest, TestCharFilters, 
SynonymTokenizerTest, TestXIncludeConfig, EchoParamsTest, 
TestPerFieldSimilarity, TestLMJelinekMercerSimilarityFactory, 
TestIBSimilarityFactory, ResourceLoaderTest, TestFastOutputStream, 
ScriptEngineTest, PluginInfoTest, TestFastLRUCache, PrimUtilsTest, 
TestSuggestSpellingConverter, DOMUtilTest, ClusterStateTest, 
TestDocumentBuilder, ZkNodePropsTest, UUIDFieldTest, FileUtilsTest, 
TestRTGBase, CursorPagingTest, SolrTestCaseJ4Test, TestHighlightDedupGrouping, 
TestSimpleTrackingShardHandler, TestEmbeddedSolrServerConstructors, 
TestEmbeddedSolrServerSchemaAPI, ConnectionReuseTest, ActionThrottleTest, 
AssignTest, AsyncCallRequestStatusResponseTest, CdcrBootstrapTest, 
CollectionsAPISolrJTest, DeleteInactiveReplicaTest, DeleteReplicaTest, 
DistribDocExpirationUpdateProcessorTest, HttpPartitionTest, 
LeaderInitiatedRecoveryOnCommitTest, NodeMutatorTest, 
OutOfBoxZkACLAndCredentialsProvidersTest, 
OverriddenZkACLAndCredentialsProvidersTest, 
OverseerCollectionConfigSetProcessorTest, OverseerModifyCollectionTest, 
OverseerStatusTest, PeerSyncReplicationTest, ReplaceNodeTest, SSLMigrationTest, 
SharedFSAutoReplicaFailoverTest, TestClusterProperties, TestConfigSetsAPI, 
TestDownShardTolerantSearch, TestLeaderElectionZkExpiry, 
TestMiniSolrCloudCluster, TestSolrCloudWithDelegationTokens, 
TestSolrCloudWithKerberosAlt, TlogReplayBufferedWhileIndexingTest, 
HdfsNNFailoverTest, HdfsRecoverLeaseTest, HdfsRecoveryZkTest, 
HdfsRestartWhileUpdatingTest, TestReqParamsAPI, TestRestoreCore, 
TestSolrConfigHandlerCloud, DistributedFacetPivotSmallTest, 
SuggestComponentContextFilterQueryTest, 
TestDistributedStatsComponentCardinality]
   [junit4] Completed [654/701 (2!)] on J2 in 462.54s, 1 test, 1 error <<< 
FAILURES!

[...truncated 62836 lines...]

