Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-8.x/17/

2 tests failed.
FAILED:  org.apache.solr.cloud.autoscaling.sim.TestSimExtremeIndexing.testScaleUp

Error Message:
{numFound=898550000,start=0,docs=[]} expected:<1000000000> but was:<898550000>

Stack Trace:
java.lang.AssertionError: {numFound=898550000,start=0,docs=[]} 
expected:<1000000000> but was:<898550000>
        at 
__randomizedtesting.SeedInfo.seed([47BB355B266F1DF2:66E573F92A41C353]:0)
        at org.junit.Assert.fail(Assert.java:88)
        at org.junit.Assert.failNotEquals(Assert.java:834)
        at org.junit.Assert.assertEquals(Assert.java:645)
        at 
org.apache.solr.cloud.autoscaling.sim.TestSimExtremeIndexing.testScaleUp(TestSimExtremeIndexing.java:135)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:748)
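
From the failure message, the check at TestSimExtremeIndexing.java:135 appears to
assert that the numFound of a query equals the one billion documents the test
indexes; this run came up about 101 million documents short. Below is a minimal
sketch of a check with that shape, not the actual test source: the class name,
URL and collection are hypothetical, and the real test runs against a simulated
cluster rather than over HTTP.

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;
    import static org.junit.Assert.assertEquals;

    public class NumFoundCheck {
      public static void main(String[] args) throws Exception {
        // Hypothetical URL/collection; the real test queries a simulated cluster.
        try (SolrClient client =
            new HttpSolrClient.Builder("http://localhost:8983/solr/testScaleUp").build()) {
          long expectedDocs = 1_000_000_000L; // the expected:<1000000000> side of the assertion
          QueryResponse rsp = client.query(new SolrQuery("*:*").setRows(0));
          // SolrDocumentList.toString() is what produces {numFound=...,start=0,docs=[]} above.
          assertEquals(rsp.getResults().toString(), expectedDocs, rsp.getResults().getNumFound());
        }
      }
    }

For a local re-run with the recorded seed, the usual invocation would be along
the lines of: ant test -Dtestcase=TestSimExtremeIndexing -Dtests.method=testScaleUp
-Dtests.seed=47BB355B266F1DF2 (nightly, locale and timezone flags from the full
build log omitted).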


FAILED:  org.apache.solr.cloud.hdfs.StressHdfsTest.test

Error Message:
Error from server at http://127.0.0.1:41088/nd_j/gn/delete_data_dir: Expected 
mime type application/octet-stream but got application/json. {   "error":{     
"metadata":[       "error-class","org.apache.solr.common.SolrException",       
"root-error-class","java.net.SocketTimeoutException"],     "msg":"Error trying 
to proxy request for url: 
http://127.0.0.1:37946/nd_j/gn/delete_data_dir/update",     
"trace":"org.apache.solr.common.SolrException: Error trying to proxy request 
for url: http://127.0.0.1:37946/nd_j/gn/delete_data_dir/update\n\tat 
org.apache.solr.servlet.HttpSolrCall.remoteQuery(HttpSolrCall.java:647)\n\tat 
org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:500)\n\tat 
org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:394)\n\tat
 
org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:340)\n\tat
 
org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)\n\tat
 
org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:164)\n\tat
 
org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)\n\tat
 
org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540)\n\tat
 
org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)\n\tat
 
org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1588)\n\tat
 
org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)\n\tat
 
org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1345)\n\tat
 
org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)\n\tat
 
org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)\n\tat 
org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1557)\n\tat
 
org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)\n\tat
 
org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1247)\n\tat
 
org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)\n\tat
 
org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:703)\n\tat
 
org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)\n\tat
 org.eclipse.jetty.server.Server.handle(Server.java:502)\n\tat 
org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:364)\n\tat 
org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260)\n\tat
 
org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:305)\n\tat
 org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103)\n\tat 
org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:118)\n\tat 
org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)\n\tat
 
org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)\n\tat
 java.lang.Thread.run(Thread.java:748)\nCaused by: 
java.net.SocketTimeoutException: Read timed out\n\tat 
java.net.SocketInputStream.socketRead0(Native Method)\n\tat 
java.net.SocketInputStream.socketRead(SocketInputStream.java:116)\n\tat 
java.net.SocketInputStream.read(SocketInputStream.java:171)\n\tat 
java.net.SocketInputStream.read(SocketInputStream.java:141)\n\tat 
org.apache.http.impl.io.SessionInputBufferImpl.streamRead(SessionInputBufferImpl.java:137)\n\tat
 
org.apache.http.impl.io.SessionInputBufferImpl.fillBuffer(SessionInputBufferImpl.java:153)\n\tat
 
org.apache.http.impl.io.SessionInputBufferImpl.readLine(SessionInputBufferImpl.java:282)\n\tat
 
org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:138)\n\tat
 
org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:56)\n\tat
 
org.apache.http.impl.io.AbstractMessageParser.parse(AbstractMessageParser.java:259)\n\tat
 
org.apache.http.impl.DefaultBHttpClientConnection.receiveResponseHeader(DefaultBHttpClientConnection.java:163)\n\tat
 
org.apache.http.impl.conn.CPoolProxy.receiveResponseHeader(CPoolProxy.java:165)\n\tat
 
org.apache.http.protocol.HttpRequestExecutor.doReceiveResponse(HttpRequestExecutor.java:273)\n\tat
 
org.apache.http.protocol.HttpRequestExecutor.execute(HttpRequestExecutor.java:125)\n\tat
 
org.apache.solr.util.stats.InstrumentedHttpRequestExecutor.execute(InstrumentedHttpRequestExecutor.java:120)\n\tat
 
org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:272)\n\tat
 
org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:185)\n\tat
 org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:89)\n\tat 
org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:110)\n\tat
 
org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)\n\tat
 
org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)\n\tat
 
org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)\n\tat
 org.apache.solr.servlet.HttpSolrCall.remoteQuery(HttpSolrCall.java:619)\n\t... 
28 more\n",     "code":500}} 

Stack Trace:
org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error 
from server at http://127.0.0.1:41088/nd_j/gn/delete_data_dir: Expected mime 
type application/octet-stream but got application/json. {
  "error":{
    "metadata":[
      "error-class","org.apache.solr.common.SolrException",
      "root-error-class","java.net.SocketTimeoutException"],
    "msg":"Error trying to proxy request for url: 
http://127.0.0.1:37946/nd_j/gn/delete_data_dir/update",
    "trace":"org.apache.solr.common.SolrException: Error trying to proxy 
request for url: http://127.0.0.1:37946/nd_j/gn/delete_data_dir/update\n\tat 
org.apache.solr.servlet.HttpSolrCall.remoteQuery(HttpSolrCall.java:647)\n\tat 
org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:500)\n\tat 
org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:394)\n\tat
 
org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:340)\n\tat
 
org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)\n\tat
 
org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:164)\n\tat
 
org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)\n\tat
 
org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540)\n\tat
 
org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)\n\tat
 
org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1588)\n\tat
 
org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)\n\tat
 
org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1345)\n\tat
 
org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)\n\tat
 
org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)\n\tat 
org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1557)\n\tat
 
org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)\n\tat
 
org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1247)\n\tat
 
org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)\n\tat
 
org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:703)\n\tat
 
org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)\n\tat
 org.eclipse.jetty.server.Server.handle(Server.java:502)\n\tat 
org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:364)\n\tat 
org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260)\n\tat
 
org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:305)\n\tat
 org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103)\n\tat 
org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:118)\n\tat 
org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)\n\tat
 
org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)\n\tat
 java.lang.Thread.run(Thread.java:748)\nCaused by: 
java.net.SocketTimeoutException: Read timed out\n\tat 
java.net.SocketInputStream.socketRead0(Native Method)\n\tat 
java.net.SocketInputStream.socketRead(SocketInputStream.java:116)\n\tat 
java.net.SocketInputStream.read(SocketInputStream.java:171)\n\tat 
java.net.SocketInputStream.read(SocketInputStream.java:141)\n\tat 
org.apache.http.impl.io.SessionInputBufferImpl.streamRead(SessionInputBufferImpl.java:137)\n\tat
 
org.apache.http.impl.io.SessionInputBufferImpl.fillBuffer(SessionInputBufferImpl.java:153)\n\tat
 
org.apache.http.impl.io.SessionInputBufferImpl.readLine(SessionInputBufferImpl.java:282)\n\tat
 
org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:138)\n\tat
 
org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:56)\n\tat
 
org.apache.http.impl.io.AbstractMessageParser.parse(AbstractMessageParser.java:259)\n\tat
 
org.apache.http.impl.DefaultBHttpClientConnection.receiveResponseHeader(DefaultBHttpClientConnection.java:163)\n\tat
 
org.apache.http.impl.conn.CPoolProxy.receiveResponseHeader(CPoolProxy.java:165)\n\tat
 
org.apache.http.protocol.HttpRequestExecutor.doReceiveResponse(HttpRequestExecutor.java:273)\n\tat
 
org.apache.http.protocol.HttpRequestExecutor.execute(HttpRequestExecutor.java:125)\n\tat
 
org.apache.solr.util.stats.InstrumentedHttpRequestExecutor.execute(InstrumentedHttpRequestExecutor.java:120)\n\tat
 
org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:272)\n\tat
 
org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:185)\n\tat
 org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:89)\n\tat 
org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:110)\n\tat
 
org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)\n\tat
 
org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)\n\tat
 
org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)\n\tat
 org.apache.solr.servlet.HttpSolrCall.remoteQuery(HttpSolrCall.java:619)\n\t... 
28 more\n",
    "code":500}}

        at 
__randomizedtesting.SeedInfo.seed([47BB355B266F1DF2:CFEF0A818893700A]:0)
        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:614)
        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:256)
        at 
org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:245)
        at 
org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:207)
        at org.apache.solr.client.solrj.SolrClient.commit(SolrClient.java:504)
        at org.apache.solr.client.solrj.SolrClient.commit(SolrClient.java:479)
        at 
org.apache.solr.cloud.hdfs.StressHdfsTest.createAndDeleteCollection(StressHdfsTest.java:197)
        at 
org.apache.solr.cloud.hdfs.StressHdfsTest.test(StressHdfsTest.java:108)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1082)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1054)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:748)
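
Here the problem is not the data but the transport: the commit issued from
StressHdfsTest.createAndDeleteCollection (StressHdfsTest.java:197) went to the
node on port 41088, which proxied it to the core on port 37946; the proxying
node's internal HTTP client hit a read timeout, and the JSON 500 error it sent
back tripped the client's response-parser check, which expected javabin
(application/octet-stream). A minimal SolrJ sketch of the failing call, with a
hypothetical base URL (delete_data_dir is the collection name from the log):

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    public class CommitThroughProxy {
      public static void main(String[] args) throws Exception {
        // Hypothetical node address; in the failing run the client talked to
        // http://127.0.0.1:41088/nd_j/gn/ and that node proxied the update to :37946.
        try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr")
            .withSocketTimeout(30000) // client-side read timeout (ms); the timeout in this
                                      // failure happened inside the proxying node, not here
            .build()) {
          // The call that failed in the test: a collection-level commit.
          client.commit("delete_data_dir");
        }
      }
    }

The same harness reproduces it locally (ant test -Dtestcase=StressHdfsTest
-Dtests.method=test -Dtests.seed=47BB355B266F1DF2), though a server-side read
timeout while proxying an update is more likely an overloaded nightly machine
than a seed-dependent failure.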




Build Log:
[...truncated 13582 lines...]
   [junit4] Suite: org.apache.solr.cloud.hdfs.StressHdfsTest
   [junit4]   2> 314836 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: 
test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> Creating dataDir: 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/init-core-data-001
   [junit4]   2> 314866 WARN  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=13 numCloses=13
   [junit4]   2> 314866 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) 
w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 314868 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: 
@org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl=https://issues.apache.org/jira/browse/SOLR-5776)
   [junit4]   2> 314868 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: 
/nd_j/gn
   [junit4]   2> 319313 WARN  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.h.u.NativeCodeLoader Unable to load native-hadoop library for your 
platform... using builtin-java classes where applicable
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 326826 WARN  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.h.m.i.MetricsConfig Cannot locate configuration: tried 
hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 328274 WARN  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 328491 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.Server 
jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 328508 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.session 
DefaultSessionIdManager workerName=node0
   [junit4]   2> 328509 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.session No 
SessionScavenger set, using defaults
   [junit4]   2> 328509 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.session 
node0 Scavenging every 660000ms
   [junit4]   2> 328555 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@7551a956{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 329885 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.w.WebAppContext@4f384cad{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/jetty-lucene2-us-west.apache.org-44066-hdfs-_-any-1596135155560064552.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 329886 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@73aa8d9f{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:44066}
   [junit4]   2> 329886 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.Server 
Started @330152ms
   [junit4]   2> 338628 WARN  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.h.h.s.c.MetricsLoggerTask Metrics logging will not be async since the 
logger is not log4j
   [junit4]   2> 341135 WARN  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 341174 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.Server 
jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 341260 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.session 
DefaultSessionIdManager workerName=node0
   [junit4]   2> 341260 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.session No 
SessionScavenger set, using defaults
   [junit4]   2> 341260 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.session 
node0 Scavenging every 600000ms
   [junit4]   2> 341261 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@2cc2e9d2{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 342722 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.w.WebAppContext@3acc372{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/jetty-localhost-44464-datanode-_-any-8108571928307454523.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 342731 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@48c90e4d{HTTP/1.1,[http/1.1]}{localhost:44464}
   [junit4]   2> 342731 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.Server 
Started @342996ms
   [junit4]   2> 347108 WARN  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.h.h.s.c.MetricsLoggerTask Metrics logging will not be async since the 
logger is not log4j
   [junit4]   2> 348014 WARN  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 348015 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.Server 
jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 348031 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.session 
DefaultSessionIdManager workerName=node0
   [junit4]   2> 348031 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.session No 
SessionScavenger set, using defaults
   [junit4]   2> 348032 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.session 
node0 Scavenging every 600000ms
   [junit4]   2> 348051 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@4a9977ed{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 349416 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.w.WebAppContext@3a609d19{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/jetty-localhost-41092-datanode-_-any-4592622671519344671.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 349451 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@70ca3b7b{HTTP/1.1,[http/1.1]}{localhost:41092}
   [junit4]   2> 349452 INFO  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] o.e.j.s.Server 
Started @349717ms
   [junit4]   2> 349954 WARN  
(SUITE-StressHdfsTest-seed#[47BB355B266F1DF2]-worker) [    ] 
o.a.h.h.s.c.MetricsLoggerTask Metrics logging will not be async since the 
logger is not log4j
   [junit4]   2> 359460 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x3712c58c5ceac63: Processing first storage report for 
DS-77ed73ae-abb9-4837-8e2c-1e33d9a1538e from datanode 
500514d3-0939-4403-ad55-ae3acfd77420
   [junit4]   2> 359492 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x3712c58c5ceac63: from storage 
DS-77ed73ae-abb9-4837-8e2c-1e33d9a1538e node 
DatanodeRegistration(127.0.0.1:44315, 
datanodeUuid=500514d3-0939-4403-ad55-ae3acfd77420, infoPort=36029, 
infoSecurePort=0, ipcPort=38879, 
storageInfo=lv=-57;cid=testClusterID;nsid=1557262187;c=1549575826793), blocks: 
0, hasStaleStorage: true, processing time: 32 msecs, invalidatedBlocks: 0
   [junit4]   2> 359502 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x3712c58c5ceac63: Processing first storage report for 
DS-4e3c5ae3-a4b3-4f04-abe2-94b1c60f3724 from datanode 
500514d3-0939-4403-ad55-ae3acfd77420
   [junit4]   2> 359502 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x3712c58c5ceac63: from storage 
DS-4e3c5ae3-a4b3-4f04-abe2-94b1c60f3724 node 
DatanodeRegistration(127.0.0.1:44315, 
datanodeUuid=500514d3-0939-4403-ad55-ae3acfd77420, infoPort=36029, 
infoSecurePort=0, ipcPort=38879, 
storageInfo=lv=-57;cid=testClusterID;nsid=1557262187;c=1549575826793), blocks: 
0, hasStaleStorage: false, processing time: 2 msecs, invalidatedBlocks: 0
   [junit4]   2> 359588 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x7d5e8312c3dfd90c: Processing first storage report for 
DS-456984f4-a7ab-462e-abd2-8ab2fb9ded2a from datanode 
9225595c-abc4-4603-bc1c-57995b6ac449
   [junit4]   2> 359589 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x7d5e8312c3dfd90c: from storage 
DS-456984f4-a7ab-462e-abd2-8ab2fb9ded2a node 
DatanodeRegistration(127.0.0.1:41017, 
datanodeUuid=9225595c-abc4-4603-bc1c-57995b6ac449, infoPort=43840, 
infoSecurePort=0, ipcPort=44362, 
storageInfo=lv=-57;cid=testClusterID;nsid=1557262187;c=1549575826793), blocks: 
0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
   [junit4]   2> 359589 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x7d5e8312c3dfd90c: Processing first storage report for 
DS-2f4e8c8c-8faf-4e1f-8ca0-553781d423bd from datanode 
9225595c-abc4-4603-bc1c-57995b6ac449
   [junit4]   2> 359589 INFO  (Block report processor) [    ] BlockStateChange 
BLOCK* processReport 0x7d5e8312c3dfd90c: from storage 
DS-2f4e8c8c-8faf-4e1f-8ca0-553781d423bd node 
DatanodeRegistration(127.0.0.1:41017, 
datanodeUuid=9225595c-abc4-4603-bc1c-57995b6ac449, infoPort=43840, 
infoSecurePort=0, ipcPort=44362, 
storageInfo=lv=-57;cid=testClusterID;nsid=1557262187;c=1549575826793), blocks: 
0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 360261 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
STARTING ZK TEST SERVER
   [junit4]   2> 360272 INFO  (ZkTestServer Run Thread) [    ] 
o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 360272 INFO  (ZkTestServer Run Thread) [    ] 
o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 360380 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
start zk server on port:39303
   [junit4]   2> 360380 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
parse host and port list: 127.0.0.1:39303
   [junit4]   2> 360380 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
connecting to 127.0.0.1 39303
   [junit4]   2> 360477 INFO  (zkConnectionManagerCallback-364-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 360605 INFO  (zkConnectionManagerCallback-366-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 360622 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
put 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
 to /configs/conf1/solrconfig.xml
   [junit4]   2> 360623 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
put 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/core/src/test-files/solr/collection1/conf/schema.xml
 to /configs/conf1/schema.xml
   [junit4]   2> 360625 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
put 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
 to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 360651 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
put 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
 to /configs/conf1/stopwords.txt
   [junit4]   2> 360686 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
put 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/core/src/test-files/solr/collection1/conf/protwords.txt
 to /configs/conf1/protwords.txt
   [junit4]   2> 360689 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
put 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/core/src/test-files/solr/collection1/conf/currency.xml
 to /configs/conf1/currency.xml
   [junit4]   2> 360897 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
put 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
 to /configs/conf1/enumsConfig.xml
   [junit4]   2> 360898 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
put 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
 to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 360900 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
put 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
 to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 360934 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
put 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
 to /configs/conf1/old_synonyms.txt
   [junit4]   2> 360935 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkTestServer 
put 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
 to /configs/conf1/synonyms.txt
   [junit4]   2> 360936 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly 
asked otherwise
   [junit4]   2> 362300 WARN  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 362300 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 362300 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 362300 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.e.j.s.Server 
jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 362376 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.e.j.s.session 
DefaultSessionIdManager workerName=node0
   [junit4]   2> 362376 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.e.j.s.session No 
SessionScavenger set, using defaults
   [junit4]   2> 362376 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.e.j.s.session node0 
Scavenging every 600000ms
   [junit4]   2> 362378 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@5f4c42f0{/nd_j/gn,null,AVAILABLE}
   [junit4]   2> 362412 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@6231a16{HTTP/1.1,[http/1.1, 
h2c]}{127.0.0.1:37047}
   [junit4]   2> 362412 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.e.j.s.Server 
Started @362677ms
   [junit4]   2> 362412 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://lucene2-us-west.apache.org:43224/hdfs__lucene2-us-west.apache.org_43224__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-8.x_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001_tempDir-002_control_data,
 hostContext=/nd_j/gn, hostPort=37047, 
coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/control-001/cores}
   [junit4]   2> 362413 ERROR 
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 362413 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 362413 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
8.0.0
   [junit4]   2> 362413 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 362413 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 362413 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2019-02-07T21:44:25.260Z
   [junit4]   2> 362501 INFO  (zkConnectionManagerCallback-368-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 362580 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 362580 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.SolrXmlConfig 
Loading container configuration from 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/control-001/solr.xml
   [junit4]   2> 362612 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.SolrXmlConfig 
Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 362612 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.SolrXmlConfig 
Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 362613 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.SolrXmlConfig 
MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0, but no JMX 
reporters were configured - adding default JMX reporter.
   [junit4]   2> 362941 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: 
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 362956 WARN  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.e.j.u.s.S.config No 
Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@48ecf663[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 363026 WARN  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.e.j.u.s.S.config No 
Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@7cb76c58[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 363027 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ZkContainer 
Zookeeper client=127.0.0.1:39303/solr
   [junit4]   2> 363173 INFO  (zkConnectionManagerCallback-374-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 363212 INFO  (zkConnectionManagerCallback-376-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 364362 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.c.OverseerElectionContext I am going to be the leader 
127.0.0.1:37047_nd_j%2Fgn
   [junit4]   2> 364363 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.c.Overseer Overseer 
(id=74149061711691780-127.0.0.1:37047_nd_j%2Fgn-n_0000000000) starting
   [junit4]   2> 364507 INFO  (zkConnectionManagerCallback-383-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 364523 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:39303/solr 
ready
   [junit4]   2> 364545 INFO  
(OverseerStateUpdate-74149061711691780-127.0.0.1:37047_nd_j%2Fgn-n_0000000000) 
[n:127.0.0.1:37047_nd_j%2Fgn    ] o.a.s.c.Overseer Starting to work on the main 
queue : 127.0.0.1:37047_nd_j%2Fgn
   [junit4]   2> 364561 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:37047_nd_j%2Fgn
   [junit4]   2> 364582 INFO  (zkCallback-375-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 364616 INFO  (zkCallback-382-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 364811 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics 
history in memory.
   [junit4]   2> 365345 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 
'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0
   [junit4]   2> 365503 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 
'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0
   [junit4]   2> 365504 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 
'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0
   [junit4]   2> 365505 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/control-001/cores
   [junit4]   2> 366536 INFO  (zkConnectionManagerCallback-389-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 366538 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 366539 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:39303/solr ready
   [junit4]   2> 366610 INFO  (qtp1125027328-1262) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params 
collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:37047_nd_j%252Fgn&wt=javabin&version=2
 and sendToOCPQueue=true
   [junit4]   2> 366751 INFO  
(OverseerThreadFactory-291-thread-1-processing-n:127.0.0.1:37047_nd_j%2Fgn) 
[n:127.0.0.1:37047_nd_j%2Fgn    ] o.a.s.c.a.c.CreateCollectionCmd Create 
collection control_collection
   [junit4]   2> 366978 INFO  (qtp1125027328-1264) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics 
params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core}
 status=0 QTime=0
   [junit4]   2> 367028 INFO  (qtp1125027328-1264) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics 
params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core}
 status=0 QTime=0
   [junit4]   2> 367029 INFO  (qtp1125027328-1264) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics 
params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core}
 status=0 QTime=0
   [junit4]   2> 367030 INFO  (qtp1125027328-1264) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics 
params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core}
 status=0 QTime=0
   [junit4]   2> 367081 INFO  (qtp1125027328-1264) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics 
params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core}
 status=0 QTime=50
   [junit4]   2> 367606 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
   x:control_collection_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core 
create command 
qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 367607 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
   x:control_collection_shard1_replica_n1] 
o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 
transient cores
   [junit4]   2> 369037 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.0.0
   [junit4]   2> 369292 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.s.IndexSchema [control_collection_shard1_replica_n1] Schema name=test
   [junit4]   2> 370663 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 370882 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' 
using configuration from collection control_collection, trusted=true
   [junit4]   2> 370883 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.core.control_collection.shard1.replica_n1' (registry 
'solr.core.control_collection.shard1.replica_n1') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0
   [junit4]   2> 370919 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory 
solr.hdfs.home=hdfs://lucene2-us-west.apache.org:43224/solr_hdfs_home
   [junit4]   2> 370919 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 370919 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore 
at 
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/control-001/cores/control_collection_shard1_replica_n1],
 
dataDir=[hdfs://lucene2-us-west.apache.org:43224/solr_hdfs_home/control_collection/core_node2/data/]
   [junit4]   2> 370933 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://lucene2-us-west.apache.org:43224/solr_hdfs_home/control_collection/core_node2/data/snapshot_metadata
   [junit4]   2> 371024 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 371024 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[8388608] will allocate [1] slabs and use ~[8388608] bytes
   [junit4]   2> 371024 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 378101 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 378152 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://lucene2-us-west.apache.org:43224/solr_hdfs_home/control_collection/core_node2/data
   [junit4]   2> 378356 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory creating directory factory for path 
hdfs://lucene2-us-west.apache.org:43224/solr_hdfs_home/control_collection/core_node2/data/index
   [junit4]   2> 378390 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct 
memory allocation set to [true]
   [junit4]   2> 378390 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of 
[8388608] will allocate [1] slabs and use ~[8388608] bytes
   [junit4]   2> 378390 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 378627 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 378627 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.MockRandomMergePolicy: 
org.apache.lucene.index.MockRandomMergePolicy@49695b5c
   [junit4]   2> 383230 WARN  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 384023 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 384023 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH 
numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 384023 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 384234 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 384234 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 384289 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: 
maxMergeAtOnce=2, maxMergeAtOnceExplicit=4, maxMergedSegmentMB=27.62890625, 
floorSegmentMB=1.51171875, forceMergeDeletesPctAllowed=19.896867237999892, 
segmentsPerTier=12.0, maxCFSSegmentSizeMB=1.7060546875, noCFSRatio=0.0, 
deletesPctAllowed=31.890946037441687
   [junit4]   2> 385006 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.s.SolrIndexSearcher Opening 
[Searcher@351309be[control_collection_shard1_replica_n1] main]
   [junit4]   2> 385062 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 385079 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 385135 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 385186 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.u.UpdateLog Could not find max version in index or recent updates, using 
new clock 1624848086370091008
   [junit4]   2> 385258 INFO  
(searcherExecutor-296-thread-1-processing-n:127.0.0.1:37047_nd_j%2Fgn 
x:control_collection_shard1_replica_n1 c:control_collection s:shard1) 
[n:127.0.0.1:37047_nd_j%2Fgn c:control_collection s:shard1  
x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore 
[control_collection_shard1_replica_n1] Registered new searcher 
Searcher@351309be[control_collection_shard1_replica_n1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 385330 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.ZkShardTerms Successful update of terms at 
/collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, 
version=0}
   [junit4]   2> 385330 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContextBase make sure parent is created 
/collections/control_collection/leaders/shard1
   [junit4]   2> 385353 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 385353 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 385353 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.SyncStrategy Sync replicas to 
http://127.0.0.1:37047/nd_j/gn/control_collection_shard1_replica_n1/
   [junit4]   2> 385353 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 385353 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.SyncStrategy 
http://127.0.0.1:37047/nd_j/gn/control_collection_shard1_replica_n1/ has no 
replicas
   [junit4]   2> 385353 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node 
/collections/control_collection/leaders/shard1/leader after winning as 
/collections/control_collection/leader_elect/shard1/election/74149061711691780-core_node2-n_0000000000
   [junit4]   2> 385367 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.ShardLeaderElectionContext I am the new leader: 
http://127.0.0.1:37047/nd_j/gn/control_collection_shard1_replica_n1/ shard1
   [junit4]   2> 385387 INFO  (zkCallback-375-thread-1) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/control_collection/state.json] for collection 
[control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 385388 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 385389 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
c:control_collection s:shard1  x:control_collection_shard1_replica_n1] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT}
 status=0 QTime=17783
   [junit4]   2> 385536 INFO  (zkCallback-375-thread-2) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/control_collection/state.json] for collection 
[control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 385679 INFO  (qtp1125027328-1262) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at 
most 45 seconds. Check all shard replicas
   [junit4]   2> 385767 INFO  (zkCallback-375-thread-3) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/control_collection/state.json] for collection 
[control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 385767 INFO  (zkCallback-375-thread-2) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/control_collection/state.json] for collection 
[control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 385767 INFO  (zkCallback-375-thread-1) [    ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/control_collection/state.json] for collection 
[control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 385788 INFO  (qtp1125027328-1262) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections 
params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:37047_nd_j%252Fgn&wt=javabin&version=2}
 status=0 QTime=19178
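
The same CREATE can be issued from a client; a minimal SolrJ 8.x sketch
against the ZooKeeper address and /solr chroot shown in the log (error
handling and the response check are omitted, and the createNodeSet pin to
127.0.0.1:37047_nd_j%2Fgn that the logged request carries is left out for
brevity):

    import java.util.Collections;
    import java.util.Optional;
    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;

    public class CreateControlCollection {
        public static void main(String[] args) throws Exception {
            try (CloudSolrClient client = new CloudSolrClient.Builder(
                    Collections.singletonList("127.0.0.1:39303"), Optional.of("/solr")).build()) {
                // Mirrors the /admin/collections CREATE in the log:
                // configset conf1, 1 shard, 1 NRT replica.
                CollectionAdminRequest
                        .createCollection("control_collection", "conf1", 1, 1)
                        .process(client);
            }
        }
    }
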
   [junit4]   2> 386059 INFO  (zkConnectionManagerCallback-395-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 386061 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 386062 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:39303/solr ready
   [junit4]   2> 386062 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] o.a.s.c.ChaosMonkey 
monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 386080 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params 
collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=2&wt=javabin&version=2
 and sendToOCPQueue=true
   [junit4]   2> 386133 INFO  
(OverseerCollectionConfigSetProcessor-74149061711691780-127.0.0.1:37047_nd_j%2Fgn-n_0000000000)
 [n:127.0.0.1:37047_nd_j%2Fgn    ] o.a.s.c.OverseerTaskQueue Response ZK path: 
/overseer/collection-queue-work/qnr-0000000000 doesn't exist.  Requestor may 
have disconnected from ZooKeeper
   [junit4]   2> 386134 INFO  
(OverseerThreadFactory-291-thread-2-processing-n:127.0.0.1:37047_nd_j%2Fgn) 
[n:127.0.0.1:37047_nd_j%2Fgn    ] o.a.s.c.a.c.CreateCollectionCmd Create 
collection collection1
   [junit4]   2> 386370 WARN  
(OverseerThreadFactory-291-thread-2-processing-n:127.0.0.1:37047_nd_j%2Fgn) 
[n:127.0.0.1:37047_nd_j%2Fgn    ] o.a.s.c.a.c.CreateCollectionCmd It is unusual 
to create a collection (collection1) without cores.
   [junit4]   2> 386442 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at 
most 45 seconds. Check all shard replicas
   [junit4]   2> 386443 INFO  (qtp1125027328-1265) [n:127.0.0.1:37047_nd_j%2Fgn 
   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections 
params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=2&wt=javabin&version=2}
 status=0 QTime=362
   [junit4]   2> 386476 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase Creating jetty instances 
pullReplicaCount=0 numOtherReplicas=7
   [junit4]   2> 388170 INFO  
(OverseerCollectionConfigSetProcessor-74149061711691780-127.0.0.1:37047_nd_j%2Fgn-n_0000000000)
 [n:127.0.0.1:37047_nd_j%2Fgn    ] o.a.s.c.OverseerTaskQueue Response ZK path: 
/overseer/collection-queue-work/qnr-0000000002 doesn't exist.  Requestor may 
have disconnected from ZooKeeper
   [junit4]   2> 388551 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-1-001
 of type NRT
   [junit4]   2> 388552 WARN  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 388552 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 388552 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 388553 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 388761 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 388761 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 388761 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 388762 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@13748926{/nd_j/gn,null,AVAILABLE}
   [junit4]   2> 388770 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@3693fe19{HTTP/1.1,[http/1.1, 
h2c]}{127.0.0.1:39762}
   [junit4]   2> 388770 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.Server Started @389035ms
   [junit4]   2> 388770 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://lucene2-us-west.apache.org:43224/hdfs__lucene2-us-west.apache.org_43224__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-8.x_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001_tempDir-002_jetty1,
 replicaType=NRT, solrconfig=solrconfig.xml, hostContext=/nd_j/gn, 
hostPort=39762, 
coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-1-001/cores}
   [junit4]   2> 388770 ERROR (closeThreadPool-396-thread-1) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 388770 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 388770 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
8.0.0
   [junit4]   2> 388770 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 388770 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 388770 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2019-02-07T21:44:51.617Z
   [junit4]   2> 388866 INFO  (zkConnectionManagerCallback-398-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 388915 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 388916 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-1-001/solr.xml
   [junit4]   2> 388933 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay 
is ignored
   [junit4]   2> 388933 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter 
autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 388934 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.c.SolrXmlConfig MBean server found: 
com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0, but no JMX reporters were 
configured - adding default JMX reporter.
   [junit4]   2> 390912 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-2-001
 of type NRT
   [junit4]   2> 391112 WARN  (closeThreadPool-396-thread-2) [    ] 
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 391112 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 391112 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 391112 INFO  (closeThreadPool-396-thread-2) [    ] 
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 391128 INFO  (closeThreadPool-396-thread-2) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 391128 INFO  (closeThreadPool-396-thread-2) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 391128 INFO  (closeThreadPool-396-thread-2) [    ] 
o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 391128 INFO  (closeThreadPool-396-thread-2) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@4a4750fc{/nd_j/gn,null,AVAILABLE}
   [junit4]   2> 391225 INFO  (closeThreadPool-396-thread-2) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@15f922a8{HTTP/1.1,[http/1.1, 
h2c]}{127.0.0.1:45700}
   [junit4]   2> 391225 INFO  (closeThreadPool-396-thread-2) [    ] 
o.e.j.s.Server Started @391490ms
   [junit4]   2> 391225 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://lucene2-us-west.apache.org:43224/hdfs__lucene2-us-west.apache.org_43224__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-8.x_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001_tempDir-002_jetty2,
 replicaType=NRT, solrconfig=solrconfig.xml, hostContext=/nd_j/gn, 
hostPort=45700, 
coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-2-001/cores}
   [junit4]   2> 391226 ERROR (closeThreadPool-396-thread-2) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 391226 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 391226 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
8.0.0
   [junit4]   2> 391226 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 391226 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 391226 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2019-02-07T21:44:54.073Z
   [junit4]   2> 391454 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: 
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 391482 INFO  (zkConnectionManagerCallback-400-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 391483 WARN  (closeThreadPool-396-thread-1) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@19e01186[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 391511 WARN  (closeThreadPool-396-thread-1) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@941d7a[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 391565 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:39303/solr
   [junit4]   2> 391643 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 391643 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-2-001/solr.xml
   [junit4]   2> 391696 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay 
is ignored
   [junit4]   2> 391696 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter 
autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 391697 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.c.SolrXmlConfig MBean server found: 
com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0, but no JMX reporters were 
configured - adding default JMX reporter.
   [junit4]   2> 391730 INFO  (zkConnectionManagerCallback-406-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 392116 INFO  (zkConnectionManagerCallback-408-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 392169 INFO  (closeThreadPool-396-thread-1) 
[n:127.0.0.1:39762_nd_j%2Fgn    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (0) -> (1)
   [junit4]   2> 392223 INFO  (closeThreadPool-396-thread-1) 
[n:127.0.0.1:39762_nd_j%2Fgn    ] o.a.s.c.ZkController Publish 
node=127.0.0.1:39762_nd_j%2Fgn as DOWN
   [junit4]   2> 392224 INFO  (closeThreadPool-396-thread-1) 
[n:127.0.0.1:39762_nd_j%2Fgn    ] o.a.s.c.TransientSolrCoreCacheDefault 
Allocating transient cache for 4 transient cores
   [junit4]   2> 392224 INFO  (closeThreadPool-396-thread-1) 
[n:127.0.0.1:39762_nd_j%2Fgn    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:39762_nd_j%2Fgn
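
Live-node registration is just an ephemeral child znode under /live_nodes, so
the "Updated live nodes" callbacks that follow are ordinary ZooKeeper child
watches firing. A minimal sketch that lists the same znodes with the stock
ZooKeeper client (connect string and /solr chroot taken from the log; a no-op
watcher for brevity):

    import java.util.List;
    import org.apache.zookeeper.ZooKeeper;

    public class ListLiveNodes {
        public static void main(String[] args) throws Exception {
            // Connect through the /solr chroot the test cluster uses.
            ZooKeeper zk = new ZooKeeper("127.0.0.1:39303/solr", 30_000, event -> { });
            try {
                List<String> liveNodes = zk.getChildren("/live_nodes", false);
                // After the registration above this would include 127.0.0.1:39762_nd_j%2Fgn.
                liveNodes.forEach(System.out::println);
            } finally {
                zk.close();
            }
        }
    }
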
   [junit4]   2> 392294 INFO  (zkCallback-375-thread-2) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 392298 INFO  (zkCallback-382-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 392444 INFO  (zkCallback-407-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 392456 INFO  (zkCallback-394-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 392990 INFO  (zkConnectionManagerCallback-415-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 393206 INFO  (closeThreadPool-396-thread-1) 
[n:127.0.0.1:39762_nd_j%2Fgn    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (0) -> (2)
   [junit4]   2> 393207 INFO  (closeThreadPool-396-thread-1) 
[n:127.0.0.1:39762_nd_j%2Fgn    ] o.a.s.c.s.i.ZkClientClusterStateProvider 
Cluster at 127.0.0.1:39303/solr ready
   [junit4]   2> 393367 INFO  (closeThreadPool-396-thread-1) 
[n:127.0.0.1:39762_nd_j%2Fgn    ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 393625 INFO  (closeThreadPool-396-thread-1) 
[n:127.0.0.1:39762_nd_j%2Fgn    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0
   [junit4]   2> 393769 INFO  (closeThreadPool-396-thread-1) 
[n:127.0.0.1:39762_nd_j%2Fgn    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0
   [junit4]   2> 393769 INFO  (closeThreadPool-396-thread-1) 
[n:127.0.0.1:39762_nd_j%2Fgn    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0
   [junit4]   2> 393858 INFO  (closeThreadPool-396-thread-1) 
[n:127.0.0.1:39762_nd_j%2Fgn    ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-1-001/cores
   [junit4]   2> 394010 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-3-001
 of type NRT
   [junit4]   2> 394134 WARN  (closeThreadPool-396-thread-3) [    ] 
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 394134 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 394134 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 394134 INFO  (closeThreadPool-396-thread-3) [    ] 
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 394692 INFO  (closeThreadPool-396-thread-3) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 394692 INFO  (closeThreadPool-396-thread-3) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 394692 INFO  (closeThreadPool-396-thread-3) [    ] 
o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 394729 INFO  (closeThreadPool-396-thread-3) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@1242cfb2{/nd_j/gn,null,AVAILABLE}
   [junit4]   2> 394737 INFO  (closeThreadPool-396-thread-3) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@5b28b0ef{HTTP/1.1,[http/1.1, 
h2c]}{127.0.0.1:41166}
   [junit4]   2> 394737 INFO  (closeThreadPool-396-thread-3) [    ] 
o.e.j.s.Server Started @395002ms
   [junit4]   2> 394737 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://lucene2-us-west.apache.org:43224/hdfs__lucene2-us-west.apache.org_43224__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-8.x_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001_tempDir-002_jetty3,
 replicaType=NRT, solrconfig=solrconfig.xml, hostContext=/nd_j/gn, 
hostPort=41166, 
coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-3-001/cores}
   [junit4]   2> 394738 ERROR (closeThreadPool-396-thread-3) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 394738 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 394738 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
8.0.0
   [junit4]   2> 394738 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 394738 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 394738 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2019-02-07T21:44:57.585Z
   [junit4]   2> 394883 INFO  (zkConnectionManagerCallback-418-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 395204 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 395204 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-3-001/solr.xml
   [junit4]   2> 395440 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: 
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 395446 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay 
is ignored
   [junit4]   2> 395446 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.c.SolrXmlConfig Configuration parameter 
autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 395447 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.c.SolrXmlConfig MBean server found: 
com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0, but no JMX reporters were 
configured - adding default JMX reporter.
   [junit4]   2> 395455 WARN  (closeThreadPool-396-thread-2) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@34085868[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 395620 WARN  (closeThreadPool-396-thread-2) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@77f9b8dc[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 395622 INFO  (closeThreadPool-396-thread-2) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:39303/solr
   [junit4]   2> 395839 INFO  (zkConnectionManagerCallback-424-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 396386 INFO  (zkConnectionManagerCallback-426-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 396515 INFO  (closeThreadPool-396-thread-2) 
[n:127.0.0.1:45700_nd_j%2Fgn    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (0) -> (2)
   [junit4]   2> 396550 INFO  (closeThreadPool-396-thread-2) 
[n:127.0.0.1:45700_nd_j%2Fgn    ] o.a.s.c.ZkController Publish 
node=127.0.0.1:45700_nd_j%2Fgn as DOWN
   [junit4]   2> 396551 INFO  (closeThreadPool-396-thread-2) 
[n:127.0.0.1:45700_nd_j%2Fgn    ] o.a.s.c.TransientSolrCoreCacheDefault 
Allocating transient cache for 4 transient cores
   [junit4]   2> 396551 INFO  (closeThreadPool-396-thread-2) 
[n:127.0.0.1:45700_nd_j%2Fgn    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:45700_nd_j%2Fgn
   [junit4]   2> 396569 INFO  (zkCallback-407-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 396569 INFO  (zkCallback-382-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 396570 INFO  (zkCallback-394-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 396584 INFO  (zkCallback-375-thread-2) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 396641 INFO  (zkCallback-414-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 396641 INFO  (zkCallback-425-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 397074 INFO  (zkConnectionManagerCallback-433-thread-1) [    ] 
o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 397132 INFO  (closeThreadPool-396-thread-2) 
[n:127.0.0.1:45700_nd_j%2Fgn    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (0) -> (3)
   [junit4]   2> 397133 INFO  (closeThreadPool-396-thread-2) 
[n:127.0.0.1:45700_nd_j%2Fgn    ] o.a.s.c.s.i.ZkClientClusterStateProvider 
Cluster at 127.0.0.1:39303/solr ready
   [junit4]   2> 397168 INFO  (closeThreadPool-396-thread-2) 
[n:127.0.0.1:45700_nd_j%2Fgn    ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 397331 INFO  (closeThreadPool-396-thread-2) 
[n:127.0.0.1:45700_nd_j%2Fgn    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0
   [junit4]   2> 397406 INFO  (closeThreadPool-396-thread-2) 
[n:127.0.0.1:45700_nd_j%2Fgn    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0
   [junit4]   2> 397440 INFO  (closeThreadPool-396-thread-2) 
[n:127.0.0.1:45700_nd_j%2Fgn    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@671682e0
   [junit4]   2> 397479 INFO  (closeThreadPool-396-thread-2) 
[n:127.0.0.1:45700_nd_j%2Fgn    ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-2-001/cores
   [junit4]   2> 397674 INFO  
(TEST-StressHdfsTest.test-seed#[47BB355B266F1DF2]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 4 in directory 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-4-001
 of type NRT
   [junit4]   2> 397675 WARN  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 397675 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 397675 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 397675 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: 
c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 397824 INFO  (closeThreadPool-396-thread-3) [    ] 
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: 
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 397872 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 397873 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 397873 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 397968 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@6ebe7ded{/nd_j/gn,null,AVAILABLE}
   [junit4]   2> 398007 WARN  (closeThreadPool-396-thread-3) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@5be83c50[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 398257 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@79d808be{HTTP/1.1,[http/1.1, 
h2c]}{127.0.0.1:37946}
   [junit4]   2> 398257 INFO  (closeThreadPool-396-thread-1) [    ] 
o.e.j.s.Server Started @398523ms
   [junit4]   2> 398258 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=hdfs://lucene2-us-west.apache.org:43224/hdfs__lucene2-us-west.apache.org_43224__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-8.x_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001_tempDir-002_jetty4,
 replicaType=NRT, solrconfig=solrconfig.xml, hostContext=/nd_j/gn, 
hostPort=37946, 
coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_47BB355B266F1DF2-001/shard-4-001/cores}
   [junit4]   2> 398258 ERROR (closeThreadPool-396-thread-1) [    ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 398258 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 398258 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
8.0.0
   [junit4]   2> 398258 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 398258 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 398258 INFO  (closeThreadPool-396-thread-1) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2019-02-07T21:45:01.105Z
   [junit4]   2> 398411 WARN  (closeThreadPool-396-thread-3) [    ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
SslContextFactory@5b73d046[provider=null,keyStore=null,trustStore=nul

[...truncated too long message...]

ce] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/null1402004402
     [copy] Copying 237 files to 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/null1402004402
   [delete] Deleting directory 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-8.x/checkout/solr/null1402004402

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: 
org.eclipse.jgit#org.eclipse.jgit-caller;working
[ivy:cachepath]         confs: [default]
[ivy:cachepath]         found 
org.eclipse.jgit#org.eclipse.jgit;4.6.0.201612231935-r in public
[ivy:cachepath]         found com.jcraft#jsch;0.1.53 in public
[ivy:cachepath]         found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath]         found org.apache.httpcomponents#httpclient;4.3.6 in 
public
[ivy:cachepath]         found org.apache.httpcomponents#httpcore;4.3.3 in public
[ivy:cachepath]         found commons-logging#commons-logging;1.1.3 in public
[ivy:cachepath]         found commons-codec#commons-codec;1.6 in public
[ivy:cachepath]         found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 86ms :: artifacts dl 12ms
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      default     |   8   |   0   |   0   |   0   ||   8   |   0   |
        ---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
[wc-checker] SLF4J: Defaulting to no-operation (NOP) logger implementation
[wc-checker] SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for 
further details.
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 590 minutes 11 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
        at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
        at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
        at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
        at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene2
                at 
hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
                at 
hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
                at hudson.remoting.Channel.call(Channel.java:955)
                at hudson.FilePath.act(FilePath.java:1072)
                at hudson.FilePath.act(FilePath.java:1061)
                at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
                at 
hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
                at 
hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
                at 
hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
                at hudson.model.Build$BuildExecution.post2(Build.java:186)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
                at hudson.model.Run.execute(Run.java:1835)
                at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
                at 
hudson.model.ResourceController.execute(ResourceController.java:97)
                at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
        at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
        at hudson.remoting.UserRequest.perform(UserRequest.java:212)
        at hudson.remoting.UserRequest.perform(UserRequest.java:54)
        at hudson.remoting.Request$2.run(Request.java:369)
        at 
hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no 
matches found within 10000
        at hudson.FilePath.act(FilePath.java:1074)
        at hudson.FilePath.act(FilePath.java:1061)
        at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
        at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
        at 
hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
        at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
        at hudson.model.Build$BuildExecution.post2(Build.java:186)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
        at hudson.model.Run.execute(Run.java:1835)
        at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
        at hudson.model.ResourceController.execute(ResourceController.java:97)
        at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern 
"**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)
