[JENKINS] Lucene-Solr-NightlyTests-7.x - Build # 415 - Still Unstable
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-7.x/415/ 8 tests failed. FAILED: org.apache.solr.cloud.FullSolrCloudDistribCmdsTest.test Error Message: Timeout occured while waiting response from server at: http://127.0.0.1:34414/ms_zdg/control_collection Stack Trace: org.apache.solr.client.solrj.SolrServerException: Timeout occured while waiting response from server at: http://127.0.0.1:34414/ms_zdg/control_collection at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:654) at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:255) at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:244) at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:194) at org.apache.solr.client.solrj.SolrClient.commit(SolrClient.java:504) at org.apache.solr.client.solrj.SolrClient.commit(SolrClient.java:479) at org.apache.solr.cloud.AbstractFullDistribZkTestBase.commit(AbstractFullDistribZkTestBase.java:1701) at org.apache.solr.cloud.FullSolrCloudDistribCmdsTest.testIndexingBatchPerRequestWithHttpSolrClient(FullSolrCloudDistribCmdsTest.java:657) at org.apache.solr.cloud.FullSolrCloudDistribCmdsTest.test(FullSolrCloudDistribCmdsTest.java:153) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750) at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938) at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974) at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988) at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1063) at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1035) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817) at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468) at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947) at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832) at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883) at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at
[JENKINS] Lucene-Solr-master-MacOSX (64bit/jdk1.8.0) - Build # 4988 - Unstable!
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-MacOSX/4988/ Java: 64bit/jdk1.8.0 -XX:-UseCompressedOops -XX:+UseConcMarkSweepGC 1 tests failed. FAILED: junit.framework.TestSuite.org.apache.solr.cloud.ChaosMonkeySafeLeaderWithPullReplicasTest Error Message: ObjectTracker found 5 object(s) that were not released!!! [InternalHttpClient, SolrCore, MMapDirectory, MMapDirectory, MMapDirectory] org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.http.impl.client.InternalHttpClient at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42) at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:321) at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:330) at org.apache.solr.handler.IndexFetcher.createHttpClient(IndexFetcher.java:225) at org.apache.solr.handler.IndexFetcher.(IndexFetcher.java:267) at org.apache.solr.handler.ReplicationHandler.doFetch(ReplicationHandler.java:420) at org.apache.solr.cloud.RecoveryStrategy.replicate(RecoveryStrategy.java:237) at org.apache.solr.cloud.RecoveryStrategy.doReplicateOnlyRecovery(RecoveryStrategy.java:382) at org.apache.solr.cloud.RecoveryStrategy.doRecovery(RecoveryStrategy.java:328) at org.apache.solr.cloud.RecoveryStrategy.run(RecoveryStrategy.java:307) at com.codahale.metrics.InstrumentedExecutorService$InstrumentedRunnable.run(InstrumentedExecutorService.java:176) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: 
org.apache.solr.core.SolrCore at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42) at org.apache.solr.core.SolrCore.(SolrCore.java:1054) at org.apache.solr.core.SolrCore.(SolrCore.java:874) at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1191) at org.apache.solr.core.CoreContainer.create(CoreContainer.java:1101) at org.apache.solr.handler.admin.CoreAdminOperation.lambda$static$0(CoreAdminOperation.java:92) at org.apache.solr.handler.admin.CoreAdminOperation.execute(CoreAdminOperation.java:360) at org.apache.solr.handler.admin.CoreAdminHandler$CallInfo.call(CoreAdminHandler.java:395) at org.apache.solr.handler.admin.CoreAdminHandler.handleRequestBody(CoreAdminHandler.java:180) at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:199) at org.apache.solr.servlet.HttpSolrCall.handleAdmin(HttpSolrCall.java:735) at org.apache.solr.servlet.HttpSolrCall.handleAdminRequest(HttpSolrCall.java:716) at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:496) at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:394) at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:340) at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610) at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:164) at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1588) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1345) at 
org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480) at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1557) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1247) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:703) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132) at org.eclipse.jetty.server.Server.handle(Server.java:502) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:364) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260) at
[jira] [Commented] (SOLR-13085) Apache Solr : Error : unknown field _src_
[ https://issues.apache.org/jira/browse/SOLR-13085?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16730065#comment-16730065 ] Shubhangi Shinde commented on SOLR-13085: - Thank you so much [~erickerickson]. > Apache Solr : Error : unknown field _src_ > - > > Key: SOLR-13085 > URL: https://issues.apache.org/jira/browse/SOLR-13085 > Project: Solr > Issue Type: Bug > Security Level: Public(Default Security Level. Issues are Public) > Components: SolrJ, update >Affects Versions: 7.5 >Reporter: Shubhangi Shinde >Priority: Major > Fix For: 7.5.1 > > Attachments: schema.xml, solrconfig.xml > > > I went through > [this|https://lucene.apache.org/solr/guide/7_5/transforming-and-indexing-custom-json.html#setting-json-defaults] > link and added the code in one of core to upload a multilevel JSON and it > worked very well but when I created another core and added same then it is > throwing the below error. I spent so much time to solve this error but no > luck. The error is, > {{{ "responseHeader":\{ "status":400, "QTime":93}, "error":\{ "metadata":[ > "error-class","org.apache.solr.common.SolrException", > "root-error-class","org.apache.solr.common.SolrException"], "msg":"ERROR: > [doc=5b62d25] unknown field '_src_'", "code":400}} }} > I have added the below code in my solrconfig.xml file. > {{ > _src_ name="mapUniqueKeyOnly">true text > }} > It means my entire JSON will be stored against the key _src_. -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[JENKINS-EA] Lucene-Solr-7.x-Windows (64bit/jdk-12-ea+23) - Build # 926 - Failure!
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-7.x-Windows/926/ Java: 64bit/jdk-12-ea+23 -XX:+UseCompressedOops -XX:+UseParallelGC All tests passed Build Log: [...truncated 15468 lines...] [junit4] JVM J0: stdout was not empty, see: C:\Users\jenkins\workspace\Lucene-Solr-7.x-Windows\solr\build\solr-core\test\temp\junit4-J0-20181228_042824_9143890708201348810499.sysout [junit4] >>> JVM J0 emitted unexpected output (verbatim) [junit4] # [junit4] # A fatal error has been detected by the Java Runtime Environment: [junit4] # [junit4] # EXCEPTION_ACCESS_VIOLATION (0xc0000005) at pc=0x7ffd36b79497, pid=10784, tid=8388 [junit4] # [junit4] # JRE version: OpenJDK Runtime Environment (12.0+23) (build 12-ea+23) [junit4] # Java VM: OpenJDK 64-Bit Server VM (12-ea+23, mixed mode, sharing, tiered, compressed oops, parallel gc, windows-amd64) [junit4] # Problematic frame: [junit4] # V [jvm.dll+0x6b9497] [junit4] # [junit4] # No core dump will be written. Minidumps are not enabled by default on client versions of Windows [junit4] # [junit4] # An error report file with more information is saved as: [junit4] # C:\Users\jenkins\workspace\Lucene-Solr-7.x-Windows\solr\build\solr-core\test\J0\hs_err_pid10784.log [junit4] # [junit4] # Compiler replay data is saved as: [junit4] # C:\Users\jenkins\workspace\Lucene-Solr-7.x-Windows\solr\build\solr-core\test\J0\replay_pid10784.log [junit4] # [junit4] # If you would like to submit a bug report, please visit: [junit4] # http://bugreport.java.com/bugreport/crash.jsp [junit4] # [junit4] <<< JVM J0: EOF [...truncated 184 lines...] 
[junit4] ERROR: JVM J0 ended with an exception, command line: C:\Users\jenkins\tools\java\64bit\jdk-12-ea+23\bin\java.exe -XX:+UseCompressedOops -XX:+UseParallelGC -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=C:\Users\jenkins\workspace\Lucene-Solr-7.x-Windows\heapdumps -ea -esa --illegal-access=deny -Dtests.prefix=tests -Dtests.seed=8086416AF8D23072 -Xmx512M -Dtests.iters= -Dtests.verbose=false -Dtests.infostream=false -Dtests.codec=random -Dtests.postingsformat=random -Dtests.docvaluesformat=random -Dtests.locale=random -Dtests.timezone=random -Dtests.directory=random -Dtests.linedocsfile=europarl.lines.txt.gz -Dtests.luceneMatchVersion=7.7.0 -Dtests.cleanthreads=perClass -Djava.util.logging.config.file=C:\Users\jenkins\workspace\Lucene-Solr-7.x-Windows\lucene\tools\junit4\logging.properties -Dtests.nightly=false -Dtests.weekly=false -Dtests.monster=false -Dtests.slow=true -Dtests.asserts=true -Dtests.multiplier=1 -DtempDir=./temp -Djava.io.tmpdir=./temp -Dcommon.dir=C:\Users\jenkins\workspace\Lucene-Solr-7.x-Windows\lucene -Dclover.db.dir=C:\Users\jenkins\workspace\Lucene-Solr-7.x-Windows\lucene\build\clover\db -Djava.security.policy=C:\Users\jenkins\workspace\Lucene-Solr-7.x-Windows\lucene\tools\junit4\solr-tests.policy -Dtests.LUCENE_VERSION=7.7.0 -Djetty.testMode=1 -Djetty.insecurerandom=1 -Dsolr.directoryFactory=org.apache.solr.core.MockDirectoryFactory -Djava.awt.headless=true -Djdk.map.althashing.threshold=0 -Dtests.src.home=C:\Users\jenkins\workspace\Lucene-Solr-7.x-Windows -Djava.security.egd=file:/dev/./urandom -Djunit4.childvm.cwd=C:\Users\jenkins\workspace\Lucene-Solr-7.x-Windows\solr\build\solr-core\test\J0 -Djunit4.tempDir=C:\Users\jenkins\workspace\Lucene-Solr-7.x-Windows\solr\build\solr-core\test\temp -Djunit4.childvm.id=0 -Djunit4.childvm.count=2 -Dtests.disableHdfs=true -Djava.security.manager=org.apache.lucene.util.TestSecurityManager -Dtests.filterstacks=true -Dtests.leaveTemporary=false -Dtests.badapples=false -classpath
[JENKINS] Lucene-Solr-repro - Build # 2579 - Still Unstable
Build: https://builds.apache.org/job/Lucene-Solr-repro/2579/ [...truncated 28 lines...] [repro] Jenkins log URL: https://builds.apache.org/job/Lucene-Solr-SmokeRelease-master/1218/consoleText [repro] Revision: 106d300052baf885fc70ed98b92d3b89e1baf1cc [repro] Ant options: -DsmokeTestRelease.java9=/home/jenkins/tools/java/latest1.9 [repro] Repro line: ant test -Dtestcase=BasicAuthIntegrationTest -Dtests.method=testBasicAuth -Dtests.seed=1D48201A37E01EE0 -Dtests.multiplier=2 -Dtests.locale=sr-ME -Dtests.timezone=Europe/Isle_of_Man -Dtests.asserts=true -Dtests.file.encoding=UTF-8 [repro] Repro line: ant test -Dtestcase=TestSimTriggerIntegration -Dtests.method=testNodeAddedTriggerRestoreState -Dtests.seed=1D48201A37E01EE0 -Dtests.multiplier=2 -Dtests.locale=und -Dtests.timezone=Asia/Atyrau -Dtests.asserts=true -Dtests.file.encoding=UTF-8 [repro] git rev-parse --abbrev-ref HEAD [repro] git rev-parse HEAD [repro] Initial local git branch/revision: 24ae0d16cd5446f27309270f6534d4c19fb4fe43 [repro] git fetch [repro] git checkout 106d300052baf885fc70ed98b92d3b89e1baf1cc [...truncated 2 lines...] [repro] git merge --ff-only [...truncated 1 lines...] [repro] ant clean [...truncated 6 lines...] [repro] Test suites by module: [repro]solr/core [repro] BasicAuthIntegrationTest [repro] TestSimTriggerIntegration [repro] ant compile-test [...truncated 3592 lines...] [repro] ant test-nocompile -Dtests.dups=5 -Dtests.maxfailures=10 -Dtests.class="*.BasicAuthIntegrationTest|*.TestSimTriggerIntegration" -Dtests.showOutput=onerror -DsmokeTestRelease.java9=/home/jenkins/tools/java/latest1.9 -Dtests.seed=1D48201A37E01EE0 -Dtests.multiplier=2 -Dtests.locale=sr-ME -Dtests.timezone=Europe/Isle_of_Man -Dtests.asserts=true -Dtests.file.encoding=UTF-8 [...truncated 5950 lines...] 
[repro] Setting last failure code to 256 [repro] Failures: [repro] 0/5 failed: org.apache.solr.security.BasicAuthIntegrationTest [repro] 4/5 failed: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration [repro] git checkout 24ae0d16cd5446f27309270f6534d4c19fb4fe43 [...truncated 2 lines...] [repro] Exiting with code 256 [...truncated 5 lines...] - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Comment Edited] (LUCENE-8601) Adding attributes to IndexFieldType
[ https://issues.apache.org/jira/browse/LUCENE-8601?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16730028#comment-16730028 ] Murali Krishna P edited comment on LUCENE-8601 at 12/28/18 4:23 AM: [^LUCENE-8601.05.patch] which retains the attributes during the merge and modifies the PerField formats to not to fail, has passed the Lucene/Solr QA above. was (Author: muralikpbhat): [^LUCENE-8601.05.patch] which retains the attributes during the merge and modifies the PerField formats to ignore them, has passed the Lucene/Solr QA above. > Adding attributes to IndexFieldType > --- > > Key: LUCENE-8601 > URL: https://issues.apache.org/jira/browse/LUCENE-8601 > Project: Lucene - Core > Issue Type: Improvement > Components: core/index >Affects Versions: 7.5 >Reporter: Murali Krishna P >Priority: Major > Attachments: LUCENE-8601.01.patch, LUCENE-8601.02.patch, > LUCENE-8601.03.patch, LUCENE-8601.04.patch, LUCENE-8601.05.patch, > LUCENE-8601.patch > > > Today, we can write a custom Field using custom IndexFieldType, but when the > DefaultIndexingChain converts [IndexFieldType to > FieldInfo|https://github.com/apache/lucene-solr/blob/master/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java#L662], > only few key informations such as indexing options and doc value type are > retained. The [Codec gets the > FieldInfo|https://github.com/apache/lucene-solr/blob/master/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java#L90], > but not the type details. > > FieldInfo has support for ['attributes'| > https://github.com/apache/lucene-solr/blob/master/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java#L47] > and it would be great if we can add 'attributes' to IndexFieldType also and > copy it to FieldInfo's 'attribute'. > > This would allow someone to write a custom codec (extending docvalueformat > for example) for only the 'special field' that he wants and delegate the rest > of the fields to the default codec. 
-- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Comment Edited] (LUCENE-8601) Adding attributes to IndexFieldType
[ https://issues.apache.org/jira/browse/LUCENE-8601?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16730028#comment-16730028 ] Murali Krishna P edited comment on LUCENE-8601 at 12/28/18 4:22 AM: [^LUCENE-8601.05.patch] which retains the attributes during the merge and modifies the PerField formats to ignore them, has passed the Lucene/Solr QA above. was (Author: muralikpbhat): [^LUCENE-8601.05.patch] which retains the attributes during the merge and modifies the PerField formats ignore them, has passed the Lucene/Solr QA above. > Adding attributes to IndexFieldType > --- > > Key: LUCENE-8601 > URL: https://issues.apache.org/jira/browse/LUCENE-8601 > Project: Lucene - Core > Issue Type: Improvement > Components: core/index >Affects Versions: 7.5 >Reporter: Murali Krishna P >Priority: Major > Attachments: LUCENE-8601.01.patch, LUCENE-8601.02.patch, > LUCENE-8601.03.patch, LUCENE-8601.04.patch, LUCENE-8601.05.patch, > LUCENE-8601.patch > > > Today, we can write a custom Field using custom IndexFieldType, but when the > DefaultIndexingChain converts [IndexFieldType to > FieldInfo|https://github.com/apache/lucene-solr/blob/master/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java#L662], > only few key informations such as indexing options and doc value type are > retained. The [Codec gets the > FieldInfo|https://github.com/apache/lucene-solr/blob/master/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java#L90], > but not the type details. > > FieldInfo has support for ['attributes'| > https://github.com/apache/lucene-solr/blob/master/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java#L47] > and it would be great if we can add 'attributes' to IndexFieldType also and > copy it to FieldInfo's 'attribute'. > > This would allow someone to write a custom codec (extending docvalueformat > for example) for only the 'special field' that he wants and delegate the rest > of the fields to the default codec. 
-- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Commented] (LUCENE-8601) Adding attributes to IndexFieldType
[ https://issues.apache.org/jira/browse/LUCENE-8601?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16730028#comment-16730028 ] Murali Krishna P commented on LUCENE-8601: -- [^LUCENE-8601.05.patch] which retains the attributes during the merge and modifies the PerField formats ignore them, has passed the Lucene/Solr QA above. > Adding attributes to IndexFieldType > --- > > Key: LUCENE-8601 > URL: https://issues.apache.org/jira/browse/LUCENE-8601 > Project: Lucene - Core > Issue Type: Improvement > Components: core/index >Affects Versions: 7.5 >Reporter: Murali Krishna P >Priority: Major > Attachments: LUCENE-8601.01.patch, LUCENE-8601.02.patch, > LUCENE-8601.03.patch, LUCENE-8601.04.patch, LUCENE-8601.05.patch, > LUCENE-8601.patch > > > Today, we can write a custom Field using custom IndexFieldType, but when the > DefaultIndexingChain converts [IndexFieldType to > FieldInfo|https://github.com/apache/lucene-solr/blob/master/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java#L662], > only few key informations such as indexing options and doc value type are > retained. The [Codec gets the > FieldInfo|https://github.com/apache/lucene-solr/blob/master/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java#L90], > but not the type details. > > FieldInfo has support for ['attributes'| > https://github.com/apache/lucene-solr/blob/master/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java#L47] > and it would be great if we can add 'attributes' to IndexFieldType also and > copy it to FieldInfo's 'attribute'. > > This would allow someone to write a custom codec (extending docvalueformat > for example) for only the 'special field' that he wants and delegate the rest > of the fields to the default codec. -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[JENKINS] Lucene-Solr-BadApples-master-Linux (64bit/jdk-9.0.4) - Build # 141 - Unstable!
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-BadApples-master-Linux/141/ Java: 64bit/jdk-9.0.4 -XX:+UseCompressedOops -XX:+UseG1GC 1 tests failed. FAILED: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration.testNodeMarkersRegistration Error Message: Path /autoscaling/nodeAdded/127.0.0.1:10027_solr wasn't created Stack Trace: java.lang.AssertionError: Path /autoscaling/nodeAdded/127.0.0.1:10027_solr wasn't created at __randomizedtesting.SeedInfo.seed([AE068F72BF210EE1:B6BC077EB114C30E]:0) at org.junit.Assert.fail(Assert.java:88) at org.junit.Assert.assertTrue(Assert.java:41) at org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration.testNodeMarkersRegistration(TestSimTriggerIntegration.java:888) at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.base/java.lang.reflect.Method.invoke(Method.java:564) at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750) at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938) at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974) at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817) at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468) at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947) at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832) at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883) at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53) at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at java.base/java.lang.Thread.run(Thread.java:844) Build Log: [...truncated 12745 lines...] [junit4] Suite: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration [junit4] 2> Creating dataDir:
[JENKINS] Lucene-Solr-master-Windows (64bit/jdk-10.0.1) - Build # 7662 - Unstable!
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-Windows/7662/ Java: 64bit/jdk-10.0.1 -XX:-UseCompressedOops -XX:+UseG1GC 1 tests failed. FAILED: org.apache.solr.cloud.MissingSegmentRecoveryTest.testLeaderRecovery Error Message: Expected a collection with one shard and two replicas Timeout waiting to see state for collection=MissingSegmentRecoveryTest :DocCollection(MissingSegmentRecoveryTest//collections/MissingSegmentRecoveryTest/state.json/7)={ "pullReplicas":"0", "replicationFactor":"2", "shards":{"shard1":{ "range":"8000-7fff", "state":"active", "replicas":{ "core_node3":{ "core":"MissingSegmentRecoveryTest_shard1_replica_n1", "base_url":"http://127.0.0.1:57727/solr;, "node_name":"127.0.0.1:57727_solr", "state":"down", "type":"NRT", "force_set_state":"false"}, "core_node4":{ "core":"MissingSegmentRecoveryTest_shard1_replica_n2", "base_url":"http://127.0.0.1:57728/solr;, "node_name":"127.0.0.1:57728_solr", "state":"active", "type":"NRT", "force_set_state":"false", "leader":"true", "router":{"name":"compositeId"}, "maxShardsPerNode":"1", "autoAddReplicas":"false", "nrtReplicas":"2", "tlogReplicas":"0"} Live Nodes: [127.0.0.1:57727_solr, 127.0.0.1:57728_solr] Last available state: DocCollection(MissingSegmentRecoveryTest//collections/MissingSegmentRecoveryTest/state.json/7)={ "pullReplicas":"0", "replicationFactor":"2", "shards":{"shard1":{ "range":"8000-7fff", "state":"active", "replicas":{ "core_node3":{ "core":"MissingSegmentRecoveryTest_shard1_replica_n1", "base_url":"http://127.0.0.1:57727/solr;, "node_name":"127.0.0.1:57727_solr", "state":"down", "type":"NRT", "force_set_state":"false"}, "core_node4":{ "core":"MissingSegmentRecoveryTest_shard1_replica_n2", "base_url":"http://127.0.0.1:57728/solr;, "node_name":"127.0.0.1:57728_solr", "state":"active", "type":"NRT", "force_set_state":"false", "leader":"true", "router":{"name":"compositeId"}, "maxShardsPerNode":"1", "autoAddReplicas":"false", "nrtReplicas":"2", "tlogReplicas":"0"} Stack Trace: 
java.lang.AssertionError: Expected a collection with one shard and two replicas Timeout waiting to see state for collection=MissingSegmentRecoveryTest :DocCollection(MissingSegmentRecoveryTest//collections/MissingSegmentRecoveryTest/state.json/7)={ "pullReplicas":"0", "replicationFactor":"2", "shards":{"shard1":{ "range":"8000-7fff", "state":"active", "replicas":{ "core_node3":{ "core":"MissingSegmentRecoveryTest_shard1_replica_n1", "base_url":"http://127.0.0.1:57727/solr;, "node_name":"127.0.0.1:57727_solr", "state":"down", "type":"NRT", "force_set_state":"false"}, "core_node4":{ "core":"MissingSegmentRecoveryTest_shard1_replica_n2", "base_url":"http://127.0.0.1:57728/solr;, "node_name":"127.0.0.1:57728_solr", "state":"active", "type":"NRT", "force_set_state":"false", "leader":"true", "router":{"name":"compositeId"}, "maxShardsPerNode":"1", "autoAddReplicas":"false", "nrtReplicas":"2", "tlogReplicas":"0"} Live Nodes: [127.0.0.1:57727_solr, 127.0.0.1:57728_solr] Last available state: DocCollection(MissingSegmentRecoveryTest//collections/MissingSegmentRecoveryTest/state.json/7)={ "pullReplicas":"0", "replicationFactor":"2", "shards":{"shard1":{ "range":"8000-7fff", "state":"active", "replicas":{ "core_node3":{ "core":"MissingSegmentRecoveryTest_shard1_replica_n1", "base_url":"http://127.0.0.1:57727/solr;, "node_name":"127.0.0.1:57727_solr", "state":"down", "type":"NRT", "force_set_state":"false"}, "core_node4":{ "core":"MissingSegmentRecoveryTest_shard1_replica_n2", "base_url":"http://127.0.0.1:57728/solr;, "node_name":"127.0.0.1:57728_solr", "state":"active", "type":"NRT", "force_set_state":"false", "leader":"true", "router":{"name":"compositeId"}, "maxShardsPerNode":"1", "autoAddReplicas":"false", "nrtReplicas":"2", "tlogReplicas":"0"} at __randomizedtesting.SeedInfo.seed([E469289EA8B772A2:B43CB09DF196C4BF]:0) at org.junit.Assert.fail(Assert.java:88) at org.apache.solr.cloud.SolrCloudTestCase.waitForState(SolrCloudTestCase.java:289) at 
org.apache.solr.cloud.SolrCloudTestCase.waitForState(SolrCloudTestCase.java:267) at
[jira] [Commented] (SOLR-12028) BadApple and AwaitsFix annotations usage
[ https://issues.apache.org/jira/browse/SOLR-12028?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16730002#comment-16730002 ] ASF subversion and git services commented on SOLR-12028: Commit 53ac8989e25d435f25ca1c78b05384b357e44590 in lucene-solr's branch refs/heads/branch_7x from Erick Erickson [ https://git-wip-us.apache.org/repos/asf?p=lucene-solr.git;h=53ac898 ] SOLR-12028: Catching up with annotations after recent Solr test work (cherry picked from commit 24ae0d16cd5446f27309270f6534d4c19fb4fe43) > BadApple and AwaitsFix annotations usage > > > Key: SOLR-12028 > URL: https://issues.apache.org/jira/browse/SOLR-12028 > Project: Solr > Issue Type: Task > Security Level: Public(Default Security Level. Issues are Public) > Components: Tests >Reporter: Erick Erickson >Assignee: Erick Erickson >Priority: Major > Attachments: SOLR-12016-buildsystem.patch, SOLR-12028-3-Mar.patch, > SOLR-12028-sysprops-reproduce.patch, SOLR-12028.patch, SOLR-12028.patch > > > There's a long discussion of this topic at SOLR-12016. Here's a summary: > - BadApple annotations are used for tests that intermittently fail, say < 30% > of the time. Tests that fail more often should be moved to AwaitsFix. This is, > of course, a judgement call > - AwaitsFix annotations are used for tests that, for some reason, the problem > can't be fixed immediately. Likely reasons are third-party dependencies, > extreme difficulty tracking down, dependency on another JIRA etc. > Jenkins jobs will typically run with BadApple disabled to cut down on noise. > Periodically Jenkins jobs will be run with BadApples enabled so BadApple > tests won't be lost and reports can be generated. Tests that run with > BadApples disabled that fail require _immediate_ attention. > The default for developers is that BadApple is enabled. > If you are working on one of these tests and cannot get the test to fail > locally, it is perfectly acceptable to comment the annotation out. 
You should > let the dev list know that this is deliberate. > This JIRA is a placeholder for BadApple tests to point to between the times > they're identified as BadApple and they're either fixed or changed to > AwaitsFix or assigned their own JIRA. > I've assigned this to myself to track so I don't lose track of it. No one > person will fix all of these issues, this will be an ongoing technical debt > cleanup effort. -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Commented] (SOLR-12028) BadApple and AwaitsFix annotations usage
[ https://issues.apache.org/jira/browse/SOLR-12028?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16729998#comment-16729998 ] ASF subversion and git services commented on SOLR-12028: Commit 24ae0d16cd5446f27309270f6534d4c19fb4fe43 in lucene-solr's branch refs/heads/master from Erick Erickson [ https://git-wip-us.apache.org/repos/asf?p=lucene-solr.git;h=24ae0d1 ] SOLR-12028: Catching up with annotations after recent Solr test work > BadApple and AwaitsFix annotations usage > > > Key: SOLR-12028 > URL: https://issues.apache.org/jira/browse/SOLR-12028 > Project: Solr > Issue Type: Task > Security Level: Public(Default Security Level. Issues are Public) > Components: Tests >Reporter: Erick Erickson >Assignee: Erick Erickson >Priority: Major > Attachments: SOLR-12016-buildsystem.patch, SOLR-12028-3-Mar.patch, > SOLR-12028-sysprops-reproduce.patch, SOLR-12028.patch, SOLR-12028.patch > > > There's a long discussion of this topic at SOLR-12016. Here's a summary: > - BadApple annotations are used for tests that intermittently fail, say < 30% > of the time. Tests that fail more often should be moved to AwaitsFix. This is, > of course, a judgement call > - AwaitsFix annotations are used for tests that, for some reason, the problem > can't be fixed immediately. Likely reasons are third-party dependencies, > extreme difficulty tracking down, dependency on another JIRA etc. > Jenkins jobs will typically run with BadApple disabled to cut down on noise. > Periodically Jenkins jobs will be run with BadApples enabled so BadApple > tests won't be lost and reports can be generated. Tests that run with > BadApples disabled that fail require _immediate_ attention. > The default for developers is that BadApple is enabled. > If you are working on one of these tests and cannot get the test to fail > locally, it is perfectly acceptable to comment the annotation out. You should > let the dev list know that this is deliberate. 
> This JIRA is a placeholder for BadApple tests to point to between the times > they're identified as BadApple and they're either fixed or changed to > AwaitsFix or assigned their own JIRA. > I've assigned this to myself to track so I don't lose track of it. No one > person will fix all of these issues, this will be an ongoing technical debt > cleanup effort. -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[JENKINS] Lucene-Solr-Tests-master - Build # 3098 - Still Unstable
Build: https://builds.apache.org/job/Lucene-Solr-Tests-master/3098/ 2 tests failed. FAILED: junit.framework.TestSuite.org.apache.solr.cloud.TestSolrCloudWithKerberosAlt Error Message: 6 threads leaked from SUITE scope at org.apache.solr.cloud.TestSolrCloudWithKerberosAlt: 1) Thread[id=33768, name=ou=system.data, state=TIMED_WAITING, group=TGRP-TestSolrCloudWithKerberosAlt] at sun.misc.Unsafe.park(Native Method) at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1093) at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:809) at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1074) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748)2) Thread[id=33767, name=changePwdReplayCache.data, state=TIMED_WAITING, group=TGRP-TestSolrCloudWithKerberosAlt] at sun.misc.Unsafe.park(Native Method) at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1093) at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:809) at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1074) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748)3) 
Thread[id=33770, name=pool-28-thread-1, state=TIMED_WAITING, group=TGRP-TestSolrCloudWithKerberosAlt] at sun.misc.Unsafe.park(Native Method) at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460) at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362) at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941) at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748)4) Thread[id=33765, name=apacheds, state=WAITING, group=TGRP-TestSolrCloudWithKerberosAlt] at java.lang.Object.wait(Native Method) at java.lang.Object.wait(Object.java:502) at java.util.TimerThread.mainLoop(Timer.java:526) at java.util.TimerThread.run(Timer.java:505)5) Thread[id=33769, name=groupCache.data, state=TIMED_WAITING, group=TGRP-TestSolrCloudWithKerberosAlt] at sun.misc.Unsafe.park(Native Method) at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1093) at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:809) at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1074) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748)6) Thread[id=33766, name=kdcReplayCache.data, state=TIMED_WAITING, group=TGRP-TestSolrCloudWithKerberosAlt] at 
sun.misc.Unsafe.park(Native Method) at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078) at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1093) at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:809) at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1074)
[JENKINS] Lucene-Solr-master-Solaris (64bit/jdk1.8.0) - Build # 2227 - Unstable!
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-Solaris/2227/ Java: 64bit/jdk1.8.0 -XX:-UseCompressedOops -XX:+UseConcMarkSweepGC 2 tests failed. FAILED: org.apache.solr.client.solrj.io.stream.StreamDecoratorTest.testParallelCommitStream Error Message: expected:<5> but was:<3> Stack Trace: java.lang.AssertionError: expected:<5> but was:<3> at __randomizedtesting.SeedInfo.seed([3C15698AAFEB775D:1CFF0B8A33AA9A11]:0) at org.junit.Assert.fail(Assert.java:88) at org.junit.Assert.failNotEquals(Assert.java:834) at org.junit.Assert.assertEquals(Assert.java:645) at org.junit.Assert.assertEquals(Assert.java:631) at org.apache.solr.client.solrj.io.stream.StreamDecoratorTest.testParallelCommitStream(StreamDecoratorTest.java:3034) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750) at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938) at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974) at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817) at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468) at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947) at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832) at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883) at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at 
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53) at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at java.lang.Thread.run(Thread.java:748) FAILED: org.apache.solr.client.solrj.io.stream.StreamDecoratorTest.testParallelCommitStream Error Message: expected:<5> but
[JENKINS] Lucene-Solr-repro - Build # 2578 - Unstable
Build: https://builds.apache.org/job/Lucene-Solr-repro/2578/ [...truncated 34 lines...] [repro] Jenkins log URL: https://builds.apache.org/job/Lucene-Solr-Tests-master/3097/consoleText [repro] Revision: 106d300052baf885fc70ed98b92d3b89e1baf1cc [repro] Repro line: ant test -Dtestcase=TestSimTriggerIntegration -Dtests.method=testNodeLostTriggerRestoreState -Dtests.seed=C027C5CB4E06A7B8 -Dtests.multiplier=2 -Dtests.slow=true -Dtests.locale=es-CU -Dtests.timezone=CAT -Dtests.asserts=true -Dtests.file.encoding=US-ASCII [repro] git rev-parse --abbrev-ref HEAD [repro] git rev-parse HEAD [repro] Initial local git branch/revision: d018cd18f4470982ffae6e7ff6c7de3ad868bec3 [repro] git fetch [repro] git checkout 106d300052baf885fc70ed98b92d3b89e1baf1cc [...truncated 2 lines...] [repro] git merge --ff-only [...truncated 1 lines...] [repro] ant clean [...truncated 6 lines...] [repro] Test suites by module: [repro]solr/core [repro] TestSimTriggerIntegration [repro] ant compile-test [...truncated 3592 lines...] [repro] ant test-nocompile -Dtests.dups=5 -Dtests.maxfailures=5 -Dtests.class="*.TestSimTriggerIntegration" -Dtests.showOutput=onerror -Dtests.seed=C027C5CB4E06A7B8 -Dtests.multiplier=2 -Dtests.slow=true -Dtests.locale=es-CU -Dtests.timezone=CAT -Dtests.asserts=true -Dtests.file.encoding=US-ASCII [...truncated 5576 lines...] [repro] Setting last failure code to 256 [repro] Failures: [repro] 4/5 failed: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration [repro] git checkout d018cd18f4470982ffae6e7ff6c7de3ad868bec3 [...truncated 2 lines...] [repro] Exiting with code 256 [...truncated 6 lines...] - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[JENKINS] Lucene-Solr-SmokeRelease-master - Build # 1218 - Failure
Build: https://builds.apache.org/job/Lucene-Solr-SmokeRelease-master/1218/ No tests ran. Build Log: [...truncated 23474 lines...] [asciidoctor:convert] asciidoctor: ERROR: about-this-guide.adoc: line 1: invalid part, must have at least one section (e.g., chapter, appendix, etc.) [asciidoctor:convert] asciidoctor: ERROR: solr-glossary.adoc: line 1: invalid part, must have at least one section (e.g., chapter, appendix, etc.) [java] Processed 2464 links (2015 relative) to 3226 anchors in 248 files [echo] Validated Links & Anchors via: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/solr/build/solr-ref-guide/bare-bones-html/ -dist-changes: [copy] Copying 4 files to /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/solr/package/changes package: -unpack-solr-tgz: -ensure-solr-tgz-exists: [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/solr/build/solr.tgz.unpacked [untar] Expanding: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/solr/package/solr-8.0.0.tgz into /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/solr/build/solr.tgz.unpacked generate-maven-artifacts: resolve: resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0. -ivy-fail-disallowed-ivy-version: ivy-fail: ivy-configure: [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/lucene/top-level-ivy-settings.xml resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0. -ivy-fail-disallowed-ivy-version: ivy-fail: ivy-configure: [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/lucene/top-level-ivy-settings.xml resolve: resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0. 
-ivy-fail-disallowed-ivy-version: ivy-fail: ivy-configure: [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/lucene/top-level-ivy-settings.xml resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0. -ivy-fail-disallowed-ivy-version: ivy-fail: ivy-configure: [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/lucene/top-level-ivy-settings.xml resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0. -ivy-fail-disallowed-ivy-version: ivy-fail: ivy-configure: [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/lucene/top-level-ivy-settings.xml resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0. -ivy-fail-disallowed-ivy-version: ivy-fail: ivy-configure: [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/lucene/top-level-ivy-settings.xml resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0. -ivy-fail-disallowed-ivy-version: ivy-fail: ivy-configure: [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/lucene/top-level-ivy-settings.xml resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0. -ivy-fail-disallowed-ivy-version: ivy-fail: ivy-configure: [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/lucene/top-level-ivy-settings.xml resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0. 
-ivy-fail-disallowed-ivy-version: ivy-fail: ivy-configure: [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/lucene/top-level-ivy-settings.xml resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0. -ivy-fail-disallowed-ivy-version: ivy-fail: ivy-configure: [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/lucene/top-level-ivy-settings.xml resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0. -ivy-fail-disallowed-ivy-version: ivy-fail: ivy-configure: [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-SmokeRelease-master/lucene/top-level-ivy-settings.xml resolve: ivy-availability-check: [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
[jira] [Assigned] (SOLR-12768) Determine how _nest_path_ should be analyzed to support various use-cases
[ https://issues.apache.org/jira/browse/SOLR-12768?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] David Smiley reassigned SOLR-12768: --- Assignee: David Smiley > Determine how _nest_path_ should be analyzed to support various use-cases > - > > Key: SOLR-12768 > URL: https://issues.apache.org/jira/browse/SOLR-12768 > Project: Solr > Issue Type: Sub-task > Security Level: Public(Default Security Level. Issues are Public) >Reporter: David Smiley >Assignee: David Smiley >Priority: Blocker > Fix For: master (8.0) > > > We know we need {{\_nest\_path\_}} in the schema for the new nested documents > support, and we loosely know what goes in it. From a DocValues perspective, > we've got it down; though we might tweak it. From an indexing (text > analysis) perspective, we're not quite sure yet, though we've got a test > schema, {{schema-nest.xml}} with a decent shot at it. Ultimately, how we > index it will depend on the query/filter use-cases we need to support. So > we'll review some of them here. > TBD: Not sure if the outcome of this task is just a "decide" or whether we > also potentially add a few tests for some of these cases, and/or if we also > add a FieldType to make declaring it as easy as a one-liner. A FieldType > would have other benefits too once we're ready to make querying on the path > easier. -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Commented] (SOLR-6399) Implement unloadCollection in the Collections API
[ https://issues.apache.org/jira/browse/SOLR-6399?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16729865#comment-16729865 ] Gus Heck commented on SOLR-6399: This also seems like it would be useful for A/B testing schema configurations on the same hardware if "unloaded" truly meant no resources were consumed. Imagine one tries a change and it's worse not better. It would be nice to be able to delete the experiment and simply reload the old collection. If the change is good delete (or back up and archive) the old one... saves standing up extra hardware, or re-indexing or backup/restore of the old collection when the experiment fails. The bigger the cluster and the bigger the data set the more useful this is. > Implement unloadCollection in the Collections API > - > > Key: SOLR-6399 > URL: https://issues.apache.org/jira/browse/SOLR-6399 > Project: Solr > Issue Type: New Feature >Reporter: dfdeshom >Assignee: Shalin Shekhar Mangar >Priority: Major > Fix For: 6.0 > > > There is currently no way to unload a collection without deleting its > contents. There should be a way in the collections API to unload a collection > and reload it later, as needed. > A use case for this is the following: you store logs by day, with each day > having its own collection. You are required to store up to 2 years of data, > which adds up to 730 collections. Most of the time, you'll want to have 3 > days of data loaded for search. Having just 3 collections loaded into memory, > instead of 730 will make managing Solr easier. -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[JENKINS] Lucene-Solr-master-Linux (64bit/jdk1.8.0_172) - Build # 23408 - Unstable!
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-Linux/23408/ Java: 64bit/jdk1.8.0_172 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC 1 tests failed. FAILED: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration.testNodeLostTriggerRestoreState Error Message: The trigger did not fire at all Stack Trace: java.lang.AssertionError: The trigger did not fire at all at __randomizedtesting.SeedInfo.seed([9B06BCFCD212A5D5:B0F969A7486AB005]:0) at org.junit.Assert.fail(Assert.java:88) at org.junit.Assert.assertTrue(Assert.java:41) at org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration.testNodeLostTriggerRestoreState(TestSimTriggerIntegration.java:332) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750) at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938) at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974) at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817) at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468) at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947) at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832) at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883) at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53) at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at java.lang.Thread.run(Thread.java:748) Build Log: [...truncated 13147 lines...] [junit4] Suite: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration [junit4] 2> Creating dataDir:
[jira] [Updated] (LUCENE-8624) ByteBuffersDataOutput Integer Overflow
[ https://issues.apache.org/jira/browse/LUCENE-8624?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Mulugeta Mammo updated LUCENE-8624: --- Description: Hi, When indexing large data sets with ByteBuffersDirectory, an exception like the below is thrown: Caused by: java.lang.IllegalArgumentException: cannot write negative vLong (got: -4294888321) at org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:182) at org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) at org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) at org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) at org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) at org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) at org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) at org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) {{The exception is caused by an integer overflow while calling getFilePointer() in Lucene50PostingsWriter, which eventually calls the size() method in ByteBuffersDataOutput.}} {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { int fullBlockSize = (blockCount - 1) * blockSize(); // throws 
integer overflow int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} In my case, I had a blockCount = 65 and a blockSize() = 33554432 which overflows fullBlockSize. The fix: {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { long fullBlockSize = 1L * (blockCount - 1) * blockSize(); int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} Thanks was: Hi, When indexing large data sets with ByteBuffersDirectory, an exception like the below is thrown: Caused by: java.lang.IllegalArgumentException: cannot write negative vLong (got: -4294888321) at org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:182) at org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) at org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) at org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) at org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) at org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) at org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) at org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) at 
org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) {{The exception is caused by an integer overflow while calling getFilePointer() in Lucene50PostingsWriter, which eventually calls the size() method in ByteBuffersDataOutput.}} {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { int fullBlockSize = (blockCount - 1) * blockSize(); int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} In my case, I had a blockCount = 65 and a blockSize() = 33554432 which overflows fullBlockSize. The fix: {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { long fullBlockSize = 1L * (blockCount - 1) * blockSize(); int lastBlockSize = blocks.getLast().position(); size =
[jira] [Updated] (LUCENE-8625) Integer Overflow in ByteBuffersDataInput
[ https://issues.apache.org/jira/browse/LUCENE-8625?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Mulugeta Mammo updated LUCENE-8625: --- Description: Hi, Once I fixed the bug here, https://issues.apache.org/jira/browse/LUCENE-8624, I encountered another Integer Overflow error in ByteBuffersDataInput: Exception in thread "Lucene Merge Thread #1540" Exception in thread "main" org.apache.lucene.index.MergePolicy$MergeException: java.lang.ArithmeticException: integer overflow at org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:705) at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:685) Caused by: java.lang.ArithmeticException: integer overflow at java.lang.Math.toIntExact(Math.java:1011) at org.apache.lucene.store.ByteBuffersDataInput.sliceBufferList(ByteBuffersDataInput.java:299) at org.apache.lucene.store.ByteBuffersDataInput.slice(ByteBuffersDataInput.java:223) at org.apache.lucene.store.ByteBuffersIndexInput.clone(ByteBuffersIndexInput.java:186) at org.apache.lucene.store.ByteBuffersDirectory$FileEntry.openInput(ByteBuffersDirectory.java:254) at org.apache.lucene.store.ByteBuffersDirectory.openInput(ByteBuffersDirectory.java:223) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.Directory.openChecksumInput(Directory.java:157) at org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat.write(Lucene50CompoundFormat.java:89) at org.apache.lucene.index.IndexWriter.createCompoundFile(IndexWriter.java:5004) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4517) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) at org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:626) 
at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:663) The exception is caused by a Math.toIntExact in sliceBufferList in ByteBuffersDataInput. {code:java|title=ByteBuffersDataInput.java|borderStyle=solid} private static List sliceBufferList(List buffers, long offset, long length) { ensureAssumptions(buffers); if (buffers.size() == 1) { ByteBuffer cloned = buffers.get(0).asReadOnlyBuffer(); cloned.position(Math.toIntExact(cloned.position() + offset)); cloned.limit(Math.toIntExact(length + cloned.position())); return Arrays.asList(cloned); } else { long absStart = buffers.get(0).position() + offset; long absEnd = Math.toIntExact(absStart + length); // throws integer overflow ... {code} Removing the Math.toIntExact works but I'm not sure if the logic will still be right since absEnd is used to calculate endOffset after a few lines: {code:java} int endOffset = (int) absEnd & blockMask; {code} Thanks, was: Hi, Once I fixed the bug here, https://issues.apache.org/jira/browse/LUCENE-8624, I encountered another Integer Overflow error in ByteBuffersDataInput: Exception in thread "Lucene Merge Thread #1540" Exception in thread "main" org.apache.lucene.index.MergePolicy$MergeException: java.lang.ArithmeticException: integer overflow at org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:705) at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:685) Caused by: java.lang.ArithmeticException: integer overflow at java.lang.Math.toIntExact(Math.java:1011) at org.apache.lucene.store.ByteBuffersDataInput.sliceBufferList(ByteBuffersDataInput.java:299) at org.apache.lucene.store.ByteBuffersDataInput.slice(ByteBuffersDataInput.java:223) at org.apache.lucene.store.ByteBuffersIndexInput.clone(ByteBuffersIndexInput.java:186) at org.apache.lucene.store.ByteBuffersDirectory$FileEntry.openInput(ByteBuffersDirectory.java:254) at 
org.apache.lucene.store.ByteBuffersDirectory.openInput(ByteBuffersDirectory.java:223) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.Directory.openChecksumInput(Directory.java:157) at org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat.write(Lucene50CompoundFormat.java:89) at org.apache.lucene.index.IndexWriter.createCompoundFile(IndexWriter.java:5004) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4517) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) at org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:626) at
[jira] [Updated] (LUCENE-8625) Integer Overflow in ByteBuffersDataInput
[ https://issues.apache.org/jira/browse/LUCENE-8625?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Mulugeta Mammo updated LUCENE-8625: --- Description: Hi, Once I fixed the bug here, https://issues.apache.org/jira/browse/LUCENE-8624, I encountered another Integer Overflow error in ByteBuffersDataInput: Exception in thread "Lucene Merge Thread #1540" Exception in thread "main" org.apache.lucene.index.MergePolicy$MergeException: java.lang.ArithmeticException: integer overflow at org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:705) at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:685) Caused by: java.lang.ArithmeticException: integer overflow at java.lang.Math.toIntExact(Math.java:1011) at org.apache.lucene.store.ByteBuffersDataInput.sliceBufferList(ByteBuffersDataInput.java:299) at org.apache.lucene.store.ByteBuffersDataInput.slice(ByteBuffersDataInput.java:223) at org.apache.lucene.store.ByteBuffersIndexInput.clone(ByteBuffersIndexInput.java:186) at org.apache.lucene.store.ByteBuffersDirectory$FileEntry.openInput(ByteBuffersDirectory.java:254) at org.apache.lucene.store.ByteBuffersDirectory.openInput(ByteBuffersDirectory.java:223) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.Directory.openChecksumInput(Directory.java:157) at org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat.write(Lucene50CompoundFormat.java:89) at org.apache.lucene.index.IndexWriter.createCompoundFile(IndexWriter.java:5004) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4517) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) at org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:626) 
at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:663) The exception is caused by a Math.toIntExact made in sliceBufferList in ByteBuffersDataInput. {code:java|title=ByteBuffersDataInput.java|borderStyle=solid} private static List sliceBufferList(List buffers, long offset, long length) { ensureAssumptions(buffers); if (buffers.size() == 1) { ByteBuffer cloned = buffers.get(0).asReadOnlyBuffer(); cloned.position(Math.toIntExact(cloned.position() + offset)); cloned.limit(Math.toIntExact(length + cloned.position())); return Arrays.asList(cloned); } else { long absStart = buffers.get(0).position() + offset; long absEnd = Math.toIntExact(absStart + length); // throws integer overflow ... {code} Removing the Math.toIntExact works but I'm not sure if the logic will still be right since absEnd is used to calculate endOffset after a few lines: {code:java} int endOffset = (int) absEnd & blockMask; {code} Thanks, was: Hi, Once I fixed the bug here, https://issues.apache.org/jira/browse/LUCENE-8624, I encountered another Integer Overflow error in ByteBuffersDataInput: xception in thread "Lucene Merge Thread #1540" Exception in thread "main" org.apache.lucene.index.MergePolicy$MergeException: java.lang.ArithmeticException: integer overflow at org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:705) at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:685) Caused by: java.lang.ArithmeticException: integer overflow at java.lang.Math.toIntExact(Math.java:1011) at org.apache.lucene.store.ByteBuffersDataInput.sliceBufferList(ByteBuffersDataInput.java:299) at org.apache.lucene.store.ByteBuffersDataInput.slice(ByteBuffersDataInput.java:223) at org.apache.lucene.store.ByteBuffersIndexInput.clone(ByteBuffersIndexInput.java:186) at org.apache.lucene.store.ByteBuffersDirectory$FileEntry.openInput(ByteBuffersDirectory.java:254) at 
org.apache.lucene.store.ByteBuffersDirectory.openInput(ByteBuffersDirectory.java:223) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.Directory.openChecksumInput(Directory.java:157) at org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat.write(Lucene50CompoundFormat.java:89) at org.apache.lucene.index.IndexWriter.createCompoundFile(IndexWriter.java:5004) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4517) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) at org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:626) at
[jira] [Updated] (LUCENE-8625) Integer Overflow in ByteBuffersDataInput
[ https://issues.apache.org/jira/browse/LUCENE-8625?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Mulugeta Mammo updated LUCENE-8625: --- Summary: Integer Overflow in ByteBuffersDataInput (was: Integer Overflow Exception in ByteBuffersDataInpu) > Integer Overflow in ByteBuffersDataInput > > > Key: LUCENE-8625 > URL: https://issues.apache.org/jira/browse/LUCENE-8625 > Project: Lucene - Core > Issue Type: Bug > Components: core/store >Affects Versions: 7.5 >Reporter: Mulugeta Mammo >Priority: Major > Fix For: 7.5 > > > Hi, > Once I fixed the bug here, https://issues.apache.org/jira/browse/LUCENE-8624, > I encountered another Integer Overflow error in ByteBuffersDataInput: > xception in thread "Lucene Merge Thread #1540" Exception in thread "main" > org.apache.lucene.index.MergePolicy$MergeException: > java.lang.ArithmeticException: integer overflow > at > org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:705) > at > org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:685) > Caused by: java.lang.ArithmeticException: integer overflow > at java.lang.Math.toIntExact(Math.java:1011) > at > org.apache.lucene.store.ByteBuffersDataInput.sliceBufferList(ByteBuffersDataInput.java:299) > at > org.apache.lucene.store.ByteBuffersDataInput.slice(ByteBuffersDataInput.java:223) > at > org.apache.lucene.store.ByteBuffersIndexInput.clone(ByteBuffersIndexInput.java:186) > at > org.apache.lucene.store.ByteBuffersDirectory$FileEntry.openInput(ByteBuffersDirectory.java:254) > at > org.apache.lucene.store.ByteBuffersDirectory.openInput(ByteBuffersDirectory.java:223) > at > org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) > at > org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) > at > org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) > at 
org.apache.lucene.store.Directory.openChecksumInput(Directory.java:157) > at > org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat.write(Lucene50CompoundFormat.java:89) > at > org.apache.lucene.index.IndexWriter.createCompoundFile(IndexWriter.java:5004) > at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4517) > at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) > at > org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:626) > at > org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:663) > The exception is caused by a Math.toIntExact made in sliceBufferList in > ByteBuffersDataInput. > {code:java|title=ByteBuffersDataInput.java|borderStyle=solid} > private static List sliceBufferList(List buffers, > long offset, long length) { > ensureAssumptions(buffers); > if (buffers.size() == 1) { > ByteBuffer cloned = buffers.get(0).asReadOnlyBuffer(); > cloned.position(Math.toIntExact(cloned.position() + offset)); > cloned.limit(Math.toIntExact(length + cloned.position())); > return Arrays.asList(cloned); > } else { > long absStart = buffers.get(0).position() + offset; > long absEnd = Math.toIntExact(absStart + length); // throws integer > overflow > ... > {code} > Removing the Math.toIntExact works but I'm not sure if the logic will still > be right since absEnd is used to calculate endOffset after a few lines: > {code:java} > int endOffset = (int) absEnd & blockMask; > {code} > > Thanks, -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Updated] (LUCENE-8625) Integer Overflow Exception in ByteBuffersDataInput
[ https://issues.apache.org/jira/browse/LUCENE-8625?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Mulugeta Mammo updated LUCENE-8625: --- Description: Hi, Once I fixed the bug here, https://issues.apache.org/jira/browse/LUCENE-8624, I encountered another Integer Overflow error in ByteBuffersDataInput: xception in thread "Lucene Merge Thread #1540" Exception in thread "main" org.apache.lucene.index.MergePolicy$MergeException: java.lang.ArithmeticException: integer overflow at org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:705) at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:685) Caused by: java.lang.ArithmeticException: integer overflow at java.lang.Math.toIntExact(Math.java:1011) at org.apache.lucene.store.ByteBuffersDataInput.sliceBufferList(ByteBuffersDataInput.java:299) at org.apache.lucene.store.ByteBuffersDataInput.slice(ByteBuffersDataInput.java:223) at org.apache.lucene.store.ByteBuffersIndexInput.clone(ByteBuffersIndexInput.java:186) at org.apache.lucene.store.ByteBuffersDirectory$FileEntry.openInput(ByteBuffersDirectory.java:254) at org.apache.lucene.store.ByteBuffersDirectory.openInput(ByteBuffersDirectory.java:223) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.Directory.openChecksumInput(Directory.java:157) at org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat.write(Lucene50CompoundFormat.java:89) at org.apache.lucene.index.IndexWriter.createCompoundFile(IndexWriter.java:5004) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4517) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) at org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:626) at 
org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:663) The exception is caused by a Math.toIntExact made in sliceBufferList in ByteBuffersDataInput. {code:java|title=ByteBuffersDataInput.java|borderStyle=solid} private static List sliceBufferList(List buffers, long offset, long length) { ensureAssumptions(buffers); if (buffers.size() == 1) { ByteBuffer cloned = buffers.get(0).asReadOnlyBuffer(); cloned.position(Math.toIntExact(cloned.position() + offset)); cloned.limit(Math.toIntExact(length + cloned.position())); return Arrays.asList(cloned); } else { long absStart = buffers.get(0).position() + offset; long absEnd = Math.toIntExact(absStart + length); // throws integer overflow ... {code} Removing the Math.toIntExact works but I'm not sure if the logic will still be right since absEnd is used to calculate endOffset after a few lines: {code:java} int endOffset = (int) absEnd & blockMask; {code} Thanks, was: Hi, Once I fixed the bug here, https://issues.apache.org/jira/browse/LUCENE-8624, I encountered another Integer Overflow error in ByteBuffersDataInput: xception in thread "Lucene Merge Thread #1540" Exception in thread "main" org.apache.lucene.index.MergePolicy$MergeException: java.lang.ArithmeticException: integer overflow at org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:705) at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:685) Caused by: java.lang.ArithmeticException: integer overflow at java.lang.Math.toIntExact(Math.java:1011) at org.apache.lucene.store.ByteBuffersDataInput.sliceBufferList(ByteBuffersDataInput.java:299) at org.apache.lucene.store.ByteBuffersDataInput.slice(ByteBuffersDataInput.java:223) at org.apache.lucene.store.ByteBuffersIndexInput.clone(ByteBuffersIndexInput.java:186) at org.apache.lucene.store.ByteBuffersDirectory$FileEntry.openInput(ByteBuffersDirectory.java:254) at 
org.apache.lucene.store.ByteBuffersDirectory.openInput(ByteBuffersDirectory.java:223) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.Directory.openChecksumInput(Directory.java:157) at org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat.write(Lucene50CompoundFormat.java:89) at org.apache.lucene.index.IndexWriter.createCompoundFile(IndexWriter.java:5004) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4517) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) at org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:626) at
[jira] [Created] (LUCENE-8625) Integer Overflow Exception in ByteBuffersDataInput
Mulugeta Mammo created LUCENE-8625: -- Summary: Integer Overflow Exception in ByteBuffersDataInpu Key: LUCENE-8625 URL: https://issues.apache.org/jira/browse/LUCENE-8625 Project: Lucene - Core Issue Type: Bug Components: core/store Affects Versions: 7.5 Reporter: Mulugeta Mammo Fix For: 7.5 Hi, Once I fixed the bug here, https://issues.apache.org/jira/browse/LUCENE-8624, I encountered another Integer Overflow error in ByteBuffersDataInput: xception in thread "Lucene Merge Thread #1540" Exception in thread "main" org.apache.lucene.index.MergePolicy$MergeException: java.lang.ArithmeticException: integer overflow at org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:705) at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:685) Caused by: java.lang.ArithmeticException: integer overflow at java.lang.Math.toIntExact(Math.java:1011) at org.apache.lucene.store.ByteBuffersDataInput.sliceBufferList(ByteBuffersDataInput.java:299) at org.apache.lucene.store.ByteBuffersDataInput.slice(ByteBuffersDataInput.java:223) at org.apache.lucene.store.ByteBuffersIndexInput.clone(ByteBuffersIndexInput.java:186) at org.apache.lucene.store.ByteBuffersDirectory$FileEntry.openInput(ByteBuffersDirectory.java:254) at org.apache.lucene.store.ByteBuffersDirectory.openInput(ByteBuffersDirectory.java:223) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.FilterDirectory.openInput(FilterDirectory.java:100) at org.apache.lucene.store.Directory.openChecksumInput(Directory.java:157) at org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat.write(Lucene50CompoundFormat.java:89) at org.apache.lucene.index.IndexWriter.createCompoundFile(IndexWriter.java:5004) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4517) at 
org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) at org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:626) at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:663) The exception is caused by a Math.toIntExact made in sliceBufferList in ByteBuffersDataInput. {{{code:title=ByteBuffersDataInput.java|borderStyle=solid}}} private static List sliceBufferList(List buffers, long offset, long length) { ensureAssumptions(buffers); if (buffers.size() == 1) { ByteBuffer cloned = buffers.get(0).asReadOnlyBuffer(); cloned.position(Math.toIntExact(cloned.position() + offset)); cloned.limit(Math.toIntExact(length + cloned.position())); return Arrays.asList(cloned); } else { long absStart = buffers.get(0).position() + offset; long absEnd = Math.toIntExact(absStart + length); // throws integer overflow ... {{{code}}} Removing the Math.toIntExact works but I'm not sure if the logic will still be right since absEnd is used to calculate endOffset: {code:java} int endOffset = (int) absEnd & blockMask; {code} Thanks, -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Created] (LUCENE-8624) ByteBuffersDataOutput Integer Overflow
Mulugeta Mammo created LUCENE-8624: -- Summary: ByteBuffersDataOutput Integer Overflow Key: LUCENE-8624 URL: https://issues.apache.org/jira/browse/LUCENE-8624 Project: Lucene - Core Issue Type: Bug Components: core/store Affects Versions: 7.5 Reporter: Mulugeta Mammo Fix For: 7.5 Hi, When indexing large data sets with ByteBuffersDirectory, an exception like the below is thrown: {{}}Caused by: java.lang.IllegalArgumentException: cannot write negative vLong (got: -4294888321) at org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:182) at org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) at org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) at org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) at org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) at org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) at org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) at org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) {{The exception is caused by an integer overflow while calling getFilePointer() in Lucene50PostingsWriter, which eventually calls the size() method in ByteBuffersDataOutput.}} {{{code:title=ByteBuffersDataOutput.java|borderStyle=solid} }} public long size() { long size = 0; int 
blockCount = blocks.size(); if (blockCount >= 1) { {color:#FF}int fullBlockSize = (blockCount - 1) * blockSize();{color} int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} In my case, I had a blockCount = 65 and a blockSize() = 33554432 which overflows fullBlockSize. The fix: {{{code:title=ByteBuffersDataOutput.java|borderStyle=solid}}} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { {color:#FF}long fullBlockSize = 1L * (blockCount - 1) * blockSize();{color} int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} Thanks -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Updated] (LUCENE-8624) ByteBuffersDataOutput Integer Overflow
[ https://issues.apache.org/jira/browse/LUCENE-8624?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Mulugeta Mammo updated LUCENE-8624: --- Description: Hi, When indexing large data sets with ByteBuffersDirectory, an exception like the below is thrown: {{}}Caused by: java.lang.IllegalArgumentException: cannot write negative vLong (got: -4294888321) at org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:182) at org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) at org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) at org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) at org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) at org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) at org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) at org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) {{The exception is caused by an integer overflow while calling getFilePointer() in Lucene50PostingsWriter, which eventually calls the size() method in ByteBuffersDataOutput.}} {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { int fullBlockSize = (blockCount - 1) * blockSize(); int 
lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} In my case, I had a blockCount = 65 and a blockSize() = 33554432 which overflows fullBlockSize. The fix: {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { long fullBlockSize = 1L * (blockCount - 1) * blockSize(); int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} Thanks was: Hi, When indexing large data sets with ByteBuffersDirectory, an exception like the below is thrown: {{}}Caused by: java.lang.IllegalArgumentException: cannot write negative vLong (got: -4294888321) at org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:182) at org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) at org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) at org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) at org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) at org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) at org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) at org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) {{The exception 
is caused by an integer overflow while calling getFilePointer() in Lucene50PostingsWriter, which eventually calls the size() method in ByteBuffersDataOutput.}} {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} }} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { {color:#FF}int fullBlockSize = (blockCount - 1) * blockSize();{color} int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} In my case, I had a blockCount = 65 and a blockSize() = 33554432 which overflows fullBlockSize. The fix: {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} }} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { {color:#FF}long fullBlockSize = 1L * (blockCount - 1) * blockSize();{color} int lastBlockSize =
[jira] [Updated] (LUCENE-8624) ByteBuffersDataOutput Integer Overflow
[ https://issues.apache.org/jira/browse/LUCENE-8624?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Mulugeta Mammo updated LUCENE-8624: --- Description: Hi, When indexing large data sets with ByteBuffersDirectory, an exception like the below is thrown: Caused by: java.lang.IllegalArgumentException: cannot write negative vLong (got: -4294888321) at org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:182) at org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) at org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) at org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) at org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) at org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) at org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) at org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) {{The exception is caused by an integer overflow while calling getFilePointer() in Lucene50PostingsWriter, which eventually calls the size() method in ByteBuffersDataOutput.}} {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { int fullBlockSize = (blockCount - 1) * blockSize(); int 
lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} In my case, I had a blockCount = 65 and a blockSize() = 33554432 which overflows fullBlockSize. The fix: {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { long fullBlockSize = 1L * (blockCount - 1) * blockSize(); int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} Thanks was: Hi, When indexing large data sets with ByteBuffersDirectory, an exception like the below is thrown: {{}}Caused by: java.lang.IllegalArgumentException: cannot write negative vLong (got: -4294888321) at org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:182) at org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) at org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) at org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) at org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) at org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) at org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) at org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) {{The exception 
is caused by an integer overflow while calling getFilePointer() in Lucene50PostingsWriter, which eventually calls the size() method in ByteBuffersDataOutput.}} {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { int fullBlockSize = (blockCount - 1) * blockSize(); int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} In my case, I had a blockCount = 65 and a blockSize() = 33554432 which overflows fullBlockSize. The fix: {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { long fullBlockSize = 1L * (blockCount - 1) * blockSize(); int lastBlockSize = blocks.getLast().position(); size = fullBlockSize +
[jira] [Updated] (LUCENE-8624) ByteBuffersDataOutput Integer Overflow
[ https://issues.apache.org/jira/browse/LUCENE-8624?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Mulugeta Mammo updated LUCENE-8624: --- Description: Hi, When indexing large data sets with ByteBuffersDirectory, an exception like the below is thrown: {{}}Caused by: java.lang.IllegalArgumentException: cannot write negative vLong (got: -4294888321) at org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:182) at org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) at org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) at org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) at org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) at org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) at org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) at org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) {{The exception is caused by an integer overflow while calling getFilePointer() in Lucene50PostingsWriter, which eventually calls the size() method in ByteBuffersDataOutput.}} {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} }} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { {color:#FF}int fullBlockSize = (blockCount - 1) * 
blockSize();{color} int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} In my case, I had a blockCount = 65 and a blockSize() = 33554432 which overflows fullBlockSize. The fix: {code:java|title=ByteBuffersDataOutput.java|borderStyle=solid} }} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { {color:#FF}long fullBlockSize = 1L * (blockCount - 1) * blockSize();{color} int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} Thanks was: Hi, When indexing large data sets with ByteBuffersDirectory, an exception like the below is thrown: {{}}Caused by: java.lang.IllegalArgumentException: cannot write negative vLong (got: -4294888321) at org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:182) at org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) at org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) at org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) at org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) at org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) at org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) at org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) at org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) at 
org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) {{The exception is caused by an integer overflow while calling getFilePointer() in Lucene50PostingsWriter, which eventually calls the size() method in ByteBuffersDataOutput.}} {{{code:title=ByteBuffersDataOutput.java|borderStyle=solid} }} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { {color:#FF}int fullBlockSize = (blockCount - 1) * blockSize();{color} int lastBlockSize = blocks.getLast().position(); size = fullBlockSize + lastBlockSize; } return size; } {code} In my case, I had a blockCount = 65 and a blockSize() = 33554432 which overflows fullBlockSize. The fix: {{{code:title=ByteBuffersDataOutput.java|borderStyle=solid}}} public long size() { long size = 0; int blockCount = blocks.size(); if (blockCount >= 1) { {color:#FF}long fullBlockSize = 1L * (blockCount - 1)
[jira] [Commented] (SOLR-13088) Add zplot Stream Evaluator to plot math expressions in Apache Zeppelin
[ https://issues.apache.org/jira/browse/SOLR-13088?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16729838#comment-16729838 ] ASF subversion and git services commented on SOLR-13088: Commit c236ad8a18a6d48a5d1be321fe22865052929064 in lucene-solr's branch refs/heads/branch_7x from Joel Bernstein [ https://git-wip-us.apache.org/repos/asf?p=lucene-solr.git;h=c236ad8 ] SOLR-13088: Add zplot Stream Evaluator to plot math expressions in Apache Zeppelin > Add zplot Stream Evaluator to plot math expressions in Apache Zeppelin > -- > > Key: SOLR-13088 > URL: https://issues.apache.org/jira/browse/SOLR-13088 > Project: Solr > Issue Type: New Feature > Security Level: Public(Default Security Level. Issues are Public) >Reporter: Joel Bernstein >Assignee: Joel Bernstein >Priority: Major > Attachments: SOLR-13088.patch, SOLR-13088.patch, SOLR-13088.patch, > Screen Shot 2018-12-21 at 5.53.18 PM.png, Screen Shot 2018-12-22 at 4.04.41 > PM.png > > > The Solr Zeppelin interpreter ([https://github.com/lucidworks/zeppelin-solr]) > can already execute Streaming Expressions and therefore Math Expressions. > The *zplot* function will export the results of Solr Math Expressions in a > format the Solr Zeppelin interpreter can work with. This will allow results > of Solr Math Expressions to be plotted by *Apache Zeppelin.* > Sample syntax: > {code:java} > let(a=array(1,2,3), > b=array(4,5,6), > zplot(line1=a, line2=b, linec=array(7,8,9))){code} -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Commented] (SOLR-13088) Add zplot Stream Evaluator to plot math expressions in Apache Zeppelin
[ https://issues.apache.org/jira/browse/SOLR-13088?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16729825#comment-16729825 ] ASF subversion and git services commented on SOLR-13088: Commit d018cd18f4470982ffae6e7ff6c7de3ad868bec3 in lucene-solr's branch refs/heads/master from Joel Bernstein [ https://git-wip-us.apache.org/repos/asf?p=lucene-solr.git;h=d018cd1 ] SOLR-13088: Add zplot Stream Evaluator to plot math expressions in Apache Zeppelin > Add zplot Stream Evaluator to plot math expressions in Apache Zeppelin > -- > > Key: SOLR-13088 > URL: https://issues.apache.org/jira/browse/SOLR-13088 > Project: Solr > Issue Type: New Feature > Security Level: Public(Default Security Level. Issues are Public) >Reporter: Joel Bernstein >Assignee: Joel Bernstein >Priority: Major > Attachments: SOLR-13088.patch, SOLR-13088.patch, SOLR-13088.patch, > Screen Shot 2018-12-21 at 5.53.18 PM.png, Screen Shot 2018-12-22 at 4.04.41 > PM.png > > > The Solr Zeppelin interpreter ([https://github.com/lucidworks/zeppelin-solr]) > can already execute Streaming Expressions and therefore Math Expressions. > The *zplot* function will export the results of Solr Math Expressions in a > format the Solr Zeppelin interpreter can work with. This will allow results > of Solr Math Expressions to be plotted by *Apache Zeppelin.* > Sample syntax: > {code:java} > let(a=array(1,2,3), > b=array(4,5,6), > zplot(line1=a, line2=b, linec=array(7,8,9))){code} -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[JENKINS] Lucene-Solr-Tests-master - Build # 3097 - Still Unstable
Build: https://builds.apache.org/job/Lucene-Solr-Tests-master/3097/ 1 tests failed. FAILED: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration.testNodeLostTriggerRestoreState Error Message: The trigger did not fire at all Stack Trace: java.lang.AssertionError: The trigger did not fire at all at __randomizedtesting.SeedInfo.seed([C027C5CB4E06A7B8:EBD81090D47EB268]:0) at org.junit.Assert.fail(Assert.java:88) at org.junit.Assert.assertTrue(Assert.java:41) at org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration.testNodeLostTriggerRestoreState(TestSimTriggerIntegration.java:332) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750) at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938) at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974) at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817) at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468) at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947) at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832) at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883) at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53) at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at java.lang.Thread.run(Thread.java:748) Build Log: [...truncated 12606 lines...] [junit4] Suite: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration [junit4] 2> Creating dataDir:
[JENKINS] Lucene-Solr-repro - Build # 2576 - Still Unstable
Build: https://builds.apache.org/job/Lucene-Solr-repro/2576/ [...truncated 33 lines...] [repro] Jenkins log URL: https://builds.apache.org/job/Lucene-Solr-Tests-master/3096/consoleText [repro] Revision: 106d300052baf885fc70ed98b92d3b89e1baf1cc [repro] Repro line: ant test -Dtestcase=TestSimTriggerIntegration -Dtests.method=testNodeAddedTriggerRestoreState -Dtests.seed=83616FE74B9B0F2F -Dtests.multiplier=2 -Dtests.slow=true -Dtests.locale=cs-CZ -Dtests.timezone=America/Edmonton -Dtests.asserts=true -Dtests.file.encoding=US-ASCII [repro] git rev-parse --abbrev-ref HEAD [repro] git rev-parse HEAD [repro] Initial local git branch/revision: 106d300052baf885fc70ed98b92d3b89e1baf1cc [repro] git fetch [repro] git checkout 106d300052baf885fc70ed98b92d3b89e1baf1cc [...truncated 1 lines...] [repro] git merge --ff-only [...truncated 1 lines...] [repro] ant clean [...truncated 6 lines...] [repro] Test suites by module: [repro]solr/core [repro] TestSimTriggerIntegration [repro] ant compile-test [...truncated 3592 lines...] [repro] ant test-nocompile -Dtests.dups=5 -Dtests.maxfailures=5 -Dtests.class="*.TestSimTriggerIntegration" -Dtests.showOutput=onerror -Dtests.seed=83616FE74B9B0F2F -Dtests.multiplier=2 -Dtests.slow=true -Dtests.locale=cs-CZ -Dtests.timezone=America/Edmonton -Dtests.asserts=true -Dtests.file.encoding=US-ASCII [...truncated 7276 lines...] [repro] Setting last failure code to 256 [repro] Failures: [repro] 5/5 failed: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration [repro] Re-testing 100% failures at the tip of master [repro] git fetch [repro] git checkout master [...truncated 4 lines...] [repro] git merge --ff-only [...truncated 6 lines...] [repro] ant clean [...truncated 8 lines...] [repro] Test suites by module: [repro]solr/core [repro] TestSimTriggerIntegration [repro] ant compile-test [...truncated 3592 lines...] 
[repro] ant test-nocompile -Dtests.dups=5 -Dtests.maxfailures=5 -Dtests.class="*.TestSimTriggerIntegration" -Dtests.showOutput=onerror -Dtests.seed=83616FE74B9B0F2F -Dtests.multiplier=2 -Dtests.slow=true -Dtests.locale=cs-CZ -Dtests.timezone=America/Edmonton -Dtests.asserts=true -Dtests.file.encoding=US-ASCII [...truncated 5875 lines...] [repro] Setting last failure code to 256 [repro] Failures at the tip of master: [repro] 4/5 failed: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration [repro] git checkout 106d300052baf885fc70ed98b92d3b89e1baf1cc [...truncated 8 lines...] [repro] Exiting with code 256 [...truncated 5 lines...] - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[JENKINS] Lucene-Solr-BadApples-Tests-7.x - Build # 249 - Failure
Build: https://builds.apache.org/job/Lucene-Solr-BadApples-Tests-7.x/249/ No tests ran. Build Log: [...truncated 49262 lines...] ERROR: command execution failed. ERROR: Step ‘Archive the artifacts’ failed: no workspace for Lucene-Solr-BadApples-Tests-7.x #249 ERROR: Step ‘Publish JUnit test result report’ failed: no workspace for Lucene-Solr-BadApples-Tests-7.x #249 ERROR: lucene2 is offline; cannot locate JDK 1.8 (latest) ERROR: lucene2 is offline; cannot locate JDK 1.8 (latest) ERROR: lucene2 is offline; cannot locate JDK 1.8 (latest) ERROR: lucene2 is offline; cannot locate JDK 1.8 (latest) ERROR: lucene2 is offline; cannot locate JDK 1.8 (latest) Email was triggered for: Failure - Any Sending email for trigger: Failure - Any ERROR: lucene2 is offline; cannot locate JDK 1.8 (latest) ERROR: lucene2 is offline; cannot locate JDK 1.8 (latest) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Commented] (SOLR-13091) REBALANCELEADERS is broken
[ https://issues.apache.org/jira/browse/SOLR-13091?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16729729#comment-16729729 ] Erick Erickson commented on SOLR-13091: --- This is a test-only patch illustrating the problem with distributing any shardUnique property. It doesn't matter whether the property is an arbitrary one with the shardUnique flag explicitly set or preferredLeader without shardUnique set. It doesn't matter whether the property is capitalized or not. Unfortunately, it doesn't reproduce with a particular seed. It does fail about 25% of the time when I beasted it overnight. What's particularly interesting is that when the test runs, it uses one of two methods to set the property, one with the admin API and one just making a request setting parameters. ONLY the one setting parameters fails, see: {code:java} setPropWithStandardRequest{code} All the failures are in: testBalancePropertySliceUnique I'll be digging more later, but wanted to preserve this. NOTE: the other test that sets a shardUnique property manually (and repeatedly) using one of those two methods works all the time, it's only the test that tries to have the properties distributed manually that fails. The test code is not complete, has nocommits in it and the like as well as extraneous output. What you get is a shard with the supposedly unique property twice, e.g. {quote}"shard2":{ [junit4] > "range":"c000-", [junit4] > "state":"active", [junit4] > "replicas":{ [junit4] > "core_node10": Unknown macro: \{ [junit4] > "core"} , [junit4] > "core_node11": Unknown macro: \{ [junit4] > "core"} , [junit4] > "core_node13": Unknown macro: \{ [junit4] > "core"} }}, {quote} > REBALANCELEADERS is broken > -- > > Key: SOLR-13091 > URL: https://issues.apache.org/jira/browse/SOLR-13091 > Project: Solr > Issue Type: Bug > Security Level: Public(Default Security Level. 
Issues are Public) > Components: SolrCloud >Affects Versions: 7.6 >Reporter: Erick Erickson >Assignee: Erick Erickson >Priority: Major > Attachments: SOLR-13091.patch > > > From the user's list, the REBALANCELEADERS API doesn't seem to work > correctly. Manual testing shows the problem. > It's disturbing that the unit test doesn't catch this. That'll be the first > thing to fix. -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Updated] (SOLR-13091) REBALANCELEADERS is broken
[ https://issues.apache.org/jira/browse/SOLR-13091?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ] Erick Erickson updated SOLR-13091: -- Attachment: SOLR-13091.patch > REBALANCELEADERS is broken > -- > > Key: SOLR-13091 > URL: https://issues.apache.org/jira/browse/SOLR-13091 > Project: Solr > Issue Type: Bug > Security Level: Public(Default Security Level. Issues are Public) > Components: SolrCloud >Affects Versions: 7.6 >Reporter: Erick Erickson >Assignee: Erick Erickson >Priority: Major > Attachments: SOLR-13091.patch > > > From the user's list, the REBALANCELEADERS API doesn't seem to work > correctly. Manual testing shows the problem. > It's disturbing that the unit test doesn't catch this. That'll be the first > thing to fix. -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[JENKINS] Lucene-Solr-repro - Build # 2575 - Still unstable
Build: https://builds.apache.org/job/Lucene-Solr-repro/2575/ [...truncated 28 lines...] [repro] Jenkins log URL: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1733/consoleText [repro] Revision: 106d300052baf885fc70ed98b92d3b89e1baf1cc [repro] Ant options: -Dtests.multiplier=2 -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt [repro] Repro line: ant test -Dtestcase=HdfsRestartWhileUpdatingTest -Dtests.method=test -Dtests.seed=E2D2198E70FF6BB6 -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=ro -Dtests.timezone=Iceland -Dtests.asserts=true -Dtests.file.encoding=UTF-8 [repro] Repro line: ant test -Dtestcase=ChaosMonkeySafeLeaderWithPullReplicasTest -Dtests.seed=E2D2198E70FF6BB6 -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=tr-TR -Dtests.timezone=Etc/GMT+8 -Dtests.asserts=true -Dtests.file.encoding=UTF-8 [repro] Repro line: ant test -Dtestcase=RestartWhileUpdatingTest -Dtests.method=test -Dtests.seed=E2D2198E70FF6BB6 -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=mk-MK -Dtests.timezone=America/Rosario -Dtests.asserts=true -Dtests.file.encoding=UTF-8 [repro] git rev-parse --abbrev-ref HEAD [repro] git rev-parse HEAD [repro] Initial local git branch/revision: 106d300052baf885fc70ed98b92d3b89e1baf1cc [repro] git fetch [repro] git checkout 106d300052baf885fc70ed98b92d3b89e1baf1cc [...truncated 1 lines...] [repro] git merge --ff-only [...truncated 1 lines...] [repro] ant clean [...truncated 6 lines...] 
[repro] Test suites by module: [repro]solr/core [repro] RestartWhileUpdatingTest [repro] HdfsRestartWhileUpdatingTest [repro] ChaosMonkeySafeLeaderWithPullReplicasTest [repro] ant compile-test [...truncated 3592 lines...] [repro] ant test-nocompile -Dtests.dups=5 -Dtests.maxfailures=15 -Dtests.class="*.RestartWhileUpdatingTest|*.HdfsRestartWhileUpdatingTest|*.ChaosMonkeySafeLeaderWithPullReplicasTest" -Dtests.showOutput=onerror -Dtests.multiplier=2 -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.seed=E2D2198E70FF6BB6 -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=mk-MK -Dtests.timezone=America/Rosario -Dtests.asserts=true -Dtests.file.encoding=UTF-8 [...truncated 101752 lines...] [repro] Setting last failure code to 256 [repro] Failures: [repro] 0/5 failed: org.apache.solr.cloud.ChaosMonkeySafeLeaderWithPullReplicasTest [repro] 0/5 failed: org.apache.solr.cloud.RestartWhileUpdatingTest [repro] 3/5 failed: org.apache.solr.cloud.hdfs.HdfsRestartWhileUpdatingTest [repro] git checkout 106d300052baf885fc70ed98b92d3b89e1baf1cc [...truncated 1 lines...] [repro] Exiting with code 256 [...truncated 5 lines...] - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Commented] (LUCENE-8623) Decrease I/O pressure when merging high dimensional points
[ https://issues.apache.org/jira/browse/LUCENE-8623?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16729635#comment-16729635 ] Lucene/Solr QA commented on LUCENE-8623: | (x) *{color:red}-1 overall{color}* | \\ \\ || Vote || Subsystem || Runtime || Comment || || || || || {color:brown} Prechecks {color} || | {color:red}-1{color} | {color:red} test4tests {color} | {color:red} 0m 0s{color} | {color:red} The patch doesn't appear to include any new or modified tests. Please justify why no new tests are needed for this patch. Also please list what manual steps were performed to verify this patch. {color} | || || || || {color:brown} master Compile Tests {color} || | {color:green}+1{color} | {color:green} compile {color} | {color:green} 0m 45s{color} | {color:green} master passed {color} | || || || || {color:brown} Patch Compile Tests {color} || | {color:green}+1{color} | {color:green} compile {color} | {color:green} 0m 20s{color} | {color:green} the patch passed {color} | | {color:green}+1{color} | {color:green} javac {color} | {color:green} 0m 20s{color} | {color:green} the patch passed {color} | | {color:green}+1{color} | {color:green} Release audit (RAT) {color} | {color:green} 0m 20s{color} | {color:green} the patch passed {color} | | {color:green}+1{color} | {color:green} Check forbidden APIs {color} | {color:green} 0m 20s{color} | {color:green} the patch passed {color} | | {color:green}+1{color} | {color:green} Validate source patterns {color} | {color:green} 0m 20s{color} | {color:green} the patch passed {color} | || || || || {color:brown} Other Tests {color} || | {color:green}+1{color} | {color:green} unit {color} | {color:green} 29m 11s{color} | {color:green} core in the patch passed. 
{color} | | {color:black}{color} | {color:black} {color} | {color:black} 34m 57s{color} | {color:black} {color} | \\ \\ || Subsystem || Report/Notes || | JIRA Issue | LUCENE-8623 | | JIRA Patch URL | https://issues.apache.org/jira/secure/attachment/12953120/LUCENE-8623.patch | | Optional Tests | compile javac unit ratsources checkforbiddenapis validatesourcepatterns | | uname | Linux lucene2-us-west.apache.org 4.4.0-112-generic #135-Ubuntu SMP Fri Jan 19 11:48:36 UTC 2018 x86_64 x86_64 x86_64 GNU/Linux | | Build tool | ant | | Personality | /home/jenkins/jenkins-slave/workspace/PreCommit-LUCENE-Build/sourcedir/dev-tools/test-patch/lucene-solr-yetus-personality.sh | | git revision | master / 106d300 | | ant | version: Apache Ant(TM) version 1.9.6 compiled on July 20 2018 | | Default Java | 1.8.0_191 | | Test Results | https://builds.apache.org/job/PreCommit-LUCENE-Build/143/testReport/ | | modules | C: lucene/core U: lucene/core | | Console output | https://builds.apache.org/job/PreCommit-LUCENE-Build/143/console | | Powered by | Apache Yetus 0.7.0 http://yetus.apache.org | This message was automatically generated. > Decrease I/O pressure when merging high dimensional points > -- > > Key: LUCENE-8623 > URL: https://issues.apache.org/jira/browse/LUCENE-8623 > Project: Lucene - Core > Issue Type: Improvement >Reporter: Ignacio Vera >Priority: Major > Attachments: LUCENE-8623.patch > > > Related with LUCENE-8619, after indexing 60 million shapes(~1.65 billion > triangles) using {{LatLonShape}}, the index directory grew to a size of 265 > GB when performing merging of different segments. After the processes were > over the index size was 57 GB. > As an example imagine we are merging several segments to a new segment of > size 10GB (4 dimensions). 
The BKD tree merging logic will create the > following files: > 1) Level 0: 4 copies of the data, each one sorted by one dimensions : 40GB > 2) Level 1: 6 copies of half of the data, left and right : 30GB > 3) Level 2: 6 copies of one quarter of the data, left and right : 15 GB > 4) Level 3: 6 more copies halving the previous level, left and right : 7.5 GB > 5) Level 4: 6 more copies halving the previous level, left and right : 3.75 GB > > and so on... So it requires around 100GB to merge that segment. > In this issue is proposed to delay the creation of sorted copies to when they > are needed. It reduces the total size required to half of what it is needed > now. > > > -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Commented] (SOLR-13029) Allow HDFS backup/restore buffer size to be configured
[ https://issues.apache.org/jira/browse/SOLR-13029?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16729614#comment-16729614 ] Mikhail Khludnev commented on SOLR-13029: - [~varunthacker], [~hgadre], what's your take on that? > Allow HDFS backup/restore buffer size to be configured > -- > > Key: SOLR-13029 > URL: https://issues.apache.org/jira/browse/SOLR-13029 > Project: Solr > Issue Type: Improvement > Security Level: Public(Default Security Level. Issues are Public) > Components: Backup/Restore, hdfs >Affects Versions: 7.5, master (8.0) >Reporter: Tim Owen >Priority: Major > Attachments: SOLR-13029.patch, SOLR-13029.patch > > > There's a default hardcoded buffer size setting of 4096 in the HDFS code > which means in particular that restoring a backup from HDFS takes a long > time. Copying multi-GB files from HDFS using a buffer as small as 4096 bytes > is very inefficient. We changed this in our local build used in production to > 256kB and saw a 10x speed improvement when restoring a backup. Attached patch > simply makes this size configurable using a command line setting, much like > several other buffer size values. -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[jira] [Commented] (SOLR-7414) CSVResponseWriter returns empty field when fl alias is combined with '*' selector
[ https://issues.apache.org/jira/browse/SOLR-7414?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=16729610#comment-16729610 ] Mikhail Khludnev commented on SOLR-7414: [~ichattopadhyaya], how it goes? > CSVResponseWriter returns empty field when fl alias is combined with '*' > selector > - > > Key: SOLR-7414 > URL: https://issues.apache.org/jira/browse/SOLR-7414 > Project: Solr > Issue Type: Bug > Components: Response Writers >Reporter: Michael Lawrence >Priority: Major > Attachments: SOLR-7414-old.patch, SOLR-7414.patch, SOLR-7414.patch > > > Attempting to retrieve all fields while renaming one, e.g., "inStock" to > "stocked" (URL below), results in CSV output that has a column for "inStock" > (should be "stocked"), and the column has no values. > steps to reproduce using 5.1... > {noformat} > $ bin/solr -e techproducts > ... > $ curl -X POST -H 'Content-Type: application/json' > 'http://localhost:8983/solr/techproducts/update?commit=true' --data-binary > '[{ "id" : "aaa", "bar_i" : 7, "inStock" : true }, { "id" : "bbb", "bar_i" : > 7, "inStock" : false }, { "id" : "ccc", "bar_i" : 7, "inStock" : true }]' > {"responseHeader":{"status":0,"QTime":730}} > $ curl > 'http://localhost:8983/solr/techproducts/query?q=bar_i:7=id,stocked:inStock=csv' > id,stocked > aaa,true > bbb,false > ccc,true > $ curl > 'http://localhost:8983/solr/techproducts/query?q=bar_i:7=*,stocked:inStock=csv' > bar_i,id,_version_,inStock > 7,aaa,1498719888088236032, > 7,bbb,1498719888090333184, > 7,ccc,1498719888090333185, > $ curl > 'http://localhost:8983/solr/techproducts/query?q=bar_i:7=stocked:inStock,*=csv' > bar_i,id,_version_,inStock > 7,aaa,1498719888088236032, > 7,bbb,1498719888090333184, > 7,ccc,1498719888090333185, > {noformat} -- This message was sent by Atlassian JIRA (v7.6.3#76005) - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[JENKINS] Lucene-Solr-Tests-master - Build # 3096 - Unstable
Build: https://builds.apache.org/job/Lucene-Solr-Tests-master/3096/ 1 tests failed. FAILED: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration.testNodeAddedTriggerRestoreState Error Message: The trigger did not fire at all Stack Trace: java.lang.AssertionError: The trigger did not fire at all at __randomizedtesting.SeedInfo.seed([83616FE74B9B0F2F:B5CE698715BEE82]:0) at org.junit.Assert.fail(Assert.java:88) at org.junit.Assert.assertTrue(Assert.java:41) at org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration.testNodeAddedTriggerRestoreState(TestSimTriggerIntegration.java:390) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750) at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938) at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974) at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817) at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468) at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947) at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832) at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883) at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53) at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at java.lang.Thread.run(Thread.java:748) Build Log: [...truncated 13735 lines...] [junit4] Suite: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration [junit4] 2> Creating dataDir:
[JENKINS] Lucene-Solr-master-Solaris (64bit/jdk1.8.0) - Build # 2226 - Failure!
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-Solaris/2226/ Java: 64bit/jdk1.8.0 -XX:-UseCompressedOops -XX:+UseG1GC All tests passed Build Log: [...truncated 15748 lines...] [junit4] Suite: org.apache.solr.cloud.MultiThreadedOCPTest [junit4] 2> 2038665 INFO (SUITE-MultiThreadedOCPTest-seed#[886D3004CABE64E8]-worker) [] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom [junit4] 2> Creating dataDir: /export/home/jenkins/workspace/Lucene-Solr-master-Solaris/solr/build/solr-core/test/J1/temp/solr.cloud.MultiThreadedOCPTest_886D3004CABE64E8-001/init-core-data-001 [junit4] 2> 2038666 WARN (SUITE-MultiThreadedOCPTest-seed#[886D3004CABE64E8]-worker) [] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=19 numCloses=19 [junit4] 2> 2038666 INFO (SUITE-MultiThreadedOCPTest-seed#[886D3004CABE64E8]-worker) [] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=false [junit4] 2> 2038667 INFO (SUITE-MultiThreadedOCPTest-seed#[886D3004CABE64E8]-worker) [] o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (true) via: @org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN) [junit4] 2> 2038668 INFO (SUITE-MultiThreadedOCPTest-seed#[886D3004CABE64E8]-worker) [] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: / [junit4] 2> 2038671 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER [junit4] 2> 2038672 INFO (ZkTestServer Run Thread) [] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0 [junit4] 2> 2038672 INFO (ZkTestServer Run Thread) [] o.a.s.c.ZkTestServer Starting server [junit4] 2> 2038773 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer start zk server on port:63122 [junit4] 2> 2038773 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer parse host and port list: 
127.0.0.1:63122 [junit4] 2> 2038773 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer connecting to 127.0.0.1 63122 [junit4] 2> 2038785 INFO (zkConnectionManagerCallback-15371-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 2038788 INFO (zkConnectionManagerCallback-15373-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 2038789 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer put /export/home/jenkins/workspace/Lucene-Solr-master-Solaris/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml [junit4] 2> 2038790 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer put /export/home/jenkins/workspace/Lucene-Solr-master-Solaris/solr/core/src/test-files/solr/collection1/conf/schema.xml to /configs/conf1/schema.xml [junit4] 2> 2038791 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer put /export/home/jenkins/workspace/Lucene-Solr-master-Solaris/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml [junit4] 2> 2038792 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer put /export/home/jenkins/workspace/Lucene-Solr-master-Solaris/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt [junit4] 2> 2038794 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer put /export/home/jenkins/workspace/Lucene-Solr-master-Solaris/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt [junit4] 2> 2038795 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer put /export/home/jenkins/workspace/Lucene-Solr-master-Solaris/solr/core/src/test-files/solr/collection1/conf/currency.xml to 
/configs/conf1/currency.xml [junit4] 2> 2038796 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer put /export/home/jenkins/workspace/Lucene-Solr-master-Solaris/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml [junit4] 2> 2038797 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer put /export/home/jenkins/workspace/Lucene-Solr-master-Solaris/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json [junit4] 2> 2038798 INFO (TEST-MultiThreadedOCPTest.test-seed#[886D3004CABE64E8]) [] o.a.s.c.ZkTestServer put
[JENKINS] Lucene-Solr-7.x-Linux (64bit/jdk-10.0.1) - Build # 3285 - Still Unstable!
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-7.x-Linux/3285/ Java: 64bit/jdk-10.0.1 -XX:+UseCompressedOops -XX:+UseG1GC 1 tests failed. FAILED: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration.testNodeAddedTriggerRestoreState Error Message: The trigger did not fire at all Stack Trace: java.lang.AssertionError: The trigger did not fire at all at __randomizedtesting.SeedInfo.seed([A9E4295D421E1406:21D9A02278DEF5AB]:0) at org.junit.Assert.fail(Assert.java:88) at org.junit.Assert.assertTrue(Assert.java:41) at org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration.testNodeAddedTriggerRestoreState(TestSimTriggerIntegration.java:390) at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.base/java.lang.reflect.Method.invoke(Method.java:564) at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750) at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938) at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974) at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817) at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468) at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947) at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832) at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883) at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53) at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at java.base/java.lang.Thread.run(Thread.java:844) Build Log: [...truncated 12635 lines...] [junit4] Suite: org.apache.solr.cloud.autoscaling.sim.TestSimTriggerIntegration [junit4] 2> 13171 INFO (SUITE-TestSimTriggerIntegration-seed#[A9E4295D421E1406]-worker) []
Re: ByteBuffersDirectory throws Exception
Looks like an int overflow to me, but I'd have to take a look at the code to see where it can originate. Please file a Jira issue, attach the full stack trace and name it "int overflow in Lucene50SkipWriter.writeSkipData" because that's where the problem is. We'll definitely take a look. D. On Thu, Dec 27, 2018 at 10:34 AM John Wilson wrote: > > The data set I'm experimenting with is ~100G. If I break it up into four 25G > data sets and index them individually, it works just fine. But if I try to > index the entire 100G it breaks. I'm not sure if there is a size limitation > or something (FYI, I have a really large heap of size 256G). > > Thanks, > > On Thu, Dec 27, 2018 at 12:27 AM Dawid Weiss wrote: >> >> This looks like a bug, the underlying cause is: >> >> Caused by: java.lang.IllegalArgumentException: cannot write negative >> vLong (got: -4294878395) >> at org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) >> at >> org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:180) >> >> Can you reproduce it on a smaller example and file a Jira issue? >> >> Dawid >> >> On Thu, Dec 27, 2018 at 7:18 AM John Wilson wrote: >> > >> > Hi, >> > >> > I'm getting the below error message while running a simple multi-threaded >> > indexing using ByteBuffersDirectory. Any suggestions? 
>> > >> > Exception in thread "Lucene Merge Thread #879" >> > org.apache.lucene.index.MergePolicy$MergeException: >> > org.apache.lucene.store.AlreadyClosedException: refusing to delete any >> > files: this IndexWriter hit an unrecoverable exception >> > at >> > org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:705) >> > at >> > org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:685) >> > Caused by: org.apache.lucene.store.AlreadyClosedException: refusing to >> > delete any files: this IndexWriter hit an unrecoverable exception >> > at >> > org.apache.lucene.index.IndexFileDeleter.ensureOpen(IndexFileDeleter.java:349) >> > at >> > org.apache.lucene.index.IndexFileDeleter.deleteFiles(IndexFileDeleter.java:669) >> > at >> > org.apache.lucene.index.IndexFileDeleter.deleteNewFiles(IndexFileDeleter.java:664) >> > at >> > org.apache.lucene.index.IndexWriter.deleteNewFiles(IndexWriter.java:5024) >> > at >> > org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4539) >> > at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) >> > at >> > org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:626) >> > at >> > org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:663) >> > Caused by: java.lang.IllegalArgumentException: cannot write negative vLong >> > (got: -4294878395) >> > at >> > org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) >> > at >> > org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:180) >> > at >> > org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) >> > at >> > org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) >> > at >> > org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) >> > at 
>> > org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) >> > at >> > org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) >> > at >> > org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) >> > at >> > org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) >> > at >> > org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) >> > at >> > org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) >> > at >> > org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) >> > at >> > org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) >> > ... 3 more >> > >> >> - >> To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org >> For additional commands, e-mail: dev-h...@lucene.apache.org >> - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org
[JENKINS] Lucene-Solr-7.x-Linux (32bit/jdk1.8.0_172) - Build # 3284 - Unstable!
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-7.x-Linux/3284/ Java: 32bit/jdk1.8.0_172 -server -XX:+UseParallelGC 1 tests failed. FAILED: org.apache.solr.cloud.DeleteReplicaTest.deleteLiveReplicaTest Error Message: expected:<0> but was:<1> Stack Trace: java.lang.AssertionError: expected:<0> but was:<1> at __randomizedtesting.SeedInfo.seed([5AC7A70892ED8B45:F7A713038FD22330]:0) at org.junit.Assert.fail(Assert.java:88) at org.junit.Assert.failNotEquals(Assert.java:834) at org.junit.Assert.assertEquals(Assert.java:645) at org.junit.Assert.assertEquals(Assert.java:631) at org.apache.solr.cloud.DeleteReplicaTest.deleteLiveReplicaTest(DeleteReplicaTest.java:127) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750) at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938) at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974) at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817) at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468) at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947) at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832) at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883) at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57) at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53) at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47) at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54) at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) at java.lang.Thread.run(Thread.java:748) Build Log: [...truncated 13595 lines...] [junit4] Suite: org.apache.solr.cloud.DeleteReplicaTest [junit4] 2> Creating dataDir:
[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1733 - Failure
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1733/ 3 tests failed. FAILED: junit.framework.TestSuite.org.apache.solr.cloud.ChaosMonkeySafeLeaderWithPullReplicasTest Error Message: ObjectTracker found 6 object(s) that were not released!!! [MMapDirectory, MMapDirectory, InternalHttpClient, SolrCore, MMapDirectory, MMapDirectory] org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.MMapDirectory at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42) at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348) at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:503) at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:346) at org.apache.solr.handler.ReplicationHandler.doFetch(ReplicationHandler.java:424) at org.apache.solr.handler.ReplicationHandler.lambda$setupPolling$13(ReplicationHandler.java:1184) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308) at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180) at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.MMapDirectory at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42) at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348) at org.apache.solr.core.SolrCore.getNewIndexDir(SolrCore.java:359) at org.apache.solr.core.SolrCore.initIndex(SolrCore.java:738) at 
org.apache.solr.core.SolrCore.(SolrCore.java:967) at org.apache.solr.core.SolrCore.(SolrCore.java:874) at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1191) at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:697) at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:197) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.http.impl.client.InternalHttpClient at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42) at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:321) at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:330) at org.apache.solr.handler.IndexFetcher.createHttpClient(IndexFetcher.java:225) at org.apache.solr.handler.IndexFetcher.(IndexFetcher.java:267) at org.apache.solr.handler.ReplicationHandler.doFetch(ReplicationHandler.java:420) at org.apache.solr.cloud.RecoveryStrategy.replicate(RecoveryStrategy.java:237) at org.apache.solr.cloud.RecoveryStrategy.doReplicateOnlyRecovery(RecoveryStrategy.java:382) at org.apache.solr.cloud.RecoveryStrategy.doRecovery(RecoveryStrategy.java:328) at org.apache.solr.cloud.RecoveryStrategy.run(RecoveryStrategy.java:307) at com.codahale.metrics.InstrumentedExecutorService$InstrumentedRunnable.run(InstrumentedExecutorService.java:176) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at 
org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.solr.core.SolrCore at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42) at org.apache.solr.core.SolrCore.(SolrCore.java:1054) at org.apache.solr.core.SolrCore.(SolrCore.java:874) at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1191) at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:697) at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:197) at
Re: ByteBuffersDirectory throws Exception
The data set I'm experimenting with is ~100G. If I break it up into four 25G data sets and index is individually, it works just fine. But if I try to index the entire 100G it breaks. I'm not sure if there is a size limitation or sth (FYI, I have a really large heap of size 256G). Thanks, On Thu, Dec 27, 2018 at 12:27 AM Dawid Weiss wrote: > This looks like a bug, the underlying cause is: > > Caused by: java.lang.IllegalArgumentException: cannot write negative > vLong (got: -4294878395) > at > org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) > at > org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:180) > > Can you reproduce it on a smaller example and file a Jira issue? > > Dawid > > On Thu, Dec 27, 2018 at 7:18 AM John Wilson > wrote: > > > > Hi, > > > > I'm getting the below error message while running a simple > multi-threaded indexing using ByteBuffersDirectory. Any suggestions? > > > > Exception in thread "Lucene Merge Thread #879" > org.apache.lucene.index.MergePolicy$MergeException: > org.apache.lucene.store.AlreadyClosedException: refusing to delete any > files: this IndexWriter hit an unrecoverable exception > > at > org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:705) > > at > org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:685) > > Caused by: org.apache.lucene.store.AlreadyClosedException: refusing to > delete any files: this IndexWriter hit an unrecoverable exception > > at > org.apache.lucene.index.IndexFileDeleter.ensureOpen(IndexFileDeleter.java:349) > > at > org.apache.lucene.index.IndexFileDeleter.deleteFiles(IndexFileDeleter.java:669) > > at > org.apache.lucene.index.IndexFileDeleter.deleteNewFiles(IndexFileDeleter.java:664) > > at > org.apache.lucene.index.IndexWriter.deleteNewFiles(IndexWriter.java:5024) > > at > org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4539) > > at > 
org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) > > at > org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:626) > > at > org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:663) > > Caused by: java.lang.IllegalArgumentException: cannot write negative > vLong (got: -4294878395) > > at > org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) > > at > org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:180) > > at > org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) > > at > org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) > > at > org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) > > at > org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) > > at > org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) > > at > org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) > > at > org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) > > at > org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) > > at > org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) > > at > org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) > > at > org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) > > ... 3 more > > > > - > To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org > For additional commands, e-mail: dev-h...@lucene.apache.org > >
[JENKINS] Lucene-Solr-NightlyTests-7.x - Build # 414 - Still Unstable
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-7.x/414/ 5 tests failed. FAILED: org.apache.lucene.search.TestInetAddressRangeQueries.testRandomBig Error Message: Test abandoned because suite timeout was reached. Stack Trace: java.lang.Exception: Test abandoned because suite timeout was reached. at __randomizedtesting.SeedInfo.seed([AABCE666E4124468]:0) FAILED: junit.framework.TestSuite.org.apache.lucene.search.TestInetAddressRangeQueries Error Message: Suite timeout exceeded (>= 720 msec). Stack Trace: java.lang.Exception: Suite timeout exceeded (>= 720 msec). at __randomizedtesting.SeedInfo.seed([AABCE666E4124468]:0) FAILED: junit.framework.TestSuite.org.apache.solr.cloud.hdfs.HdfsRestartWhileUpdatingTest Error Message: ObjectTracker found 7 object(s) that were not released!!! [MMapDirectory, MMapDirectory, TransactionLog, SolrCore, SolrIndexSearcher, MDCAwareThreadPoolExecutor, MMapDirectory] org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.MMapDirectory at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42) at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348) at org.apache.solr.core.SolrCore.initSnapshotMetaDataManager(SolrCore.java:508) at org.apache.solr.core.SolrCore.(SolrCore.java:959) at org.apache.solr.core.SolrCore.(SolrCore.java:874) at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1178) at org.apache.solr.core.CoreContainer.create(CoreContainer.java:1088) at org.apache.solr.handler.admin.CoreAdminOperation.lambda$static$0(CoreAdminOperation.java:92) at org.apache.solr.handler.admin.CoreAdminOperation.execute(CoreAdminOperation.java:360) at org.apache.solr.handler.admin.CoreAdminHandler$CallInfo.call(CoreAdminHandler.java:395) at org.apache.solr.handler.admin.CoreAdminHandler.handleRequestBody(CoreAdminHandler.java:180) at 
org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:199) at org.apache.solr.servlet.HttpSolrCall.handleAdmin(HttpSolrCall.java:734) at org.apache.solr.servlet.HttpSolrCall.handleAdminRequest(HttpSolrCall.java:715) at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:496) at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:395) at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:341) at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610) at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:158) at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1588) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1345) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480) at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1557) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1247) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:703) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132) at org.eclipse.jetty.server.Server.handle(Server.java:502) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:364) at 
org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:305) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103) at org.eclipse.jetty.io.ssl.SslConnection$DecryptedEndPoint.onFillable(SslConnection.java:411) at org.eclipse.jetty.io.ssl.SslConnection.onFillable(SslConnection.java:305) at org.eclipse.jetty.io.ssl.SslConnection$2.succeeded(SslConnection.java:159) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103) at org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:118) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765) at org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683) at java.lang.Thread.run(Thread.java:748) org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException:
Re: ByteBuffersDirectory throws Exception
This looks like a bug, the underlying cause is: Caused by: java.lang.IllegalArgumentException: cannot write negative vLong (got: -4294878395) at org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) at org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:180) Can you reproduce it on a smaller example and file a Jira issue? Dawid On Thu, Dec 27, 2018 at 7:18 AM John Wilson wrote: > > Hi, > > I'm getting the below error message while running a simple multi-threaded > indexing using ByteBuffersDirectory. Any suggestions? > > Exception in thread "Lucene Merge Thread #879" > org.apache.lucene.index.MergePolicy$MergeException: > org.apache.lucene.store.AlreadyClosedException: refusing to delete any files: > this IndexWriter hit an unrecoverable exception > at > org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:705) > at > org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:685) > Caused by: org.apache.lucene.store.AlreadyClosedException: refusing to delete > any files: this IndexWriter hit an unrecoverable exception > at > org.apache.lucene.index.IndexFileDeleter.ensureOpen(IndexFileDeleter.java:349) > at > org.apache.lucene.index.IndexFileDeleter.deleteFiles(IndexFileDeleter.java:669) > at > org.apache.lucene.index.IndexFileDeleter.deleteNewFiles(IndexFileDeleter.java:664) > at > org.apache.lucene.index.IndexWriter.deleteNewFiles(IndexWriter.java:5024) > at > org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4539) > at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4075) > at > org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:626) > at > org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:663) > Caused by: java.lang.IllegalArgumentException: cannot write negative vLong > (got: -4294878395) > at 
org.apache.lucene.store.DataOutput.writeVLong(DataOutput.java:225) > at > org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.writeSkipData(Lucene50SkipWriter.java:180) > at > org.apache.lucene.codecs.MultiLevelSkipListWriter.bufferSkip(MultiLevelSkipListWriter.java:143) > at > org.apache.lucene.codecs.lucene50.Lucene50SkipWriter.bufferSkip(Lucene50SkipWriter.java:162) > at > org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter.startDoc(Lucene50PostingsWriter.java:228) > at > org.apache.lucene.codecs.PushPostingsWriterBase.writeTerm(PushPostingsWriterBase.java:148) > at > org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter$TermsWriter.write(BlockTreeTermsWriter.java:865) > at > org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter.write(BlockTreeTermsWriter.java:344) > at > org.apache.lucene.codecs.FieldsConsumer.merge(FieldsConsumer.java:105) > at > org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsWriter.merge(PerFieldPostingsFormat.java:169) > at > org.apache.lucene.index.SegmentMerger.mergeTerms(SegmentMerger.java:244) > at org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:139) > at > org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4453) > ... 3 more > - To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org For additional commands, e-mail: dev-h...@lucene.apache.org