This is an automated email from the ASF dual-hosted git repository. nightowl888 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/lucenenet.git
commit a20fd770759a91d3a154efdc9f6b1636313a8abb Author: rafael-aero <[email protected]> AuthorDate: Sat Oct 17 19:42:11 2020 +0200 start removing delegate-based Debugging.AssertsEnabled --- .../Analysis/Synonym/SynonymFilter.cs | 6 +-- .../Analysis/Synonym/SynonymMap.cs | 4 +- .../Analysis/Util/RollingCharBuffer.cs | 8 ++-- .../BlockTerms/BlockTermsReader.cs | 2 +- .../BlockTerms/FixedGapTermsIndexReader.cs | 6 +-- .../IntBlock/VariableIntBlockIndexInput.cs | 2 +- .../Memory/DirectPostingsFormat.cs | 4 +- .../Pulsing/PulsingPostingsWriter.cs | 2 +- src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs | 2 +- .../SimpleText/SimpleTextDocValuesReader.cs | 2 +- .../SimpleText/SimpleTextDocValuesWriter.cs | 4 +- .../SimpleText/SimpleTextFieldsWriter.cs | 2 +- src/Lucene.Net.Facet/DrillDownQuery.cs | 2 +- src/Lucene.Net.Facet/DrillSidewaysScorer.cs | 2 +- src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs | 3 +- .../Taxonomy/Directory/DirectoryTaxonomyWriter.cs | 2 +- src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs | 3 +- src/Lucene.Net.Join/ToChildBlockJoinQuery.cs | 4 +- src/Lucene.Net.Join/ToParentBlockJoinCollector.cs | 4 +- .../MemoryIndex.MemoryIndexReader.cs | 2 +- src/Lucene.Net.Misc/Document/LazyDocument.cs | 6 +-- src/Lucene.Net.Misc/Index/Sorter/Sorter.cs | 4 +- .../IndexAndTaxonomyRevision.cs | 2 +- src/Lucene.Net.Replicator/IndexRevision.cs | 2 +- src/Lucene.Net.Replicator/ReplicationClient.cs | 2 +- .../Suggest/Analyzing/AnalyzingSuggester.cs | 2 +- .../Suggest/Analyzing/FSTUtil.cs | 4 +- src/Lucene.Net/Codecs/BlockTermState.cs | 2 +- src/Lucene.Net/Codecs/BlockTreeTermsReader.cs | 18 ++++----- src/Lucene.Net/Support/Diagnostics/Debugging.cs | 47 +++++++++++++++++++--- 30 files changed, 96 insertions(+), 59 deletions(-) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs index e118268..faa5c59 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs +++ 
b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs @@ -510,7 +510,7 @@ namespace Lucene.Net.Analysis.Synonym int outputLen = chIDX - lastStart; // Caller is not allowed to have empty string in // the output: - if (Debugging.AssertsEnabled) Debugging.Assert(outputLen > 0, () => "output contains empty string: " + scratchChars); + if (Debugging.AssertsEnabled) Debugging.Assert(outputLen > 0, "output contains empty string: {0}", scratchChars); int endOffset; int posLen; if (chIDX == chEnd && lastStart == scratchChars.Offset) @@ -536,7 +536,7 @@ namespace Lucene.Net.Analysis.Synonym lastStart = 1 + chIDX; //System.out.println(" slot=" + outputUpto + " keepOrig=" + keepOrig); outputUpto = RollIncr(outputUpto); - if (Debugging.AssertsEnabled) Debugging.Assert(futureOutputs[outputUpto].posIncr == 1, () => "outputUpto=" + outputUpto + " vs nextWrite=" + nextWrite); + if (Debugging.AssertsEnabled) Debugging.Assert(futureOutputs[outputUpto].posIncr == 1, "outputUpto={0} vs nextWrite={1}", outputUpto, nextWrite); } } } @@ -602,7 +602,7 @@ namespace Lucene.Net.Analysis.Synonym { // Pass-through case: return token we just pulled // but didn't capture: - if (Debugging.AssertsEnabled) Debugging.Assert(inputSkipCount == 1, () => "inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead); + if (Debugging.AssertsEnabled) Debugging.Assert(inputSkipCount == 1, "inputSkipCount={0} nextRead={1}", inputSkipCount, nextRead); } input.Reset(); if (outputs.count > 0) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs index 2e2e9b5..3e21e9d 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs @@ -175,8 +175,8 @@ namespace Lucene.Net.Analysis.Synonym if (Debugging.AssertsEnabled) { - Debugging.Assert(!HasHoles(input), () => "input has holes: " + input); - Debugging.Assert(!HasHoles(output), () => 
"output has holes: " + output); + Debugging.Assert(!HasHoles(input), "input has holes: {0}", input); + Debugging.Assert(!HasHoles(output), "output has holes: {0}", output); } //System.out.println("fmap.add input=" + input + " numInputWords=" + numInputWords + " output=" + output + " numOutputWords=" + numOutputWords); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs index a840759..71340c9 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs @@ -111,7 +111,7 @@ namespace Lucene.Net.Analysis.Util if (Debugging.AssertsEnabled) Debugging.Assert(pos < nextPos); // Cannot read from already freed past: - if (Debugging.AssertsEnabled) Debugging.Assert(nextPos - pos <= count, () => "nextPos=" + nextPos + " pos=" + pos + " count=" + count); + if (Debugging.AssertsEnabled) Debugging.Assert(nextPos - pos <= count, "nextPos={0} pos={1} count={2}", nextPos, pos, count); return buffer[GetIndex(pos)]; } @@ -140,7 +140,7 @@ namespace Lucene.Net.Analysis.Util if (Debugging.AssertsEnabled) { Debugging.Assert(length > 0); - Debugging.Assert(InBounds(posStart), () => "posStart=" + posStart + " length=" + length); + Debugging.Assert(InBounds(posStart), "posStart={0} length={1}", posStart, length); } //System.out.println(" buffer.Get posStart=" + posStart + " len=" + length); @@ -177,8 +177,8 @@ namespace Lucene.Net.Analysis.Util int newCount = nextPos - pos; if (Debugging.AssertsEnabled) { - Debugging.Assert(newCount <= count, () => "newCount=" + newCount + " count=" + count); - Debugging.Assert(newCount <= buffer.Length, () => "newCount=" + newCount + " buf.length=" + buffer.Length); + Debugging.Assert(newCount <= count, "newCount={0} count={1}", newCount, count); + Debugging.Assert(newCount <= buffer.Length, "newCount={0} buf.length={1}", newCount, buffer.Length); } count = newCount; } diff 
--git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs index 6da7d50..91da1c4 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs @@ -844,7 +844,7 @@ namespace Lucene.Net.Codecs.BlockTerms seekPending = false; state.Ord = indexEnum.Ord - 1; - if (Debugging.AssertsEnabled) Debugging.Assert(state.Ord >= -1, () => "Ord=" + state.Ord); + if (Debugging.AssertsEnabled) Debugging.Assert(state.Ord >= -1, "Ord={0}", state.Ord); term.CopyBytes(indexEnum.Term); // Now, scan: diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs index 2d62b7b..f6dfac8 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs @@ -190,7 +190,7 @@ namespace Lucene.Net.Codecs.BlockTerms { int lo = 0; // binary search int hi = fieldIndex.numIndexTerms - 1; - if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.totalIndexInterval > 0, () => "totalIndexInterval=" + outerInstance.totalIndexInterval); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.totalIndexInterval > 0, "totalIndexInterval={0}", outerInstance.totalIndexInterval); while (hi >= lo) { @@ -332,7 +332,7 @@ namespace Lucene.Net.Codecs.BlockTerms this.numIndexTerms = 1 + (numIndexTerms - 1) / outerInstance.outerInstance.indexDivisor; - if (Debugging.AssertsEnabled) Debugging.Assert(this.numIndexTerms > 0, () => "numIndexTerms=" + numIndexTerms + " indexDivisor=" + outerInstance.outerInstance.indexDivisor); + if (Debugging.AssertsEnabled) Debugging.Assert(this.numIndexTerms > 0, "numIndexTerms={0} indexDivisor={1}", numIndexTerms, outerInstance.outerInstance.indexDivisor); if (outerInstance.outerInstance.indexDivisor == 1) { @@ -400,7 +400,7 @@ namespace Lucene.Net.Codecs.BlockTerms clone.Seek(indexStart + termOffset); if 
(Debugging.AssertsEnabled) { - Debugging.Assert(indexStart + termOffset < clone.Length, () => "indexStart=" + indexStart + " termOffset=" + termOffset + " len=" + clone.Length); + Debugging.Assert(indexStart + termOffset < clone.Length, "indexStart={0} termOffset={1} len={2}", indexStart, termOffset, clone.Length); Debugging.Assert(indexStart + termOffset + numTermBytes < clone.Length); } diff --git a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs index 0a65f5a..23b3770 100644 --- a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs +++ b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs @@ -118,7 +118,7 @@ namespace Lucene.Net.Codecs.IntBlock // TODO: should we do this in real-time, not lazy? pendingFP = fp; pendingUpto = upto; - if (Debugging.AssertsEnabled) Debugging.Assert(pendingUpto >= 0, () => "pendingUpto=" + pendingUpto); + if (Debugging.AssertsEnabled) Debugging.Assert(pendingUpto >= 0, "pendingUpto={0}", pendingUpto); seekPending = true; } diff --git a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs index 95b5b91..dd00def 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs @@ -1447,7 +1447,7 @@ namespace Lucene.Net.Codecs.Memory if (Debugging.AssertsEnabled) Debugging.Assert(termOrd < state.changeOrd); - if (Debugging.AssertsEnabled) Debugging.Assert(stateUpto <= termLength, () => "term.length=" + termLength + "; stateUpto=" + stateUpto); + if (Debugging.AssertsEnabled) Debugging.Assert(stateUpto <= termLength, "term.length={0}; stateUpto={1}", termLength, stateUpto); int label = outerInstance.termBytes[termOffset + stateUpto] & 0xFF; while (label > state.transitionMax) @@ -1478,7 +1478,7 @@ namespace Lucene.Net.Codecs.Memory goto nextTermContinue; } if (Debugging.AssertsEnabled) Debugging.Assert(state.transitionUpto < 
state.transitions.Length, - () => " state.transitionUpto=" + state.transitionUpto + " vs " + state.transitions.Length); + " state.transitionUpto={0} vs {1}", state.transitionUpto, state.transitions.Length); state.transitionMin = state.transitions[state.transitionUpto].Min; state.transitionMax = state.transitions[state.transitionUpto].Max; if (Debugging.AssertsEnabled) diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs index e01618d..918fcc8 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs @@ -175,7 +175,7 @@ namespace Lucene.Net.Codecs.Pulsing public override void StartDoc(int docId, int termDocFreq) { - if (Debugging.AssertsEnabled) Debugging.Assert(docId >= 0, () => "Got DocID=" + docId); + if (Debugging.AssertsEnabled) Debugging.Assert(docId >= 0, "Got DocID={0}", docId); if (_pendingCount == _pending.Length) { diff --git a/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs b/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs index f4b2d13..4a62621 100644 --- a/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs +++ b/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs @@ -265,7 +265,7 @@ namespace Lucene.Net.Codecs.Sep if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); int delta = position - lastPosition; - if (Debugging.AssertsEnabled) Debugging.Assert(delta >= 0, () => "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it) + if (Debugging.AssertsEnabled) Debugging.Assert(delta >= 0, "position={0} lastPosition={1}", position, lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it) lastPosition = position; if (storePayloads) diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs index 
47ffc15..2945914 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs @@ -147,7 +147,7 @@ namespace Lucene.Net.Codecs.SimpleText Debugging.Assert(field != null); // SegmentCoreReaders already verifies this field is valid: - Debugging.Assert(field != null, () => "field=" + fieldInfo.Name + " fields=" + fields); + Debugging.Assert(field != null, "field={0} fields={1}", fieldInfo.Name, fields); } var @in = (IndexInput)data.Clone(); diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs index 8ca1703..faedde2 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs @@ -69,7 +69,7 @@ namespace Lucene.Net.Codecs.SimpleText /// </summary> private bool FieldSeen(string field) { - if (Debugging.AssertsEnabled) Debugging.Assert(!_fieldsSeen.Contains(field), () => "field \"" + field + "\" was added more than once during flush"); + if (Debugging.AssertsEnabled) Debugging.Assert(!_fieldsSeen.Contains(field), "field \"{0}\" was added more than once during flush", field); _fieldsSeen.Add(field); return true; } @@ -134,7 +134,7 @@ namespace Lucene.Net.Codecs.SimpleText if (Debugging.AssertsEnabled) Debugging.Assert(numDocsWritten <= numDocs); } - if (Debugging.AssertsEnabled) Debugging.Assert(numDocs == numDocsWritten, () => "numDocs=" + numDocs + " numDocsWritten=" + numDocsWritten); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs == numDocsWritten, "numDocs={0} numDocsWritten={1}", numDocs, numDocsWritten); } public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values) diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs index 9f39526..e91d540 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs 
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs @@ -167,7 +167,7 @@ namespace Lucene.Net.Codecs.SimpleText { Debugging.Assert(endOffset >= startOffset); Debugging.Assert(startOffset >= _lastStartOffset, - () => "startOffset=" + startOffset + " lastStartOffset=" + _lastStartOffset); + "startOffset={0} lastStartOffset={1}", startOffset, _lastStartOffset); } _lastStartOffset = startOffset; _outerInstance.Write(START_OFFSET); diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs index 40c050d..c9ac679 100644 --- a/src/Lucene.Net.Facet/DrillDownQuery.cs +++ b/src/Lucene.Net.Facet/DrillDownQuery.cs @@ -86,7 +86,7 @@ namespace Lucene.Net.Facet { throw new ArgumentException("cannot apply filter unless baseQuery isn't null; pass ConstantScoreQuery instead"); } - if (Debugging.AssertsEnabled) Debugging.Assert(clauses.Length == 1 + other.drillDownDims.Count, () => clauses.Length + " vs " + (1 + other.drillDownDims.Count)); + if (Debugging.AssertsEnabled) Debugging.Assert(clauses.Length == 1 + other.drillDownDims.Count, "{0} vs {1}", clauses.Length, (1 + other.drillDownDims.Count)); drillDownDims.PutAll(other.drillDownDims); query.Add(new FilteredQuery(clauses[0].Query, filter), Occur.MUST); for (int i = 1; i < clauses.Length; i++) diff --git a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs index afd9bc1..ff8dd1f 100644 --- a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs +++ b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs @@ -550,7 +550,7 @@ namespace Lucene.Net.Facet //} // Mark slot as valid: - if (Debugging.AssertsEnabled) Debugging.Assert(docIDs[slot] != docID, () => "slot=" + slot + " docID=" + docID); + if (Debugging.AssertsEnabled) Debugging.Assert(docIDs[slot] != docID, "slot={0} docID={1}", slot, docID); docIDs[slot] = docID; scores[slot] = baseScorer.GetScore(); filledSlots[filledCount++] = slot; diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs 
b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs index 7616e78..7233443 100644 --- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs +++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs @@ -65,7 +65,8 @@ namespace Lucene.Net.Facet.Taxonomy // while the code which calls this method is safe, at some point a test // tripped on AIOOBE in toString, but we failed to reproduce. adding the // assert as a safety check. - if (Debugging.AssertsEnabled) Debugging.Assert(prefixLen > 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(prefixLen > 0 && prefixLen <= copyFrom.Components.Length, + "prefixLen cannot be negative nor larger than the given components' length: prefixLen={0} components.length={1}", prefixLen, copyFrom.Components.Length); this.Components = copyFrom.Components; Length = prefixLen; } diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs index 5c5b8a8..5b0f83b 100644 --- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs +++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs @@ -907,7 +907,7 @@ namespace Lucene.Net.Facet.Taxonomy.Directory } int[] parents = GetTaxoArrays().Parents; - if (Debugging.AssertsEnabled) Debugging.Assert(ordinal < parents.Length, () => "requested ordinal (" + ordinal + "); parents.length (" + parents.Length + ") !"); + if (Debugging.AssertsEnabled) Debugging.Assert(ordinal < parents.Length, "requested ordinal ({0}); parents.length ({1}) !", ordinal, parents.Length); return parents[ordinal]; } diff --git a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs index 562d80e..9f6b20f 100644 --- a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs +++ 
b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs @@ -68,7 +68,8 @@ namespace Lucene.Net.Facet.Taxonomy // while the code which calls this method is safe, at some point a test // tripped on AIOOBE in toString, but we failed to reproduce. adding the // assert as a safety check. - if (Debugging.AssertsEnabled) Debugging.Assert(prefixLen >= 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(prefixLen >= 0 && prefixLen <= copyFrom.Components.Length, + "prefixLen cannot be negative nor larger than the given components' length: prefixLen={0} components.length={1}", prefixLen, copyFrom.Components.Length); this.Components = copyFrom.Components; Length = prefixLen; } diff --git a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs index f3e0297..bda6f6b 100644 --- a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs +++ b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs @@ -246,7 +246,7 @@ namespace Lucene.Net.Join } } - if (Debugging.AssertsEnabled) Debugging.Assert(_childDoc < _parentDoc, () => "childDoc=" + _childDoc + " parentDoc=" + _parentDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(_childDoc < _parentDoc, "childDoc={0} parentDoc={1}", _childDoc, _parentDoc); _childDoc++; if (_acceptDocs != null && !_acceptDocs.Get(_childDoc)) { @@ -289,7 +289,7 @@ namespace Lucene.Net.Join return _childDoc = _parentDoc = NO_MORE_DOCS; } - if (Debugging.AssertsEnabled) Debugging.Assert(_childDoc == -1 || childTarget != _parentDoc, () => "childTarget=" + childTarget); + if (Debugging.AssertsEnabled) Debugging.Assert(_childDoc == -1 || childTarget != _parentDoc, "childTarget={0}", childTarget); if (_childDoc == -1 || childTarget > _parentDoc) { // Advance to new parent: diff --git a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs 
b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs index 61550ca..1b9df09 100644 --- a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs +++ b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs @@ -284,7 +284,7 @@ namespace Lucene.Net.Join og.counts[scorerIDX] = joinScorer.ChildCount; //System.out.println(" count=" + og.counts[scorerIDX]); og.docs[scorerIDX] = joinScorer.SwapChildDocs(og.docs[scorerIDX]); - if (Debugging.AssertsEnabled) Debugging.Assert(og.docs[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + og.docs[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); + if (Debugging.AssertsEnabled) Debugging.Assert(og.docs[scorerIDX].Length >= og.counts[scorerIDX], "length={0} vs count={1}", og.docs[scorerIDX].Length, og.counts[scorerIDX]); //System.out.println(" len=" + og.docs[scorerIDX].length); /* for(int idx=0;idx<og.counts[scorerIDX];idx++) { @@ -295,7 +295,7 @@ namespace Lucene.Net.Join { //System.out.println(" copy scores"); og.scores[scorerIDX] = joinScorer.SwapChildScores(og.scores[scorerIDX]); - if (Debugging.AssertsEnabled) Debugging.Assert(og.scores[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + og.scores[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); + if (Debugging.AssertsEnabled) Debugging.Assert(og.scores[scorerIDX].Length >= og.counts[scorerIDX], "length={0} vs count={1}", og.scores[scorerIDX].Length, og.counts[scorerIDX]); } } else diff --git a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs index 0c34b04..fb43a9b 100644 --- a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs +++ b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs @@ -461,7 +461,7 @@ namespace Lucene.Net.Index.Memory if (Debugging.AssertsEnabled) { Debugging.Assert(posUpto++ < freq); - Debugging.Assert(!sliceReader.IsEndOfSlice, () => " stores offsets : " + startOffset); + Debugging.Assert(!sliceReader.IsEndOfSlice, " stores offsets : {0}", startOffset); } if 
(outerInstance.outerInstance.storeOffsets) { diff --git a/src/Lucene.Net.Misc/Document/LazyDocument.cs b/src/Lucene.Net.Misc/Document/LazyDocument.cs index 3e6e751..2c076bc 100644 --- a/src/Lucene.Net.Misc/Document/LazyDocument.cs +++ b/src/Lucene.Net.Misc/Document/LazyDocument.cs @@ -121,7 +121,7 @@ namespace Lucene.Net.Documents IIndexableField[] realValues = d.GetFields(name); if (Debugging.AssertsEnabled) Debugging.Assert(realValues.Length <= lazyValues.Count, - () => "More lazy values then real values for field: " + name); + "More lazy values then real values for field: {0}", name); for (int i = 0; i < lazyValues.Count; i++) { @@ -166,8 +166,8 @@ namespace Lucene.Net.Documents } if (Debugging.AssertsEnabled) { - Debugging.Assert(HasBeenLoaded, () => "field value was not lazy loaded"); - Debugging.Assert(realValue.Name.Equals(Name, StringComparison.Ordinal), () => "realvalue name != name: " + realValue.Name + " != " + Name); + Debugging.Assert(HasBeenLoaded, "field value was not lazy loaded"); + Debugging.Assert(realValue.Name.Equals(Name, StringComparison.Ordinal), "realvalue name != name: {0} != {1}", realValue.Name, Name); } return realValue; diff --git a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs index 4aacfc4..47a6339 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs @@ -85,8 +85,8 @@ namespace Lucene.Net.Index.Sorter int oldID = docMap.NewToOld(newID); if (Debugging.AssertsEnabled) { - Debugging.Assert(newID >= 0 && newID < maxDoc, () => "doc IDs must be in [0-" + maxDoc + "[, got " + newID); - Debugging.Assert(i == oldID, () => "mapping is inconsistent: " + i + " --oldToNew--> " + newID + " --newToOld--> " + oldID); + Debugging.Assert(newID >= 0 && newID < maxDoc, "doc IDs must be in [0-{0}[, got {1}", maxDoc, newID); + Debugging.Assert(i == oldID, "mapping is inconsistent: {0} --oldToNew--> {1} --newToOld--> {2}", i, newID, oldID); } if (i != oldID || 
newID < 0 || newID >= maxDoc) { diff --git a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs index 8793c80..6646742 100644 --- a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs +++ b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs @@ -187,7 +187,7 @@ namespace Lucene.Net.Replicator /// <exception cref="IOException"></exception> public virtual Stream Open(string source, string fileName) { - if (Debugging.AssertsEnabled) Debugging.Assert(source.Equals(INDEX_SOURCE, StringComparison.Ordinal) || source.Equals(TAXONOMY_SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected=({0} or {1}) got={2}", INDEX_SOURCE, TAXONOMY_SOURCE, source)); + if (Debugging.AssertsEnabled) Debugging.Assert(source.Equals(INDEX_SOURCE, StringComparison.Ordinal) || source.Equals(TAXONOMY_SOURCE, StringComparison.Ordinal), "invalid source; expected=({0} or {1}) got={2}", INDEX_SOURCE, TAXONOMY_SOURCE, source); IndexCommit commit = source.Equals(INDEX_SOURCE, StringComparison.Ordinal) ? 
indexCommit : taxonomyCommit; return new IndexInputStream(commit.Directory.OpenInput(fileName, IOContext.READ_ONCE)); } diff --git a/src/Lucene.Net.Replicator/IndexRevision.cs b/src/Lucene.Net.Replicator/IndexRevision.cs index d454d13..58d6a7b 100644 --- a/src/Lucene.Net.Replicator/IndexRevision.cs +++ b/src/Lucene.Net.Replicator/IndexRevision.cs @@ -134,7 +134,7 @@ namespace Lucene.Net.Replicator public virtual Stream Open(string source, string fileName) { - if (Debugging.AssertsEnabled) Debugging.Assert(source.Equals(SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected={0} got={1}", SOURCE, source)); + if (Debugging.AssertsEnabled) Debugging.Assert(source.Equals(SOURCE, StringComparison.Ordinal), "invalid source; expected={0} got={1}", SOURCE, source); return new IndexInputStream(commit.Directory.OpenInput(fileName, IOContext.READ_ONCE)); } diff --git a/src/Lucene.Net.Replicator/ReplicationClient.cs b/src/Lucene.Net.Replicator/ReplicationClient.cs index 23b378c..902b1f9 100644 --- a/src/Lucene.Net.Replicator/ReplicationClient.cs +++ b/src/Lucene.Net.Replicator/ReplicationClient.cs @@ -370,7 +370,7 @@ namespace Lucene.Net.Replicator // make sure to preserve revisionFiles order List<RevisionFile> res = new List<RevisionFile>(); string source = e.Key; - if (Debugging.AssertsEnabled) Debugging.Assert(newRevisionFiles.ContainsKey(source), () => string.Format("source not found in newRevisionFiles: {0}", newRevisionFiles)); + if (Debugging.AssertsEnabled) Debugging.Assert(newRevisionFiles.ContainsKey(source), "source not found in newRevisionFiles: {0}", newRevisionFiles); foreach (RevisionFile file in newRevisionFiles[source]) { if (!handlerFiles.Contains(file.FileName)) diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs index 4ddb98e..e0ea82d 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs +++ 
b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs @@ -491,7 +491,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing output.WriteBytes(surfaceForm.Bytes, surfaceForm.Offset, surfaceForm.Length); } - if (Debugging.AssertsEnabled) Debugging.Assert(output.Position == requiredLength, () => output.Position + " vs " + requiredLength); + if (Debugging.AssertsEnabled) Debugging.Assert(output.Position == requiredLength, "{0} vs {1}", output.Position, requiredLength); writer.Write(buffer, 0, output.Position); } diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs index 53d613d..d9f8bd7 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs @@ -121,7 +121,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing while (nextArc != null && nextArc.Label <= max) { if (Debugging.AssertsEnabled) Debugging.Assert(nextArc.Label <= max); - if (Debugging.AssertsEnabled) Debugging.Assert(nextArc.Label >= min, () => nextArc.Label + " " + min); + if (Debugging.AssertsEnabled) Debugging.Assert(nextArc.Label >= min, "{0} {1}", nextArc.Label, min); Int32sRef newInput = new Int32sRef(currentInput.Length + 1); newInput.CopyInt32s(currentInput); newInput.Int32s[currentInput.Length] = nextArc.Label; @@ -130,7 +130,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing .CopyFrom(nextArc), fst.Outputs.Add(path.Output, nextArc.Output), newInput)); int label = nextArc.Label; // used in assert nextArc = nextArc.IsLast ? null : fst.ReadNextRealArc(nextArc, fstReader); - if (Debugging.AssertsEnabled) Debugging.Assert(nextArc == null || label < nextArc.Label, () => "last: " + label + " next: " + (nextArc == null ? "" : nextArc.Label.ToString())); + if (Debugging.AssertsEnabled) Debugging.Assert(nextArc == null || label < nextArc.Label, "last: {0} next: {1}", label, (nextArc == null ? 
"" : nextArc.Label.ToString())); } } } diff --git a/src/Lucene.Net/Codecs/BlockTermState.cs b/src/Lucene.Net/Codecs/BlockTermState.cs index 3768b6b..6d7a717 100644 --- a/src/Lucene.Net/Codecs/BlockTermState.cs +++ b/src/Lucene.Net/Codecs/BlockTermState.cs @@ -57,7 +57,7 @@ namespace Lucene.Net.Codecs public override void CopyFrom(TermState other) { - if (Debugging.AssertsEnabled) Debugging.Assert(other is BlockTermState, () => "can not copy from " + other.GetType().Name); + if (Debugging.AssertsEnabled) Debugging.Assert(other is BlockTermState, "can not copy from {0}", other.GetType().Name); BlockTermState other2 = (BlockTermState)other; base.CopyFrom(other); DocFreq = other2.DocFreq; diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs index fd7e5e9..2f6bf90 100644 --- a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs +++ b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs @@ -173,7 +173,7 @@ namespace Lucene.Net.Codecs @in.ReadBytes(rootCode.Bytes, 0, numBytes); rootCode.Length = numBytes; FieldInfo fieldInfo = fieldInfos.FieldInfo(field); - if (Debugging.AssertsEnabled) Debugging.Assert(fieldInfo != null, () => "field=" + field); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldInfo != null, "field={0}", field); long sumTotalTermFreq = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ? 
-1 : @in.ReadVInt64(); long sumDocFreq = @in.ReadVInt64(); int docCount = @in.ReadVInt32(); @@ -478,7 +478,7 @@ namespace Lucene.Net.Codecs } endBlockCount++; long otherBytes = frame.fpEnd - frame.fp - frame.suffixesReader.Length - frame.statsReader.Length; - if (Debugging.AssertsEnabled) Debugging.Assert(otherBytes > 0, () => "otherBytes=" + otherBytes + " frame.fp=" + frame.fp + " frame.fpEnd=" + frame.fpEnd); + if (Debugging.AssertsEnabled) Debugging.Assert(otherBytes > 0, "otherBytes={0} frame.fp={1} frame.fpEnd={2}", otherBytes, frame.fp, frame.fpEnd); TotalBlockOtherBytes += otherBytes; } @@ -491,9 +491,9 @@ namespace Lucene.Net.Codecs { if (Debugging.AssertsEnabled) { - Debugging.Assert(startBlockCount == endBlockCount, () => "startBlockCount=" + startBlockCount + " endBlockCount=" + endBlockCount); - Debugging.Assert(TotalBlockCount == FloorSubBlockCount + NonFloorBlockCount, () => "floorSubBlockCount=" + FloorSubBlockCount + " nonFloorBlockCount=" + NonFloorBlockCount + " totalBlockCount=" + TotalBlockCount); - Debugging.Assert(TotalBlockCount == MixedBlockCount + TermsOnlyBlockCount + SubBlocksOnlyBlockCount, () => "totalBlockCount=" + TotalBlockCount + " mixedBlockCount=" + MixedBlockCount + " subBlocksOnlyBlockCount=" + SubBlocksOnlyBlockCount + " termsOnlyBlockCount=" + TermsOnlyBlockCount); + Debugging.Assert(startBlockCount == endBlockCount, "startBlockCount={0} endBlockCount={1}", startBlockCount, endBlockCount); + Debugging.Assert(TotalBlockCount == FloorSubBlockCount + NonFloorBlockCount, "floorSubBlockCount={0} nonFloorBlockCount={1} totalBlockCount={2}", FloorSubBlockCount, NonFloorBlockCount, TotalBlockCount); + Debugging.Assert(TotalBlockCount == MixedBlockCount + TermsOnlyBlockCount + SubBlocksOnlyBlockCount, "totalBlockCount={0} mixedBlockCount={1} subBlocksOnlyBlockCount={2} termsOnlyBlockCount={3}", TotalBlockCount, MixedBlockCount, SubBlocksOnlyBlockCount, TermsOnlyBlockCount); } } @@ -909,7 +909,7 @@ namespace Lucene.Net.Codecs public 
bool NextLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt={0} entCount={1} fp={2}", nextEnt, entCount, fp); nextEnt++; suffix = suffixesReader.ReadVInt32(); startBytePos = suffixesReader.Position; @@ -920,7 +920,7 @@ namespace Lucene.Net.Codecs public bool NextNonLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt={0} entCount={1} fp={2}", nextEnt, entCount, fp); nextEnt++; int code = suffixesReader.ReadVInt32(); suffix = (int)((uint)code >> 1); @@ -1430,7 +1430,7 @@ namespace Lucene.Net.Codecs { CopyTerm(); //if (DEBUG) System.out.println(" term match to state=" + state + "; return term=" + brToString(term)); - if (Debugging.AssertsEnabled) Debugging.Assert(savedStartTerm == null || term.CompareTo(savedStartTerm) > 0, () => "saveStartTerm=" + savedStartTerm.Utf8ToString() + " term=" + term.Utf8ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(savedStartTerm == null || term.CompareTo(savedStartTerm) > 0, "saveStartTerm={0} term={1}", savedStartTerm.Utf8ToString(), term.Utf8ToString()); return true; } else @@ -1848,7 +1848,7 @@ namespace Lucene.Net.Codecs //if (arc.label != (target.bytes[target.offset + targetUpto] & 0xFF)) { //System.out.println("FAIL: arc.label=" + (char) arc.label + " targetLabel=" + (char) (target.bytes[target.offset + targetUpto] & 0xFF)); //} - if (Debugging.AssertsEnabled) Debugging.Assert(arc.Label == 
(target.Bytes[target.Offset + targetUpto] & 0xFF), () => "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), "arc.label={0} targetLabel={1}", (char)arc.Label, (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); if (arc.Output != outerInstance.outerInstance.NO_OUTPUT) { output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); diff --git a/src/Lucene.Net/Support/Diagnostics/Debugging.cs b/src/Lucene.Net/Support/Diagnostics/Debugging.cs index f67667e..f1065d1 100644 --- a/src/Lucene.Net/Support/Diagnostics/Debugging.cs +++ b/src/Lucene.Net/Support/Diagnostics/Debugging.cs @@ -48,16 +48,51 @@ namespace Lucene.Net.Diagnostics } /// <summary> - /// Checks for a condition; if the <paramref name="condition"/> is <c>false</c>, throws an <see cref="AssertionException"/> with the message returned - /// from the specified <paramref name="messageFactory"/>. + /// Checks for a condition; if the <paramref name="condition"/> is <c>false</c>, throws an <see cref="AssertionException"/> with the message formatted + /// from the specified <paramref name="messageToFormat"/>. /// </summary> /// <param name="condition">The conditional expression to evaluate. If the condition is <c>true</c>, no exception is thrown.</param> - /// <param name="messageFactory">A delegate to build the message to use.</param> + /// <param name="messageToFormat">A string format (i.e. 
with {0}) that will be filled with the parameters</param> [MethodImpl(MethodImplOptions.AggressiveInlining)] - public static void Assert(bool condition, Func<string> messageFactory) + public static void Assert<T0>(bool condition, string messageToFormat, T0 p0) { - if (AssertsEnabled && !condition) - throw new AssertionException(messageFactory()); + if (AssertsEnabled && !condition) throw new AssertionException(string.Format(messageToFormat, p0)); + } + + /// <summary> + /// Checks for a condition; if the <paramref name="condition"/> is <c>false</c>, throws an <see cref="AssertionException"/> with the message formatted + /// from the specified <paramref name="messageToFormat"/>. + /// </summary> + /// <param name="condition">The conditional expression to evaluate. If the condition is <c>true</c>, no exception is thrown.</param> + /// <param name="messageToFormat">A string format (i.e. with {0}) that will be filled with the parameters</param> + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Assert<T0, T1>(bool condition, string messageToFormat, T0 p0, T1 p1) + { + if (AssertsEnabled && !condition) throw new AssertionException(string.Format(messageToFormat, p0, p1)); + } + + /// <summary> + /// Checks for a condition; if the <paramref name="condition"/> is <c>false</c>, throws an <see cref="AssertionException"/> with the message formatted + /// from the specified <paramref name="messageToFormat"/>. + /// </summary> + /// <param name="condition">The conditional expression to evaluate. If the condition is <c>true</c>, no exception is thrown.</param> + /// <param name="messageToFormat">A string format (i.e. 
with {0}) that will be filled with the parameters</param> + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Assert<T0, T1, T2>(bool condition, string messageToFormat, T0 p0, T1 p1, T2 p2) + { + if (AssertsEnabled && !condition) throw new AssertionException(string.Format(messageToFormat, p0, p1, p2)); + } + + /// <summary> + /// Checks for a condition; if the <paramref name="condition"/> is <c>false</c>, throws an <see cref="AssertionException"/> with the message formatted + /// from the specified <paramref name="messageToFormat"/>. + /// </summary> + /// <param name="condition">The conditional expression to evaluate. If the condition is <c>true</c>, no exception is thrown.</param> + /// <param name="messageToFormat">A string format (i.e. with {0}) that will be filled with the parameters</param> + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Assert<T0, T1, T2, T3>(bool condition, string messageToFormat, T0 p0, T1 p1, T2 p2, T3 p3) + { + if (AssertsEnabled && !condition) throw new AssertionException(string.Format(messageToFormat, p0, p1, p2, p3)); } /// <summary>
