This is an automated email from the ASF dual-hosted git repository.
nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git
The following commit(s) were added to refs/heads/master by this push:
new 285ae36 BREAKING: Removed unnecessary nullable value types (#574)
285ae36 is described below
commit 285ae36d100e3c4322f52d7b9ff18c8d2fb42a10
Author: Shad Storhaug <[email protected]>
AuthorDate: Mon Dec 13 11:25:10 2021 +0700
BREAKING: Removed unnecessary nullable value types (#574)
* Lucene.Net.Index.DocumentsWriterFlushControl: Removed unnecessary
nullable from flushingWriters declaration
* Lucene.Net.Analysis.Hunspell.Dictionary: Removed unnecessary nullables
from seenPatterns and seenStrips declarations
* Lucene.Net.Analysis.Synonym.SynonymMap: Removed unnecessary nullable from
dedupSet declaration
* Lucene.Net.Benchmark.ByTask.Feeds.EnwikiContentSource: Removed
unnecessary nullable from ELEMENTS declaration
* Lucene.Net.Codecs.Memory.DirectDocValuesProducer: Removed unnecessary
nullables from dictionary declarations
* Lucene.Net.Codecs.Memory.MemoryDocValuesConsumer::AddNumericField()
Removed unnecessary nullables from uniqueValues and decode declarations
* Lucene.Net.Codecs.Memory.MemoryDocValuesProducer: Removed unnecessary
nullables from dictionary declarations
* Lucene.Net.Codecs.SimpleText.SimpleTextFieldsReader: Removed unnecessary
nullables from dictionary declarations
* Lucene.Net.Facet.DrillDownQuery/DrillSideways: Removed unnecessary
nullables from dictionary declarations
* Lucene.Net.Facet.Range.LongRangeCounter: Removed unnecessary nullables
from collection declarations
* Lucene.Net.Search.Join.ToParentBlockJoinCollector: Removed unnecessary
nullables from collection declarations
* Lucene.Net.Misc.Document.LazyDocument: Removed unnecessary nullable from
dictionary declaration
*
Lucene.Net.QueryParsers.Flexible.Standard.Builders.MultiPhraseQueryNodeBuilder:
Removed unnecessary nullable from dictionary declaration
* BREAKING:
Lucene.Net.Suggest.Fst.FSTCompletionLookup/WFSTCompletionLookup: Changed Get()
to return long? instead of object to eliminate boxing/unboxing
* BUG: Lucene.Net.Search.Suggest.Jaspell.JaspellTernarySearchTrie: Fixed
casting bug when converting from J2N.Numerics.Number to float
* Lucene.Net.Index.BaseStoredFieldsFormatTestCase: Removed unnecessary
nullable from collection declaration
* Lucene.Net.Index.BaseTermVectorsFormatTestCase: Removed unnecessary
nullables from collection declarations
* Lucene.Net.Search.CheckHits: Removed unnecessary nullable declaration
* Lucene.Net.Codecs.PerField.PerFieldPostingsFormat.FieldsWriter: Removed
unnecessary nullable from suffixes declaration
* Lucene.Net.Index.BufferedUpdates: Removed unnecessary nullables from
collection declarations (fixes mismatch problem in FrozenBufferedUpdates where
we explicitly threw NullReferenceException)
* Lucene.Net.Index.FieldInfos: Removed unnecessary nullables from
collection declarations
* Lucene.Net.Index.SegmentDocValues: Removed unnecessary nullables from
collection and method declarations.
* Lucene.Net.Index.SnapshotDeletionPolicy: Removed unnecessary nullables
from m_indexCommits protected dictionary
* Lucene.Net.Search.MultiPhraseQuery: Removed unnecessary nullable
declarations from ToString() method
* Lucene.Net.Index.StandardDirectoryReader: Removed unnecessary nullables
from collection declarations
* Lucene.Net.Search.PhraseQuery: Removed unnecessary nullables from
collection declarations
* Lucene.Net.Search.QueryRescorer: Use HasValue and Value for nullables
rather than comparing against null and casting
* Lucene.Net.Search.SloppyPhraseScorer: Removed unnecessary nullables from
collection declarations
* Lucene.Net.Util.Automaton.BasicOperations: Removed unnecessary nullable
from map declaration
* BREAKING: Lucene.Net.Index.MergePolicy::FindForcedMerges(): Removed
unnecessary nullable from FindForcedMerges and all MergePolicy subclasses
* BREAKING: Lucene.Net.Replicator: Changed callback signature from
Func<bool?> to Action, since the return value had no semantic meaning
* Lucene.Net.Util.Automaton.AutomatonTestUtil: Removed unnecessary nullable
from leadsToAccept dictionary
* Lucene.Net.Index.DocumentsWriterStallControl: Removed unnecessary
nullable from waiting dictionary
* Lucene.Net.Codecs: Removed unnecessary nullables from DocsEnum
collections in tests
---
.../Analysis/Hunspell/Dictionary.cs | 12 +++++-----
.../Analysis/Synonym/SynonymMap.cs | 8 +++----
.../ByTask/Feeds/EnwikiContentSource.cs | 5 ++--
.../Memory/DirectDocValuesProducer.cs | 18 +++++++-------
.../Memory/MemoryDocValuesConsumer.cs | 8 +++----
.../Memory/MemoryDocValuesProducer.cs | 20 ++++++++--------
.../SimpleText/SimpleTextFieldsReader.cs | 10 ++++----
src/Lucene.Net.Facet/DrillDownQuery.cs | 12 +++++-----
src/Lucene.Net.Facet/DrillSideways.cs | 2 +-
src/Lucene.Net.Facet/Range/LongRangeCounter.cs | 20 ++++++++--------
.../Support/ToParentBlockJoinCollector.cs | 10 ++++----
src/Lucene.Net.Join/ToParentBlockJoinCollector.cs | 10 ++++----
src/Lucene.Net.Misc/Document/LazyDocument.cs | 2 +-
.../Index/Sorter/SortingMergePolicy.cs | 2 +-
.../Builders/MultiPhraseQueryNodeBuilder.cs | 2 +-
.../IndexAndTaxonomyReplicationHandler.cs | 4 ++--
.../IndexReplicationHandler.cs | 6 ++---
.../Suggest/Fst/FSTCompletionLookup.cs | 6 ++---
.../Suggest/Fst/WFSTCompletionLookup.cs | 4 ++--
.../Suggest/Jaspell/JaspellTernarySearchTrie.cs | 8 +++----
.../Index/BaseStoredFieldsFormatTestCase.cs | 2 +-
.../Index/BaseTermVectorsFormatTestCase.cs | 28 +++++++++++-----------
.../Index/MockRandomMergePolicy.cs | 2 +-
src/Lucene.Net.TestFramework/Search/CheckHits.cs | 2 +-
.../Util/Automaton/AutomatonTestUtil.cs | 4 ++--
.../Pulsing/TestPulsingReuse.cs | 4 ++--
.../IndexAndTaxonomyReplicationClientTest.cs | 4 +---
.../IndexReplicationClientTest.cs | 4 +---
.../Suggest/Fst/FSTCompletionTest.cs | 2 +-
.../Codecs/Lucene40/TestReuseDocsEnum.cs | 6 ++---
.../Index/TestPerSegmentDeletes.cs | 2 +-
.../Codecs/PerField/PerFieldPostingsFormat.cs | 10 ++++----
src/Lucene.Net/Index/BufferedUpdates.cs | 16 ++++++-------
.../Index/DocumentsWriterFlushControl.cs | 6 ++---
.../Index/DocumentsWriterStallControl.cs | 2 +-
src/Lucene.Net/Index/FieldInfos.cs | 18 +++++++-------
.../Index/FreqProxTermsWriterPerField.cs | 6 ++---
src/Lucene.Net/Index/FrozenBufferedUpdates.cs | 14 ++---------
src/Lucene.Net/Index/IndexWriter.cs | 2 +-
src/Lucene.Net/Index/LogMergePolicy.cs | 9 ++++---
src/Lucene.Net/Index/MergePolicy.cs | 4 ++--
src/Lucene.Net/Index/NoMergePolicy.cs | 4 ++--
src/Lucene.Net/Index/SegmentDocValues.cs | 20 ++++++++--------
src/Lucene.Net/Index/SegmentReader.cs | 12 +++++-----
src/Lucene.Net/Index/SnapshotDeletionPolicy.cs | 16 ++++++-------
src/Lucene.Net/Index/StandardDirectoryReader.cs | 6 ++---
src/Lucene.Net/Index/TieredMergePolicy.cs | 6 ++---
src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs | 7 +++---
src/Lucene.Net/Search/MultiPhraseQuery.cs | 4 ++--
src/Lucene.Net/Search/PhraseQuery.cs | 10 ++++----
src/Lucene.Net/Search/QueryRescorer.cs | 10 ++++----
src/Lucene.Net/Search/SloppyPhraseScorer.cs | 22 ++++++++---------
src/Lucene.Net/Util/Automaton/BasicOperations.cs | 7 +++---
53 files changed, 210 insertions(+), 230 deletions(-)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
index dec0a51..4fc4dc3 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
@@ -298,7 +298,7 @@ namespace Lucene.Net.Analysis.Hunspell
{
var prefixes = new JCG.SortedDictionary<string,
IList<int>>(StringComparer.Ordinal);
var suffixes = new JCG.SortedDictionary<string,
IList<int>>(StringComparer.Ordinal);
- IDictionary<string, int?> seenPatterns = new
JCG.Dictionary<string, int?>
+ IDictionary<string, int> seenPatterns = new JCG.Dictionary<string,
int>
{
// zero condition -> 0 ord
[".*"] = 0
@@ -306,7 +306,7 @@ namespace Lucene.Net.Analysis.Hunspell
patterns.Add(null);
// zero strip -> 0 ord
- IDictionary<string, int?> seenStrips = new
JCG.LinkedDictionary<string, int?>
+ IDictionary<string, int> seenStrips = new
JCG.LinkedDictionary<string, int>
{
[""] = 0
};
@@ -509,8 +509,8 @@ namespace Lucene.Net.Analysis.Hunspell
string header,
TextReader reader,
string conditionPattern,
- IDictionary<string, int?> seenPatterns,
- IDictionary<string, int?> seenStrips)
+ IDictionary<string, int> seenPatterns,
+ IDictionary<string, int> seenStrips)
{
BytesRef scratch = new BytesRef();
StringBuilder sb = new StringBuilder();
@@ -592,7 +592,7 @@ namespace Lucene.Net.Analysis.Hunspell
}
// deduplicate patterns
- if (!seenPatterns.TryGetValue(regex, out int? patternIndex) ||
patternIndex == null)
+ if (!seenPatterns.TryGetValue(regex, out int patternIndex))
{
patternIndex = patterns.Count;
if (patternIndex > short.MaxValue)
@@ -604,7 +604,7 @@ namespace Lucene.Net.Analysis.Hunspell
patterns.Add(pattern);
}
- if (!seenStrips.TryGetValue(strip, out int? stripOrd) ||
stripOrd == null)
+ if (!seenStrips.TryGetValue(strip, out int stripOrd))
{
stripOrd = seenStrips.Count;
seenStrips[strip] = stripOrd;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
index c3f62f6..a3f3c67 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
@@ -248,11 +248,11 @@ namespace Lucene.Net.Analysis.Synonym
BytesRef scratch = new BytesRef(64);
ByteArrayDataOutput scratchOutput = new ByteArrayDataOutput();
- ISet<int?> dedupSet;
+ ISet<int> dedupSet;
if (dedup)
{
- dedupSet = new JCG.HashSet<int?>();
+ dedupSet = new JCG.HashSet<int>();
}
else
{
@@ -292,8 +292,8 @@ namespace Lucene.Net.Analysis.Synonym
{
if (dedupSet != null)
{
- // box once
- int? ent = output.ords[i];
+ // LUCENENET specific - no boxing happening here
+ int ent = output.ords[i];
if (dedupSet.Contains(ent))
{
continue;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs
b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs
index 633bf46..61bdf7c 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs
@@ -327,7 +327,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
}
}
- private static readonly IDictionary<string, int?> ELEMENTS = new
Dictionary<string, int?> // LUCENENET: Avoid static constructors (see
https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
+ private static readonly IDictionary<string, int> ELEMENTS = new
Dictionary<string, int> // LUCENENET: Avoid static constructors (see
https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
{
{ "page", PAGE },
{ "text", BODY },
@@ -360,8 +360,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds
/// </summary>
private static int GetElementType(string elem)
{
- ELEMENTS.TryGetValue(elem, out int? val);
- return val == null ? -1 : val.Value;
+ return ELEMENTS.TryGetValue(elem, out int val) ? val : -1;
}
private FileInfo file;
diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs
b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs
index 06f6305..5c529b5 100644
--- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs
+++ b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs
@@ -33,18 +33,18 @@ namespace Lucene.Net.Codecs.Memory
internal class DirectDocValuesProducer : DocValuesProducer
{
// metadata maps (just file pointers and minimal stuff)
- private readonly IDictionary<int?, NumericEntry> numerics = new
Dictionary<int?, NumericEntry>();
- private readonly IDictionary<int?, BinaryEntry> binaries = new
Dictionary<int?, BinaryEntry>();
- private readonly IDictionary<int?, SortedEntry> sorteds = new
Dictionary<int?, SortedEntry>();
- private readonly IDictionary<int?, SortedSetEntry> sortedSets = new
Dictionary<int?, SortedSetEntry>();
+ private readonly IDictionary<int, NumericEntry> numerics = new
Dictionary<int, NumericEntry>();
+ private readonly IDictionary<int, BinaryEntry> binaries = new
Dictionary<int, BinaryEntry>();
+ private readonly IDictionary<int, SortedEntry> sorteds = new
Dictionary<int, SortedEntry>();
+ private readonly IDictionary<int, SortedSetEntry> sortedSets = new
Dictionary<int, SortedSetEntry>();
private readonly IndexInput data;
// ram instances we have already loaded
- private readonly IDictionary<int?, NumericDocValues> numericInstances
= new Dictionary<int?, NumericDocValues>();
- private readonly IDictionary<int?, BinaryDocValues> binaryInstances =
new Dictionary<int?, BinaryDocValues>();
- private readonly IDictionary<int?, SortedDocValues> sortedInstances =
new Dictionary<int?, SortedDocValues>();
- private readonly IDictionary<int?, SortedSetRawValues>
sortedSetInstances = new Dictionary<int?, SortedSetRawValues>();
- private readonly IDictionary<int?, IBits> docsWithFieldInstances = new
Dictionary<int?, IBits>();
+ private readonly IDictionary<int, NumericDocValues> numericInstances =
new Dictionary<int, NumericDocValues>();
+ private readonly IDictionary<int, BinaryDocValues> binaryInstances =
new Dictionary<int, BinaryDocValues>();
+ private readonly IDictionary<int, SortedDocValues> sortedInstances =
new Dictionary<int, SortedDocValues>();
+ private readonly IDictionary<int, SortedSetRawValues>
sortedSetInstances = new Dictionary<int, SortedSetRawValues>();
+ private readonly IDictionary<int, IBits> docsWithFieldInstances = new
Dictionary<int, IBits>();
private readonly int maxDoc;
private readonly AtomicInt64 ramBytesUsed;
diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs
b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs
index e78a828..e378083 100644
--- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs
+++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs
@@ -96,10 +96,10 @@ namespace Lucene.Net.Codecs.Memory
long gcd = 0;
bool missing = false;
// TODO: more efficient?
- ISet<long?> uniqueValues = null;
+ ISet<long> uniqueValues = null;
if (optimizeStorage)
{
- uniqueValues = new JCG.HashSet<long?>();
+ uniqueValues = new JCG.HashSet<long>();
long count = 0;
foreach (var nv in values)
@@ -179,14 +179,14 @@ namespace Lucene.Net.Codecs.Memory
else
{
meta.WriteByte(MemoryDocValuesProducer.TABLE_COMPRESSED);
// table-compressed
- long?[] decode = new long?[uniqueValues.Count];
+ long[] decode = new long[uniqueValues.Count];
uniqueValues.CopyTo(decode, 0);
var encode = new Dictionary<long?, int?>();
data.WriteVInt32(decode.Length);
for (int i = 0; i < decode.Length; i++)
{
- data.WriteInt64(decode[i].Value);
+ data.WriteInt64(decode[i]);
encode[decode[i]] = i;
}
diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs
b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs
index 8210227..74eff2b 100644
--- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs
+++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs
@@ -38,16 +38,16 @@ namespace Lucene.Net.Codecs.Memory
internal class MemoryDocValuesProducer : DocValuesProducer
{
// metadata maps (just file pointers and minimal stuff)
- private readonly IDictionary<int?, NumericEntry> numerics;
- private readonly IDictionary<int?, BinaryEntry> binaries;
- private readonly IDictionary<int?, FSTEntry> fsts;
+ private readonly IDictionary<int, NumericEntry> numerics;
+ private readonly IDictionary<int, BinaryEntry> binaries;
+ private readonly IDictionary<int, FSTEntry> fsts;
private readonly IndexInput data;
// ram instances we have already loaded
- private readonly IDictionary<int?, NumericDocValues> numericInstances
= new Dictionary<int?, NumericDocValues>();
- private readonly IDictionary<int?, BinaryDocValues> binaryInstances =
new Dictionary<int?, BinaryDocValues>();
- private readonly IDictionary<int?, FST<long?>> fstInstances = new
Dictionary<int?, FST<long?>>();
- private readonly IDictionary<int?, IBits> docsWithFieldInstances = new
Dictionary<int?, IBits>();
+ private readonly IDictionary<int, NumericDocValues> numericInstances =
new Dictionary<int, NumericDocValues>();
+ private readonly IDictionary<int, BinaryDocValues> binaryInstances =
new Dictionary<int, BinaryDocValues>();
+ private readonly IDictionary<int, FST<long?>> fstInstances = new
Dictionary<int, FST<long?>>();
+ private readonly IDictionary<int, IBits> docsWithFieldInstances = new
Dictionary<int, IBits>();
private readonly int maxDoc;
private readonly AtomicInt64 ramBytesUsed;
@@ -81,9 +81,9 @@ namespace Lucene.Net.Codecs.Memory
try
{
version = CodecUtil.CheckHeader(@in, metaCodec, VERSION_START,
VERSION_CURRENT);
- numerics = new Dictionary<int?, NumericEntry>();
- binaries = new Dictionary<int?, BinaryEntry>();
- fsts = new Dictionary<int?, FSTEntry>();
+ numerics = new Dictionary<int, NumericEntry>();
+ binaries = new Dictionary<int, BinaryEntry>();
+ fsts = new Dictionary<int, FSTEntry>();
ReadFields(@in /*, state.FieldInfos // LUCENENET: Not
referenced */);
if (version >= VERSION_CHECKSUM)
{
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
index 0763a37..9c9bf79 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
@@ -53,7 +53,7 @@ namespace Lucene.Net.Codecs.SimpleText
internal class SimpleTextFieldsReader : FieldsProducer
{
- private readonly JCG.SortedDictionary<string, long?> fields;
+ private readonly JCG.SortedDictionary<string, long> fields;
private readonly IndexInput input;
private readonly FieldInfos fieldInfos;
private readonly int maxDoc;
@@ -78,12 +78,12 @@ namespace Lucene.Net.Codecs.SimpleText
}
}
- private static JCG.SortedDictionary<string, long?>
ReadFields(IndexInput @in) // LUCENENET specific - marked static
+ private static JCG.SortedDictionary<string, long>
ReadFields(IndexInput @in) // LUCENENET specific - marked static
{
ChecksumIndexInput input = new BufferedChecksumIndexInput(@in);
BytesRef scratch = new BytesRef(10);
// LUCENENET specific: Use StringComparer.Ordinal to get the same
ordering as Java
- var fields = new JCG.SortedDictionary<string,
long?>(StringComparer.Ordinal);
+ var fields = new JCG.SortedDictionary<string,
long>(StringComparer.Ordinal);
while (true)
{
@@ -691,13 +691,13 @@ namespace Lucene.Net.Codecs.SimpleText
{
if (!termsCache.TryGetValue(field, out SimpleTextTerms terms)
|| terms == null)
{
- if (!fields.TryGetValue(field, out long? fp) ||
!fp.HasValue)
+ if (!fields.TryGetValue(field, out long fp))
{
return null;
}
else
{
- terms = new SimpleTextTerms(this, field, fp.Value,
maxDoc);
+ terms = new SimpleTextTerms(this, field, fp, maxDoc);
termsCache[field] = terms;
}
}
diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs
b/src/Lucene.Net.Facet/DrillDownQuery.cs
index e80556f..c3b1fb2 100644
--- a/src/Lucene.Net.Facet/DrillDownQuery.cs
+++ b/src/Lucene.Net.Facet/DrillDownQuery.cs
@@ -62,12 +62,12 @@ namespace Lucene.Net.Facet
private readonly FacetsConfig config;
private readonly BooleanQuery query;
- private readonly IDictionary<string, int?> drillDownDims = new
JCG.LinkedDictionary<string, int?>();
+ private readonly IDictionary<string, int> drillDownDims = new
JCG.LinkedDictionary<string, int>();
/// <summary>
/// Used by <see cref="Clone"/>
/// </summary>
- internal DrillDownQuery(FacetsConfig config, BooleanQuery query,
IDictionary<string, int?> drillDownDims)
+ internal DrillDownQuery(FacetsConfig config, BooleanQuery query,
IDictionary<string, int> drillDownDims)
{
this.query = (BooleanQuery)query.Clone();
this.drillDownDims.PutAll(drillDownDims);
@@ -99,7 +99,7 @@ namespace Lucene.Net.Facet
/// <summary>
/// Used by <see cref="DrillSideways"/>
/// </summary>
- internal DrillDownQuery(FacetsConfig config, Query baseQuery,
IList<Query> clauses, IDictionary<string, int?> drillDownDims)
+ internal DrillDownQuery(FacetsConfig config, Query baseQuery,
IList<Query> clauses, IDictionary<string, int> drillDownDims)
{
query = new BooleanQuery(true);
if (baseQuery != null)
@@ -148,9 +148,9 @@ namespace Lucene.Net.Facet
private void Merge(string dim, string[] path)
{
int index = 0;
- if (drillDownDims.TryGetValue(dim, out int? idx) && idx.HasValue)
+ if (drillDownDims.TryGetValue(dim, out int idx))
{
- index = idx.Value;
+ index = idx;
}
if (query.GetClauses().Length == drillDownDims.Count + 1)
@@ -374,6 +374,6 @@ namespace Lucene.Net.Facet
internal BooleanQuery BooleanQuery => query;
- internal IDictionary<string, int?> Dims => drillDownDims;
+ internal IDictionary<string, int> Dims => drillDownDims;
}
}
\ No newline at end of file
diff --git a/src/Lucene.Net.Facet/DrillSideways.cs
b/src/Lucene.Net.Facet/DrillSideways.cs
index 9a8b9f5..530ba80 100644
--- a/src/Lucene.Net.Facet/DrillSideways.cs
+++ b/src/Lucene.Net.Facet/DrillSideways.cs
@@ -150,7 +150,7 @@ namespace Lucene.Net.Facet
public virtual DrillSidewaysResult Search(DrillDownQuery query,
ICollector hitCollector)
{
- IDictionary<string, int?> drillDownDims = query.Dims;
+ IDictionary<string, int> drillDownDims = query.Dims;
FacetsCollector drillDownCollector = new FacetsCollector();
diff --git a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
index 6740240..f8f315c 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
@@ -49,7 +49,7 @@ namespace Lucene.Net.Facet.Range
// track the start vs end case separately because if a
// given point is both, then it must be its own
// elementary interval:
- IDictionary<long?, int?> endsMap = new Dictionary<long?, int?>
+ IDictionary<long, int> endsMap = new Dictionary<long, int>
{
[long.MinValue] = 1,
[long.MaxValue] = 2
@@ -57,13 +57,13 @@ namespace Lucene.Net.Facet.Range
foreach (Int64Range range in ranges)
{
- if (!endsMap.TryGetValue(range.minIncl, out int? cur))
+ if (!endsMap.TryGetValue(range.minIncl, out int cur))
{
endsMap[range.minIncl] = 1;
}
else
{
- endsMap[range.minIncl] = (int)cur | 1;
+ endsMap[range.minIncl] = cur | 1;
}
if (!endsMap.TryGetValue(range.maxIncl, out cur))
@@ -72,17 +72,17 @@ namespace Lucene.Net.Facet.Range
}
else
{
- endsMap[range.maxIncl] = (int)cur | 2;
+ endsMap[range.maxIncl] = cur | 2;
}
}
- var endsList = new JCG.List<long?>(endsMap.Keys);
+ var endsList = new JCG.List<long>(endsMap.Keys);
endsList.Sort();
// Build elementaryIntervals (a 1D Venn diagram):
IList<InclusiveRange> elementaryIntervals = new
JCG.List<InclusiveRange>();
int upto0 = 1;
- long v = endsList[0] ?? 0;
+ long v = endsList[0];
long prev;
if (endsMap[v] == 3)
{
@@ -96,8 +96,8 @@ namespace Lucene.Net.Facet.Range
while (upto0 < endsList.Count)
{
- v = endsList[upto0] ?? 0;
- int flags = endsMap[v] ?? 0;
+ v = endsList[upto0];
+ int flags = endsMap[v];
//System.out.println(" v=" + v + " flags=" + flags);
if (flags == 3)
{
@@ -308,7 +308,7 @@ namespace Lucene.Net.Facet.Range
// Which range indices to output when a query goes
// through this node:
- internal IList<int?> outputs;
+ internal IList<int> outputs;
public Int64RangeNode(long start, long end, Int64RangeNode left,
Int64RangeNode right, int leafIndex)
{
@@ -345,7 +345,7 @@ namespace Lucene.Net.Facet.Range
// range; add to our output list:
if (outputs == null)
{
- outputs = new JCG.List<int?>();
+ outputs = new JCG.List<int>();
}
outputs.Add(index);
}
diff --git a/src/Lucene.Net.Join/Support/ToParentBlockJoinCollector.cs
b/src/Lucene.Net.Join/Support/ToParentBlockJoinCollector.cs
index a81547b..648be74 100644
--- a/src/Lucene.Net.Join/Support/ToParentBlockJoinCollector.cs
+++ b/src/Lucene.Net.Join/Support/ToParentBlockJoinCollector.cs
@@ -82,7 +82,7 @@ namespace Lucene.Net.Join
// Maps each BlockJoinQuery instance to its "slot" in
// joinScorers and in OneGroup's cached doc/scores/count:
- private readonly IDictionary<Query, int?> joinQueryID = new
Dictionary<Query, int?>();
+ private readonly IDictionary<Query, int> joinQueryID = new
Dictionary<Query, int>();
private readonly int numParentHits;
private readonly FieldValueHitQueue<OneGroup> queue;
private readonly FieldComparer[] comparers;
@@ -320,9 +320,9 @@ namespace Lucene.Net.Join
private void Enroll(ToParentBlockJoinQuery query,
ToParentBlockJoinQuery.BlockJoinScorer scorer)
{
scorer.TrackPendingChildHits();
- if (joinQueryID.TryGetValue(query, out int? slot))
+ if (joinQueryID.TryGetValue(query, out int slot))
{
- joinScorers[(int)slot] = scorer;
+ joinScorers[slot] = scorer;
}
else
{
@@ -399,7 +399,7 @@ namespace Lucene.Net.Join
/// <exception cref="IOException"> if there is a low-level I/O error
</exception>
public virtual ITopGroups<int> GetTopGroups(ToParentBlockJoinQuery
query, Sort withinGroupSort, int offset, int maxDocsPerGroup, int
withinGroupOffset, bool fillSortFields)
{
- if (!joinQueryID.TryGetValue(query, out int? slot))
+ if (!joinQueryID.TryGetValue(query, out int slot))
{
if (totalHitCount == 0)
{
@@ -420,7 +420,7 @@ namespace Lucene.Net.Join
return null;
}
- return AccumulateGroups(slot == null ? -1 : (int)slot, offset,
maxDocsPerGroup, withinGroupOffset, withinGroupSort, fillSortFields);
+ return AccumulateGroups(slot == null ? -1 : slot, offset,
maxDocsPerGroup, withinGroupOffset, withinGroupSort, fillSortFields);
}
/// <summary>
diff --git a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
index 64d3bee..df351b3 100644
--- a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
+++ b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
@@ -80,7 +80,7 @@ namespace Lucene.Net.Search.Join
// Maps each BlockJoinQuery instance to its "slot" in
// joinScorers and in OneGroup's cached doc/scores/count:
- private readonly IDictionary<Query, int?> joinQueryID = new
Dictionary<Query, int?>();
+ private readonly IDictionary<Query, int> joinQueryID = new
Dictionary<Query, int>();
private readonly int numParentHits;
private readonly FieldValueHitQueue<OneGroup> queue;
private readonly FieldComparer[] comparers;
@@ -318,9 +318,9 @@ namespace Lucene.Net.Search.Join
private void Enroll(ToParentBlockJoinQuery query,
ToParentBlockJoinQuery.BlockJoinScorer scorer)
{
scorer.TrackPendingChildHits();
- if (joinQueryID.TryGetValue(query, out int? slot))
+ if (joinQueryID.TryGetValue(query, out int slot))
{
- joinScorers[(int)slot] = scorer;
+ joinScorers[slot] = scorer;
}
else
{
@@ -397,7 +397,7 @@ namespace Lucene.Net.Search.Join
/// <exception cref="IOException"> if there is a low-level I/O error
</exception>
public virtual ITopGroups<int> GetTopGroups(ToParentBlockJoinQuery
query, Sort withinGroupSort, int offset, int maxDocsPerGroup, int
withinGroupOffset, bool fillSortFields)
{
- if (!joinQueryID.TryGetValue(query, out int? slot))
+ if (!joinQueryID.TryGetValue(query, out int slot))
{
if (totalHitCount == 0)
{
@@ -418,7 +418,7 @@ namespace Lucene.Net.Search.Join
return null;
}
- return AccumulateGroups(slot == null ? -1 : (int)slot, offset,
maxDocsPerGroup, withinGroupOffset, withinGroupSort, fillSortFields);
+ return AccumulateGroups(slot == null ? -1 : slot, offset,
maxDocsPerGroup, withinGroupOffset, withinGroupSort, fillSortFields);
}
/// <summary>
diff --git a/src/Lucene.Net.Misc/Document/LazyDocument.cs
b/src/Lucene.Net.Misc/Document/LazyDocument.cs
index 94cbe5d..8c6ccb6 100644
--- a/src/Lucene.Net.Misc/Document/LazyDocument.cs
+++ b/src/Lucene.Net.Misc/Document/LazyDocument.cs
@@ -41,7 +41,7 @@ namespace Lucene.Net.Documents
// null until first field is loaded
private Document doc;
- private readonly IDictionary<int?, IList<LazyField>> fields = new
Dictionary<int?, IList<LazyField>>(); // LUCENENET: marked readonly
+ private readonly IDictionary<int, IList<LazyField>> fields = new
Dictionary<int, IList<LazyField>>(); // LUCENENET: marked readonly
private readonly ISet<string> fieldNames = new JCG.HashSet<string>();
// LUCENENET: marked readonly
public LazyDocument(IndexReader reader, int docID)
diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs
b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs
index 3e9477c..7bc6779 100644
--- a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs
+++ b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs
@@ -236,7 +236,7 @@ namespace Lucene.Net.Index.Sorter
return SortedMergeSpecification(@in.FindMerges(mergeTrigger,
segmentInfos));
}
- public override MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool?>
segmentsToMerge)
+ public override MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool>
segmentsToMerge)
{
return SortedMergeSpecification(@in.FindForcedMerges(segmentInfos,
maxSegmentCount, segmentsToMerge));
}
diff --git
a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs
b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs
index e2485a8..34e66b4 100644
---
a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs
+++
b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs
@@ -46,7 +46,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Builders
if (children != null)
{
- IDictionary<int?, JCG.List<Term>> positionTermMap = new
JCG.SortedDictionary<int?, JCG.List<Term>>();
+ IDictionary<int, JCG.List<Term>> positionTermMap = new
JCG.SortedDictionary<int, JCG.List<Term>>();
foreach (IQueryNode child in children)
{
diff --git a/src/Lucene.Net.Replicator/IndexAndTaxonomyReplicationHandler.cs
b/src/Lucene.Net.Replicator/IndexAndTaxonomyReplicationHandler.cs
index d5b2e64..56f582a 100644
--- a/src/Lucene.Net.Replicator/IndexAndTaxonomyReplicationHandler.cs
+++ b/src/Lucene.Net.Replicator/IndexAndTaxonomyReplicationHandler.cs
@@ -50,7 +50,7 @@ namespace Lucene.Net.Replicator
private readonly Directory indexDirectory;
private readonly Directory taxonomyDirectory;
- private readonly Func<bool?> callback;
+ private readonly Action callback;
private volatile IDictionary<string, IList<RevisionFile>>
currentRevisionFiles;
private volatile string currentVersion;
@@ -60,7 +60,7 @@ namespace Lucene.Net.Replicator
/// Constructor with the given index directory and callback to notify
when the indexes were updated.
/// </summary>
/// <exception cref="IOException"></exception>
- public IndexAndTaxonomyReplicationHandler(Directory indexDirectory,
Directory taxonomyDirectory, Func<bool?> callback)
+ public IndexAndTaxonomyReplicationHandler(Directory indexDirectory,
Directory taxonomyDirectory, Action callback)
{
this.indexDirectory = indexDirectory;
this.taxonomyDirectory = taxonomyDirectory;
diff --git a/src/Lucene.Net.Replicator/IndexReplicationHandler.cs
b/src/Lucene.Net.Replicator/IndexReplicationHandler.cs
index 18c19ca..52b77e8 100644
--- a/src/Lucene.Net.Replicator/IndexReplicationHandler.cs
+++ b/src/Lucene.Net.Replicator/IndexReplicationHandler.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Replicator
/// if the index is never modified by <see cref="IndexWriter"/>, except
the one that is
/// open on the source index, from which you replicate.
/// <para/>
- /// This handler notifies the application via a provided <see
cref="T:Func{bool?}"/> when an
+ /// This handler notifies the application via a provided <see
cref="Action"/> when an
/// updated index commit was made available for it.
/// <para/>
/// @lucene.experimental
@@ -57,7 +57,7 @@ namespace Lucene.Net.Replicator
public const string INFO_STREAM_COMPONENT = "IndexReplicationHandler";
private readonly Directory indexDirectory;
- private readonly Func<bool?> callback;
+ private readonly Action callback;
private volatile IDictionary<string, IList<RevisionFile>>
currentRevisionFiles;
private volatile string currentVersion;
@@ -240,7 +240,7 @@ namespace Lucene.Net.Replicator
/// Constructor with the given index directory and callback to notify
when the
/// indexes were updated.
/// </summary>
- public IndexReplicationHandler(Directory indexDirectory, Func<bool?>
callback) // LUCENENET TODO: API - shouldn't this be Action ?
+ public IndexReplicationHandler(Directory indexDirectory, Action
callback)
{
this.InfoStream = InfoStream.Default;
this.callback = callback;
diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs
b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs
index 8b7a980..0f0cd44 100644
--- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionLookup.cs
@@ -290,13 +290,13 @@ namespace Lucene.Net.Search.Suggest.Fst
}
/// <summary>
- /// Returns the bucket (weight) as a Long for the provided key if it
exists,
+ /// Returns the bucket (weight) as a <see cref="T:long?"/> for the
provided key if it exists,
/// otherwise null if it does not.
/// </summary>
- public virtual object Get(string key)
+ public virtual long? Get(string key)
{
int bucket = normalCompletion.GetBucket(key);
- return bucket == -1 ? (long?)null : bucket;
+ return bucket == -1 ? null : bucket;
}
public override bool Store(DataOutput output)
diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs
b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs
index c916620..c548501 100644
--- a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs
@@ -248,7 +248,7 @@ namespace Lucene.Net.Search.Suggest.Fst
/// Returns the weight associated with an input string,
/// or null if it does not exist.
/// </summary>
- public virtual object Get(string key)
+ public virtual long? Get(string key)
{
if (fst == null)
{
@@ -270,7 +270,7 @@ namespace Lucene.Net.Search.Suggest.Fst
}
else
{
- return DecodeWeight(result.GetValueOrDefault() +
arc.NextFinalOutput.GetValueOrDefault());
+ return DecodeWeight(result.Value +
arc.NextFinalOutput.GetValueOrDefault());
}
}
diff --git a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
index 6fda512..8192a9c 100644
--- a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs
@@ -283,7 +283,7 @@ namespace Lucene.Net.Search.Suggest.Jaspell
IOUtils.GetDecodingReader(new FileStream(file.FullName,
FileMode.Open), Encoding.UTF8);
string word;
int pos;
- float? occur, one = new float?(1);
+ float occur, one = 1f;
while ((word = @in.ReadLine()) != null)
{
pos = word.IndexOf('\t');
@@ -329,14 +329,14 @@ namespace Lucene.Net.Search.Suggest.Jaspell
currentNode = currentNode.relatives[TSTNode.HIKID];
}
}
- float? occur2 = null;
+ J2N.Numerics.Number occur2 = null; // LUCENENET: Original
was using Float, but that may not cast if there is an Int64 in the node. This
is safer.
if (node != null)
{
- occur2 = ((float?)(node.data));
+ occur2 = (J2N.Numerics.Number)node.data;
}
if (occur2 != null)
{
- occur += (float)occur2;
+ occur += occur2.ToSingle();
}
currentNode =
GetOrCreateNode(culture.TextInfo.ToLower(word.Trim()));
currentNode.data = occur;
diff --git
a/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
b/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
index 8b43bca..99685b3 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
@@ -93,7 +93,7 @@ namespace Lucene.Net.Index
int docCount = AtLeast(200);
int fieldCount = TestUtil.NextInt32(rand, 1, 5);
- IList<int?> fieldIDs = new JCG.List<int?>();
+ IList<int> fieldIDs = new JCG.List<int>();
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.IsTokenized = false;
diff --git
a/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
b/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
index 50f14db..46f928c 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
@@ -253,9 +253,9 @@ namespace Lucene.Net.Index
internal readonly int[] startOffsets, endOffsets;
internal readonly BytesRef[] payloads;
- internal readonly IDictionary<string, int?> freqs;
- internal readonly IDictionary<int?, ISet<int?>> positionToTerms;
- internal readonly IDictionary<int?, ISet<int?>> startOffsetToTerms;
+ internal readonly IDictionary<string, int> freqs;
+ internal readonly IDictionary<int, ISet<int>> positionToTerms;
+ internal readonly IDictionary<int, ISet<int>> startOffsetToTerms;
internal readonly ICharTermAttribute termAtt;
internal readonly IPositionIncrementAttribute piAtt;
@@ -325,26 +325,26 @@ namespace Lucene.Net.Index
}
}
- positionToTerms = new Dictionary<int?, ISet<int?>>(len);
- startOffsetToTerms = new Dictionary<int?, ISet<int?>>(len);
+ positionToTerms = new Dictionary<int, ISet<int>>(len);
+ startOffsetToTerms = new Dictionary<int, ISet<int>>(len);
for (int i = 0; i < len; ++i)
{
- if (!positionToTerms.TryGetValue(positions[i], out
ISet<int?> positionTerms))
+ if (!positionToTerms.TryGetValue(positions[i], out
ISet<int> positionTerms))
{
- positionToTerms[positions[i]] = positionTerms = new
JCG.HashSet<int?>(1);
+ positionToTerms[positions[i]] = positionTerms = new
JCG.HashSet<int>(1);
}
positionTerms.Add(i);
- if (!startOffsetToTerms.TryGetValue(startOffsets[i], out
ISet<int?> startOffsetTerms))
+ if (!startOffsetToTerms.TryGetValue(startOffsets[i], out
ISet<int> startOffsetTerms))
{
- startOffsetToTerms[startOffsets[i]] = startOffsetTerms
= new JCG.HashSet<int?>(1);
+ startOffsetToTerms[startOffsets[i]] = startOffsetTerms
= new JCG.HashSet<int>(1);
}
startOffsetTerms.Add(i);
}
- freqs = new Dictionary<string, int?>();
+ freqs = new Dictionary<string, int>();
foreach (string term in terms)
{
- if (freqs.TryGetValue(term, out int? freq))
+ if (freqs.TryGetValue(term, out int freq))
{
freqs[term] = freq + 1;
}
@@ -551,7 +551,7 @@ namespace Lucene.Net.Index
Assert.IsNotNull(docsEnum);
Assert.AreEqual(0, docsEnum.NextDoc());
Assert.AreEqual(0, docsEnum.DocID);
- Assert.AreEqual(tk.freqs[termsEnum.Term.Utf8ToString()],
(int?)docsEnum.Freq);
+ Assert.AreEqual(tk.freqs[termsEnum.Term.Utf8ToString()],
docsEnum.Freq);
Assert.AreEqual(DocsEnum.NO_MORE_DOCS, docsEnum.NextDoc());
this.docsEnum.Value = docsEnum;
@@ -570,13 +570,13 @@ namespace Lucene.Net.Index
{
Assert.AreEqual(0, docsAndPositionsEnum.NextDoc());
int freq = docsAndPositionsEnum.Freq;
- Assert.AreEqual(tk.freqs[termsEnum.Term.Utf8ToString()],
(int?)freq);
+ Assert.AreEqual(tk.freqs[termsEnum.Term.Utf8ToString()],
freq);
if (docsAndPositionsEnum != null)
{
for (int k = 0; k < freq; ++k)
{
int position = docsAndPositionsEnum.NextPosition();
- ISet<int?> indexes;
+ ISet<int> indexes;
if (terms.HasPositions)
{
indexes = tk.positionToTerms[position];
diff --git a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs
b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs
index 5f1fca6..03fc938 100644
--- a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs
+++ b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs
@@ -72,7 +72,7 @@ namespace Lucene.Net.Index
return mergeSpec;
}
- public override MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool?>
segmentsToMerge)
+ public override MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool>
segmentsToMerge)
{
JCG.List<SegmentCommitInfo> eligibleSegments = new
JCG.List<SegmentCommitInfo>();
foreach (SegmentCommitInfo info in segmentInfos.Segments)
diff --git a/src/Lucene.Net.TestFramework/Search/CheckHits.cs
b/src/Lucene.Net.TestFramework/Search/CheckHits.cs
index 0e07034..cb23651 100644
--- a/src/Lucene.Net.TestFramework/Search/CheckHits.cs
+++ b/src/Lucene.Net.TestFramework/Search/CheckHits.cs
@@ -56,7 +56,7 @@ namespace Lucene.Net.Search
public static void CheckNoMatchExplanations(Query q, string
defaultFieldName, IndexSearcher searcher, int[] results)
{
string d = q.ToString(defaultFieldName);
- JCG.SortedSet<int?> ignore = new JCG.SortedSet<int?>();
+ JCG.SortedSet<int> ignore = new JCG.SortedSet<int>();
for (int i = 0; i < results.Length; i++)
{
ignore.Add(Convert.ToInt32(results[i],
CultureInfo.InvariantCulture));
diff --git a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
index 4113e76..6fa61e2 100644
--- a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
+++ b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
@@ -411,7 +411,7 @@ namespace Lucene.Net.Util.Automaton
/// </summary>
public class RandomAcceptedStrings
{
- private readonly IDictionary<Transition, bool?> leadsToAccept;
+ private readonly IDictionary<Transition, bool> leadsToAccept;
private readonly Automaton a;
private class ArrivingTransition
@@ -437,7 +437,7 @@ namespace Lucene.Net.Util.Automaton
// must use IdentityHashmap because two Transitions w/
// different start nodes can be considered the same
- leadsToAccept = new JCG.Dictionary<Transition,
bool?>(IdentityEqualityComparer<Transition>.Default);
+ leadsToAccept = new JCG.Dictionary<Transition,
bool>(IdentityEqualityComparer<Transition>.Default);
IDictionary<State, IList<ArrivingTransition>> allArriving = new
Dictionary<State, IList<ArrivingTransition>>();
Queue<State> q = new Queue<State>();
diff --git a/src/Lucene.Net.Tests.Codecs/Pulsing/TestPulsingReuse.cs
b/src/Lucene.Net.Tests.Codecs/Pulsing/TestPulsingReuse.cs
index bd65cb0..38f93e0 100644
--- a/src/Lucene.Net.Tests.Codecs/Pulsing/TestPulsingReuse.cs
+++ b/src/Lucene.Net.Tests.Codecs/Pulsing/TestPulsingReuse.cs
@@ -50,7 +50,7 @@ namespace Lucene.Net.Codecs.Pulsing
AtomicReader segment = GetOnlySegmentReader(ir);
DocsEnum reuse = null;
- IDictionary<DocsEnum, bool?> allEnums = new
JCG.Dictionary<DocsEnum, bool?>(IdentityEqualityComparer<DocsEnum>.Default);
+ IDictionary<DocsEnum, bool> allEnums = new
JCG.Dictionary<DocsEnum, bool>(IdentityEqualityComparer<DocsEnum>.Default);
TermsEnum te = segment.GetTerms("foo").GetEnumerator();
while (te.MoveNext())
{
@@ -95,7 +95,7 @@ namespace Lucene.Net.Codecs.Pulsing
AtomicReader segment = GetOnlySegmentReader(ir);
DocsEnum reuse = null;
- IDictionary<DocsEnum, bool?> allEnums = new
JCG.Dictionary<DocsEnum, bool?>(IdentityEqualityComparer<DocsEnum>.Default);
+ IDictionary<DocsEnum, bool> allEnums = new
JCG.Dictionary<DocsEnum, bool>(IdentityEqualityComparer<DocsEnum>.Default);
TermsEnum te = segment.GetTerms("foo").GetEnumerator();
while (te.MoveNext())
{
diff --git
a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs
b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs
index 8c875b0..dd6eddc 100644
--- a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs
+++ b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs
@@ -67,7 +67,7 @@ namespace Lucene.Net.Replicator
}
}
- public bool? Call()
+ public void Call()
{
if (indexReader == null)
{
@@ -109,7 +109,6 @@ namespace Lucene.Net.Replicator
TopDocs docs = searcher.Search(drillDown, 10);
assertEquals(1, docs.TotalHits);
}
- return null;
}
public void Dispose()
@@ -330,7 +329,6 @@ namespace Lucene.Net.Replicator
{
if (Random.NextDouble() < 0.2 && failures > 0)
throw RuntimeException.Create("random exception from
callback");
- return null;
});
client = new ReplicationClientAnonymousClass(this, replicator,
handler, @in, failures);
client.StartUpdateThread(10, "indexAndTaxo");
diff --git a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs
b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs
index f156033..d6fa219 100644
--- a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs
+++ b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs
@@ -56,7 +56,7 @@ namespace Lucene.Net.Replicator
}
}
- public bool? Call()
+ public void Call()
{
if (reader == null)
{
@@ -74,7 +74,6 @@ namespace Lucene.Net.Replicator
lastGeneration = newGeneration;
TestUtil.CheckIndex(indexDir);
}
- return null;
}
public void Dispose()
{
@@ -261,7 +260,6 @@ namespace Lucene.Net.Replicator
{
if (Random.NextDouble() < 0.2 && failures > 0)
throw RuntimeException.Create("random exception from
callback");
- return null;
});
client = new ReplicationClientAnonymousClass(this, replicator,
handler, sourceDirFactory, failures);
client.StartUpdateThread(10, "index");
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs
b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs
index c9e56d6..9e6c80f 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs
@@ -196,7 +196,7 @@ namespace Lucene.Net.Search.Suggest.Fst
long? previous = null;
foreach (Input tf in keys)
{
- long? current =
(Convert.ToInt64(lookup.Get(TestUtil.BytesToCharSequence(tf.term,
Random).ToString())));
+ long? current =
lookup.Get(TestUtil.BytesToCharSequence(tf.term, Random).ToString());
if (previous != null)
{
assertEquals(previous, current);
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
b/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
index 78fd27b..e95ccde 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
@@ -74,7 +74,7 @@ namespace Lucene.Net.Codecs.Lucene40
AtomicReader indexReader = (AtomicReader)ctx.Reader;
Terms terms = indexReader.GetTerms("body");
TermsEnum iterator = terms.GetEnumerator();
- IDictionary<DocsEnum, bool?> enums = new
JCG.Dictionary<DocsEnum, bool?>(IdentityEqualityComparer<DocsEnum>.Default);
+ IDictionary<DocsEnum, bool> enums = new
JCG.Dictionary<DocsEnum, bool>(IdentityEqualityComparer<DocsEnum>.Default);
MatchNoBits bits = new MatchNoBits(indexReader.MaxDoc);
while (iterator.MoveNext())
{
@@ -103,7 +103,7 @@ namespace Lucene.Net.Codecs.Lucene40
{
Terms terms = ((AtomicReader)ctx.Reader).GetTerms("body");
TermsEnum iterator = terms.GetEnumerator();
- IDictionary<DocsEnum, bool?> enums = new
JCG.Dictionary<DocsEnum, bool?>(IdentityEqualityComparer<DocsEnum>.Default);
+ IDictionary<DocsEnum, bool> enums = new
JCG.Dictionary<DocsEnum, bool>(IdentityEqualityComparer<DocsEnum>.Default);
MatchNoBits bits = new MatchNoBits(open.MaxDoc);
DocsEnum docs = null;
while (iterator.MoveNext())
@@ -159,7 +159,7 @@ namespace Lucene.Net.Codecs.Lucene40
{
Terms terms = ((AtomicReader)ctx.Reader).GetTerms("body");
TermsEnum iterator = terms.GetEnumerator();
- IDictionary<DocsEnum, bool?> enums = new
JCG.Dictionary<DocsEnum, bool?>(IdentityEqualityComparer<DocsEnum>.Default);
+ IDictionary<DocsEnum, bool> enums = new
JCG.Dictionary<DocsEnum, bool>(IdentityEqualityComparer<DocsEnum>.Default);
MatchNoBits bits = new MatchNoBits(firstReader.MaxDoc);
iterator = terms.GetEnumerator();
DocsEnum docs = null;
diff --git a/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
b/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
index 8f9eb2d..a22f458 100644
--- a/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
+++ b/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
@@ -302,7 +302,7 @@ namespace Lucene.Net.Index
return null;
}
- public override MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool?>
segmentsToMerge)
+ public override MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool>
segmentsToMerge)
{
return null;
}
diff --git a/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
b/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
index c16ef24..9e6f46d 100644
--- a/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
+++ b/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Diagnostics;
+using Lucene.Net.Diagnostics;
using System;
using System.Collections.Generic;
using System.Globalization;
@@ -98,7 +98,7 @@ namespace Lucene.Net.Codecs.PerField
private readonly PerFieldPostingsFormat outerInstance;
internal readonly IDictionary<PostingsFormat,
FieldsConsumerAndSuffix> formats = new Dictionary<PostingsFormat,
FieldsConsumerAndSuffix>();
- internal readonly IDictionary<string, int?> suffixes = new
Dictionary<string, int?>();
+ internal readonly IDictionary<string, int> suffixes = new
Dictionary<string, int>();
internal readonly SegmentWriteState segmentWriteState;
@@ -120,14 +120,14 @@ namespace Lucene.Net.Codecs.PerField
string previousValue =
field.PutAttribute(PER_FIELD_FORMAT_KEY, formatName);
if (Debugging.AssertsEnabled) Debugging.Assert(previousValue
== null);
- int? suffix;
+ int suffix;
if (!formats.TryGetValue(format, out FieldsConsumerAndSuffix
consumer) || consumer == null)
{
// First time we are seeing this format; create a new
instance
// bump the suffix
- if (!suffixes.TryGetValue(formatName, out suffix) ||
suffix == null)
+ if (!suffixes.TryGetValue(formatName, out suffix))
{
suffix = 0;
}
@@ -142,7 +142,7 @@ namespace Lucene.Net.Codecs.PerField
GetSuffix(formatName, Convert.ToString(suffix, CultureInfo.InvariantCulture)));
consumer = new FieldsConsumerAndSuffix();
consumer.Consumer = format.FieldsConsumer(new
SegmentWriteState(segmentWriteState, segmentSuffix));
- consumer.Suffix = suffix.Value; // LUCENENET NOTE: At this
point suffix cannot be null
+ consumer.Suffix = suffix;
formats[format] = consumer;
}
else
diff --git a/src/Lucene.Net/Index/BufferedUpdates.cs
b/src/Lucene.Net/Index/BufferedUpdates.cs
index 3d2229c..8db9286 100644
--- a/src/Lucene.Net/Index/BufferedUpdates.cs
+++ b/src/Lucene.Net/Index/BufferedUpdates.cs
@@ -122,9 +122,9 @@ namespace Lucene.Net.Index
internal readonly AtomicInt32 numTermDeletes = new AtomicInt32();
internal readonly AtomicInt32 numNumericUpdates = new AtomicInt32();
internal readonly AtomicInt32 numBinaryUpdates = new AtomicInt32();
- internal readonly SCG.IDictionary<Term, int?> terms = new
Dictionary<Term, int?>();
- internal readonly SCG.IDictionary<Query, int?> queries = new
Dictionary<Query, int?>();
- internal readonly SCG.IList<int?> docIDs = new JCG.List<int?>();
+ internal readonly SCG.IDictionary<Term, int> terms = new
Dictionary<Term, int>();
+ internal readonly SCG.IDictionary<Query, int> queries = new
Dictionary<Query, int>();
+ internal readonly SCG.IList<int> docIDs = new JCG.List<int>();
// Map<dvField,Map<updateTerm,NumericUpdate>>
@@ -206,10 +206,10 @@ namespace Lucene.Net.Index
public virtual void AddQuery(Query query, int docIDUpto)
{
- queries.TryGetValue(query, out int? prev);
+ bool prevExists = queries.TryGetValue(query, out _);
queries[query] = docIDUpto;
// increment bytes used only if the query wasn't added so far.
- if (prev == null)
+ if (!prevExists)
{
bytesUsed.AddAndGet(BYTES_PER_DEL_QUERY);
}
@@ -223,8 +223,8 @@ namespace Lucene.Net.Index
public virtual void AddTerm(Term term, int docIDUpto)
{
- terms.TryGetValue(term, out int? current);
- if (current != null && docIDUpto < current)
+ bool currentExists = terms.TryGetValue(term, out int current);
+ if (currentExists && docIDUpto < current)
{
// Only record the new number if it's greater than the
// current one. this is important because if multiple
@@ -241,7 +241,7 @@ namespace Lucene.Net.Index
// delete on that term, therefore we seem to over-count. this
over-counting
// is done to respect IndexWriterConfig.setMaxBufferedDeleteTerms.
numTermDeletes.IncrementAndGet();
- if (current == null)
+ if (!currentExists)
{
bytesUsed.AddAndGet(BYTES_PER_DEL_TERM + term.Bytes.Length +
(RamUsageEstimator.NUM_BYTES_CHAR * term.Field.Length));
}
diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
index d5cd38b..a2c0be2 100644
--- a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
@@ -55,7 +55,7 @@ namespace Lucene.Net.Index
// only for safety reasons if a DWPT is close to the RAM limit
private readonly LinkedList<BlockedFlush> blockedFlushes = new
LinkedList<BlockedFlush>();
- private readonly IDictionary<DocumentsWriterPerThread, long?>
flushingWriters = new JCG.Dictionary<DocumentsWriterPerThread,
long?>(IdentityEqualityComparer<DocumentsWriterPerThread>.Default);
+ private readonly IDictionary<DocumentsWriterPerThread, long>
flushingWriters = new JCG.Dictionary<DocumentsWriterPerThread,
long>(IdentityEqualityComparer<DocumentsWriterPerThread>.Default);
internal double maxConfiguredRamBuffer = 0;
internal long peakActiveBytes = 0; // only with assert
@@ -293,9 +293,9 @@ namespace Lucene.Net.Index
if (Debugging.AssertsEnabled)
Debugging.Assert(flushingWriters.ContainsKey(dwpt));
try
{
- long? bytes = flushingWriters[dwpt];
+ long bytes = flushingWriters[dwpt];
flushingWriters.Remove(dwpt);
- flushBytes -= (long)bytes;
+ flushBytes -= bytes;
//perThreadPool.Recycle(dwpt); // LUCENENET: This is a
no-op method in Lucene and it cannot be overridden
if (Debugging.AssertsEnabled)
Debugging.Assert(AssertMemory());
}
diff --git a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs
b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs
index 3f24887..85ad393 100644
--- a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs
@@ -47,7 +47,7 @@ namespace Lucene.Net.Index
private volatile bool stalled;
private int numWaiting; // only with assert
private bool wasStalled; // only with assert
- private readonly IDictionary<ThreadJob, bool?> waiting = new
JCG.Dictionary<ThreadJob, bool?>(IdentityEqualityComparer<ThreadJob>.Default);
// only with assert
+ private readonly IDictionary<ThreadJob, bool> waiting = new
JCG.Dictionary<ThreadJob, bool>(IdentityEqualityComparer<ThreadJob>.Default);
// only with assert
/// <summary>
/// Update the stalled flag status. this method will set the stalled
flag to
diff --git a/src/Lucene.Net/Index/FieldInfos.cs
b/src/Lucene.Net/Index/FieldInfos.cs
index d8d05bd..a52f269 100644
--- a/src/Lucene.Net/Index/FieldInfos.cs
+++ b/src/Lucene.Net/Index/FieldInfos.cs
@@ -184,8 +184,8 @@ namespace Lucene.Net.Index
internal sealed class FieldNumbers
{
- private readonly IDictionary<int?, string> numberToName;
- private readonly IDictionary<string, int?> nameToNumber;
+ private readonly IDictionary<int, string> numberToName;
+ private readonly IDictionary<string, int> nameToNumber;
// We use this to enforce that a given field never
// changes DV type, even across segments / IndexWriter
@@ -199,8 +199,8 @@ namespace Lucene.Net.Index
internal FieldNumbers()
{
- this.nameToNumber = new Dictionary<string, int?>();
- this.numberToName = new Dictionary<int?, string>();
+ this.nameToNumber = new Dictionary<string, int>();
+ this.numberToName = new Dictionary<int, string>();
this.docValuesType = new Dictionary<string, DocValuesType>();
}
@@ -226,9 +226,9 @@ namespace Lucene.Net.Index
throw new ArgumentException("cannot change
DocValues type from " + currentDVType + " to " + dvType + " for field \"" +
fieldName + "\"");
}
}
- if (!nameToNumber.TryGetValue(fieldName, out int?
fieldNumber) || fieldNumber == null)
+ if (!nameToNumber.TryGetValue(fieldName, out int
fieldNumber))
{
- int? preferredBoxed = preferredFieldNumber;
+ int preferredBoxed = preferredFieldNumber;
if (preferredFieldNumber != -1 &&
!numberToName.ContainsKey(preferredBoxed))
{
@@ -249,7 +249,7 @@ namespace Lucene.Net.Index
nameToNumber[fieldName] = fieldNumber;
}
- return (int)fieldNumber;
+ return fieldNumber;
}
finally
{
@@ -258,13 +258,13 @@ namespace Lucene.Net.Index
}
// used by assert
- internal bool ContainsConsistent(int? number, string name,
DocValuesType dvType)
+ internal bool ContainsConsistent(int number, string name,
DocValuesType dvType)
{
UninterruptableMonitor.Enter(this);
try
{
numberToName.TryGetValue(number, out string
numberToNameStr);
- nameToNumber.TryGetValue(name, out int? nameToNumberVal);
+ nameToNumber.TryGetValue(name, out int nameToNumberVal);
this.docValuesType.TryGetValue(name, out DocValuesType
docValuesType);
return name.Equals(numberToNameStr,
StringComparison.Ordinal)
diff --git a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
index e51cc32..ab3f203 100644
--- a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
+++ b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
@@ -441,7 +441,7 @@ namespace Lucene.Net.Index
Debugging.Assert(!writeOffsets || writePositions);
}
- IDictionary<Term, int?> segDeletes;
+ IDictionary<Term, int> segDeletes;
if (state.SegUpdates != null && state.SegUpdates.terms.Count > 0)
{
segDeletes = state.SegUpdates.terms;
@@ -484,11 +484,11 @@ namespace Lucene.Net.Index
PostingsConsumer postingsConsumer =
termsConsumer.StartTerm(text);
- int? delDocLimit;
+ int delDocLimit;
if (segDeletes != null)
{
protoTerm.Bytes = text;
- if (segDeletes.TryGetValue(protoTerm, out int? docIDUpto)
&& docIDUpto != null)
+ if (segDeletes.TryGetValue(protoTerm, out int docIDUpto))
{
delDocLimit = docIDUpto;
}
diff --git a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs
b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs
index 7edeef4..d8a499f 100644
--- a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs
+++ b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs
@@ -83,20 +83,10 @@ namespace Lucene.Net.Index
queries = new Query[deletes.queries.Count];
queryLimits = new int[deletes.queries.Count];
int upto = 0;
- foreach (KeyValuePair<Query, int?> ent in deletes.queries)
+ foreach (KeyValuePair<Query, int> ent in deletes.queries)
{
queries[upto] = ent.Key;
- if (ent.Value.HasValue)
- {
- queryLimits[upto] = ent.Value.Value;
- }
- else
- {
- // LUCENENET NOTE: According to this:
http://stackoverflow.com/a/13914344
- // we are supposed to throw an exception in this case,
rather than
- // silently fail.
- throw new NullReferenceException();
- }
+ queryLimits[upto] = ent.Value;
upto++;
}
diff --git a/src/Lucene.Net/Index/IndexWriter.cs
b/src/Lucene.Net/Index/IndexWriter.cs
index f0f5461..1f3468e 100644
--- a/src/Lucene.Net/Index/IndexWriter.cs
+++ b/src/Lucene.Net/Index/IndexWriter.cs
@@ -247,7 +247,7 @@ namespace Lucene.Net.Index
internal readonly IndexFileDeleter deleter;
// used by forceMerge to note those needing merging
- private readonly IDictionary<SegmentCommitInfo, bool?> segmentsToMerge
= new Dictionary<SegmentCommitInfo, bool?>();
+ private readonly IDictionary<SegmentCommitInfo, bool> segmentsToMerge
= new Dictionary<SegmentCommitInfo, bool>();
private int mergeMaxNumSegments;
diff --git a/src/Lucene.Net/Index/LogMergePolicy.cs
b/src/Lucene.Net/Index/LogMergePolicy.cs
index e648a35..324d049 100644
--- a/src/Lucene.Net/Index/LogMergePolicy.cs
+++ b/src/Lucene.Net/Index/LogMergePolicy.cs
@@ -223,7 +223,7 @@ namespace Lucene.Net.Index
/// merging is less than or equal to the specified
/// <paramref name="maxNumSegments"/>.
/// </summary>
- protected virtual bool IsMerged(SegmentInfos infos, int
maxNumSegments, IDictionary<SegmentCommitInfo, bool?> segmentsToMerge)
+ protected virtual bool IsMerged(SegmentInfos infos, int
maxNumSegments, IDictionary<SegmentCommitInfo, bool> segmentsToMerge)
{
int numSegments = infos.Count;
int numToMerge = 0;
@@ -232,9 +232,9 @@ namespace Lucene.Net.Index
for (int i = 0; i < numSegments && numToMerge <= maxNumSegments;
i++)
{
SegmentCommitInfo info = infos.Info(i);
- if (segmentsToMerge.TryGetValue(info, out bool? isOriginal) &&
isOriginal != null)
+ if (segmentsToMerge.TryGetValue(info, out bool isOriginal))
{
- segmentIsOriginal = isOriginal.Value;
+ segmentIsOriginal = isOriginal;
numToMerge++;
mergeInfo = info;
}
@@ -364,7 +364,6 @@ namespace Lucene.Net.Index
return spec.Merges.Count == 0 ? null : spec;
}
- // LUCENENET TODO: Get rid of the nullable in
IDictionary<SegmentCommitInfo, bool?>, if possible
/// <summary>
/// Returns the merges necessary to merge the index down
/// to a specified number of segments.
@@ -377,7 +376,7 @@ namespace Lucene.Net.Index
/// (mergeFactor at a time) so the <see cref="MergeScheduler"/>
/// in use may make use of concurrency.
/// </summary>
- public override MergeSpecification FindForcedMerges(SegmentInfos
infos, int maxNumSegments, IDictionary<SegmentCommitInfo, bool?>
segmentsToMerge)
+ public override MergeSpecification FindForcedMerges(SegmentInfos
infos, int maxNumSegments, IDictionary<SegmentCommitInfo, bool> segmentsToMerge)
{
if (Debugging.AssertsEnabled) Debugging.Assert(maxNumSegments > 0);
if (IsVerbose)
diff --git a/src/Lucene.Net/Index/MergePolicy.cs
b/src/Lucene.Net/Index/MergePolicy.cs
index 1bff1b7..14d1778 100644
--- a/src/Lucene.Net/Index/MergePolicy.cs
+++ b/src/Lucene.Net/Index/MergePolicy.cs
@@ -52,7 +52,7 @@ namespace Lucene.Net.Index
/// <see cref="MergeSpecification"/> instance describing the set of
/// merges that should be done, or null if no merges are
/// necessary. When <see cref="IndexWriter.ForceMerge(int)"/> is called,
it calls
- /// <see cref="FindForcedMerges(SegmentInfos, int,
IDictionary{SegmentCommitInfo, bool?})"/> and the <see cref="MergePolicy"/>
should
+ /// <see cref="FindForcedMerges(SegmentInfos, int,
IDictionary{SegmentCommitInfo, bool})"/> and the <see cref="MergePolicy"/>
should
/// then return the necessary merges.</para>
///
/// <para>Note that the policy can return more than one merge at
@@ -680,7 +680,7 @@ namespace Lucene.Net.Index
/// an original segment present in the
/// to-be-merged index; else, it was a segment
/// produced by a cascaded merge. </param>
- public abstract MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool?>
segmentsToMerge);
+ public abstract MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool>
segmentsToMerge);
/// <summary>
/// Determine what set of merge operations is necessary in order to
expunge all
diff --git a/src/Lucene.Net/Index/NoMergePolicy.cs
b/src/Lucene.Net/Index/NoMergePolicy.cs
index 2354988..d87a762 100644
--- a/src/Lucene.Net/Index/NoMergePolicy.cs
+++ b/src/Lucene.Net/Index/NoMergePolicy.cs
@@ -1,4 +1,4 @@
-using System.Collections.Generic;
+using System.Collections.Generic;
namespace Lucene.Net.Index
{
@@ -60,7 +60,7 @@ namespace Lucene.Net.Index
return null;
}
- public override MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool?>
segmentsToMerge)
+ public override MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool>
segmentsToMerge)
{
return null;
}
diff --git a/src/Lucene.Net/Index/SegmentDocValues.cs
b/src/Lucene.Net/Index/SegmentDocValues.cs
index d150f4f..e87e0c7 100644
--- a/src/Lucene.Net/Index/SegmentDocValues.cs
+++ b/src/Lucene.Net/Index/SegmentDocValues.cs
@@ -37,16 +37,16 @@ namespace Lucene.Net.Index
/// </summary>
internal sealed class SegmentDocValues
{
- private readonly IDictionary<long?, RefCount<DocValuesProducer>>
genDVProducers = new Dictionary<long?, RefCount<DocValuesProducer>>();
+ private readonly IDictionary<long, RefCount<DocValuesProducer>>
genDVProducers = new Dictionary<long, RefCount<DocValuesProducer>>();
- private RefCount<DocValuesProducer>
NewDocValuesProducer(SegmentCommitInfo si, IOContext context, Directory dir,
DocValuesFormat dvFormat, long? gen, IList<FieldInfo> infos, int
termsIndexDivisor)
+ private RefCount<DocValuesProducer>
NewDocValuesProducer(SegmentCommitInfo si, IOContext context, Directory dir,
DocValuesFormat dvFormat, long gen, IList<FieldInfo> infos, int
termsIndexDivisor)
{
Directory dvDir = dir;
string segmentSuffix = "";
- if ((long)gen != -1)
+ if (gen != -1)
{
dvDir = si.Info.Dir; // gen'd files are written outside CFS,
so use SegInfo directory
- segmentSuffix =
((long)gen).ToString(CultureInfo.InvariantCulture);//Convert.ToString((long)gen,
Character.MAX_RADIX);
+ segmentSuffix =
gen.ToString(CultureInfo.InvariantCulture);//Convert.ToString((long)gen,
Character.MAX_RADIX);
}
// set SegmentReadState to list only the fields that are relevant
to that gen
@@ -57,9 +57,9 @@ namespace Lucene.Net.Index
private class RefCountHelper : RefCount<DocValuesProducer>
{
private readonly SegmentDocValues outerInstance;
- private readonly long? gen; // LUCENENET: marked readonly
+ private readonly long gen; // LUCENENET: marked readonly
- public RefCountHelper(SegmentDocValues outerInstance,
DocValuesProducer fieldsProducer, long? gen)
+ public RefCountHelper(SegmentDocValues outerInstance,
DocValuesProducer fieldsProducer, long gen)
: base(fieldsProducer)
{
this.outerInstance = outerInstance;
@@ -83,7 +83,7 @@ namespace Lucene.Net.Index
/// <summary>
/// Returns the <see cref="DocValuesProducer"/> for the given
generation. </summary>
- internal DocValuesProducer GetDocValuesProducer(long? gen,
SegmentCommitInfo si, IOContext context, Directory dir, DocValuesFormat
dvFormat, IList<FieldInfo> infos, int termsIndexDivisor)
+ internal DocValuesProducer GetDocValuesProducer(long gen,
SegmentCommitInfo si, IOContext context, Directory dir, DocValuesFormat
dvFormat, IList<FieldInfo> infos, int termsIndexDivisor)
{
UninterruptableMonitor.Enter(this);
try
@@ -110,15 +110,15 @@ namespace Lucene.Net.Index
/// Decrement the reference count of the given <see
cref="DocValuesProducer"/>
/// generations.
/// </summary>
- internal void DecRef(IList<long?> dvProducersGens)
+ internal void DecRef(IList<long> dvProducersGens)
{
UninterruptableMonitor.Enter(this);
try
{
Exception t = null;
- foreach (long? gen in dvProducersGens)
+ foreach (long gen in dvProducersGens)
{
- RefCount<DocValuesProducer> dvp = genDVProducers[gen];
+ genDVProducers.TryGetValue(gen, out
RefCount<DocValuesProducer> dvp);
if (Debugging.AssertsEnabled) Debugging.Assert(dvp !=
null,"gen={0}", gen);
try
{
diff --git a/src/Lucene.Net/Index/SegmentReader.cs
b/src/Lucene.Net/Index/SegmentReader.cs
index 7e780dc..6700f02 100644
--- a/src/Lucene.Net/Index/SegmentReader.cs
+++ b/src/Lucene.Net/Index/SegmentReader.cs
@@ -69,7 +69,7 @@ namespace Lucene.Net.Index
private readonly FieldInfos fieldInfos; // LUCENENET specific - since
it is readonly, made all internal classes use property
- private readonly IList<long?> dvGens = new JCG.List<long?>();
+ private readonly IList<long> dvGens = new JCG.List<long>();
/// <summary>
/// Constructs a new <see cref="SegmentReader"/> with a new core.
</summary>
@@ -187,15 +187,15 @@ namespace Lucene.Net.Index
{
Directory dir = core.cfsReader ?? si.Info.Dir;
DocValuesFormat dvFormat = codec.DocValuesFormat;
- IDictionary<long?, IList<FieldInfo>> genInfos = GetGenInfos();
+ IDictionary<long, IList<FieldInfo>> genInfos = GetGenInfos();
// System.out.println("[" + Thread.currentThread().getName()
+ "] SR.initDocValuesProducers: segInfo=" + si + "; gens=" + genInfos.keySet());
// TODO: can we avoid iterating over fieldinfos several times and
creating maps of all this stuff if dv updates do not exist?
- foreach (KeyValuePair<long?, IList<FieldInfo>> e in genInfos)
+ foreach (KeyValuePair<long, IList<FieldInfo>> e in genInfos)
{
- long? gen = e.Key;
+ long gen = e.Key;
IList<FieldInfo> infos = e.Value;
DocValuesProducer dvp = segDocValues.GetDocValuesProducer(gen,
si, IOContext.READ, dir, dvFormat, infos, TermInfosIndexDivisor);
foreach (FieldInfo fi in infos)
@@ -245,9 +245,9 @@ namespace Lucene.Net.Index
}
// returns a gen->List<FieldInfo> mapping. Fields without DV updates
have gen=-1
- private IDictionary<long?, IList<FieldInfo>> GetGenInfos()
+ private IDictionary<long, IList<FieldInfo>> GetGenInfos()
{
- IDictionary<long?, IList<FieldInfo>> genInfos = new
Dictionary<long?, IList<FieldInfo>>();
+ IDictionary<long, IList<FieldInfo>> genInfos = new
Dictionary<long, IList<FieldInfo>>();
foreach (FieldInfo fi in FieldInfos)
{
if (fi.DocValuesType == DocValuesType.NONE)
diff --git a/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs
b/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs
index 978119d..3513077 100644
--- a/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs
+++ b/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs
@@ -52,7 +52,7 @@ namespace Lucene.Net.Index
/// <summary>
/// Used to map gen to <see cref="IndexCommit"/>. </summary>
- protected IDictionary<long?, IndexCommit> m_indexCommits = new
Dictionary<long?, IndexCommit>();
+ protected IDictionary<long, IndexCommit> m_indexCommits = new
Dictionary<long, IndexCommit>();
/// <summary>
/// Wrapped <see cref="IndexDeletionPolicy"/> </summary>
@@ -141,22 +141,20 @@ namespace Lucene.Net.Index
{
throw IllegalStateException.Create("this instance is not being
used by IndexWriter; be sure to use the instance returned from
writer.Config.IndexDeletionPolicy");
}
- int? refCount = m_refCounts[gen];
- if (refCount == null)
+ if (!m_refCounts.TryGetValue(gen, out int refCount))
{
throw new ArgumentException("commit gen=" + gen + " is not
currently snapshotted");
}
- int refCountInt = (int)refCount;
- if (Debugging.AssertsEnabled) Debugging.Assert(refCountInt > 0);
- refCountInt--;
- if (refCountInt == 0)
+ if (Debugging.AssertsEnabled) Debugging.Assert(refCount > 0);
+ refCount--;
+ if (refCount == 0)
{
m_refCounts.Remove(gen);
m_indexCommits.Remove(gen);
}
else
{
- m_refCounts[gen] = refCountInt;
+ m_refCounts[gen] = refCount;
}
}
@@ -293,7 +291,7 @@ namespace Lucene.Net.Index
other.primary = (IndexDeletionPolicy)this.primary.Clone();
other.m_lastCommit = null;
other.m_refCounts = new Dictionary<long, int>(m_refCounts);
- other.m_indexCommits = new Dictionary<long?,
IndexCommit>(m_indexCommits);
+ other.m_indexCommits = new Dictionary<long,
IndexCommit>(m_indexCommits);
return other;
}
finally
diff --git a/src/Lucene.Net/Index/StandardDirectoryReader.cs
b/src/Lucene.Net/Index/StandardDirectoryReader.cs
index 1b9beef..1b0776b 100644
--- a/src/Lucene.Net/Index/StandardDirectoryReader.cs
+++ b/src/Lucene.Net/Index/StandardDirectoryReader.cs
@@ -170,7 +170,7 @@ namespace Lucene.Net.Index
{
// we put the old SegmentReaders in a map, that allows us
// to lookup a reader using its segment name
- IDictionary<string, int?> segmentReaders = new Dictionary<string,
int?>();
+ IDictionary<string, int> segmentReaders = new Dictionary<string,
int>();
if (oldReaders != null)
{
@@ -191,7 +191,7 @@ namespace Lucene.Net.Index
for (int i = infos.Count - 1; i >= 0; i--)
{
// find SegmentReader for this segment
- if (!segmentReaders.TryGetValue(infos.Info(i).Info.Name, out
int? oldReaderIndex) || oldReaderIndex == null)
+ if (!segmentReaders.TryGetValue(infos.Info(i).Info.Name, out
int oldReaderIndex))
{
// this is a new segment, no old SegmentReader can be
reused
newReaders[i] = null;
@@ -199,7 +199,7 @@ namespace Lucene.Net.Index
else
{
// there is an old reader for this segment - we'll try to
reopen it
- newReaders[i] =
(SegmentReader)oldReaders[(int)oldReaderIndex];
+ newReaders[i] = (SegmentReader)oldReaders[oldReaderIndex];
}
bool success = false;
diff --git a/src/Lucene.Net/Index/TieredMergePolicy.cs
b/src/Lucene.Net/Index/TieredMergePolicy.cs
index 25eeb84..f3a40fa 100644
--- a/src/Lucene.Net/Index/TieredMergePolicy.cs
+++ b/src/Lucene.Net/Index/TieredMergePolicy.cs
@@ -566,7 +566,7 @@ namespace Lucene.Net.Index
public override string Explanation => "skew=" +
string.Format(CultureInfo.InvariantCulture, "{0:F3}", skew) + " nonDelRatio=" +
string.Format(CultureInfo.InvariantCulture, "{0:F3}", nonDelRatio);
}
- public override MergeSpecification FindForcedMerges(SegmentInfos
infos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool?>
segmentsToMerge)
+ public override MergeSpecification FindForcedMerges(SegmentInfos
infos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool>
segmentsToMerge)
{
if (Verbose())
{
@@ -578,10 +578,10 @@ namespace Lucene.Net.Index
JCG.List<SegmentCommitInfo> eligible = new
JCG.List<SegmentCommitInfo>();
bool forceMergeRunning = false;
ICollection<SegmentCommitInfo> merging =
m_writer.Get().MergingSegments;
- bool? segmentIsOriginal = false;
+ bool segmentIsOriginal = false;
foreach (SegmentCommitInfo info in infos.Segments)
{
- if (segmentsToMerge.TryGetValue(info, out bool? isOriginal))
+ if (segmentsToMerge.TryGetValue(info, out bool isOriginal))
{
segmentIsOriginal = isOriginal;
if (!merging.Contains(info))
diff --git a/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs
b/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs
index ecbb4cd..8a87984 100644
--- a/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs
+++ b/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs
@@ -87,14 +87,13 @@ namespace Lucene.Net.Index
return m_base.FindMerges((MergeTrigger)int.MinValue, segmentInfos);
}
- public override MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool?>
segmentsToMerge)
+ public override MergeSpecification FindForcedMerges(SegmentInfos
segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool>
segmentsToMerge)
{
// first find all old segments
- IDictionary<SegmentCommitInfo, bool?> oldSegments = new
Dictionary<SegmentCommitInfo, bool?>();
+ IDictionary<SegmentCommitInfo, bool> oldSegments = new
Dictionary<SegmentCommitInfo, bool>();
foreach (SegmentCommitInfo si in segmentInfos.Segments)
{
- bool? v = segmentsToMerge[si];
- if (v != null && ShouldUpgradeSegment(si))
+ if (segmentsToMerge.TryGetValue(si, out bool v) &&
ShouldUpgradeSegment(si))
{
oldSegments[si] = v;
}
diff --git a/src/Lucene.Net/Search/MultiPhraseQuery.cs
b/src/Lucene.Net/Search/MultiPhraseQuery.cs
index 06e6048..0979988 100644
--- a/src/Lucene.Net/Search/MultiPhraseQuery.cs
+++ b/src/Lucene.Net/Search/MultiPhraseQuery.cs
@@ -386,11 +386,11 @@ namespace Lucene.Net.Search
buffer.Append("\"");
int k = 0;
- int? lastPos = -1;
+ int lastPos = -1;
bool first = true;
foreach (Term[] terms in termArrays)
{
- int? position = positions[k];
+ int position = positions[k];
if (first)
{
first = false;
diff --git a/src/Lucene.Net/Search/PhraseQuery.cs
b/src/Lucene.Net/Search/PhraseQuery.cs
index b598099..9b28663 100644
--- a/src/Lucene.Net/Search/PhraseQuery.cs
+++ b/src/Lucene.Net/Search/PhraseQuery.cs
@@ -68,7 +68,7 @@ namespace Lucene.Net.Search
{
private string field;
private readonly IList<Term> terms = new JCG.List<Term>(4); //
LUCENENET: marked readonly
- private readonly IList<int?> positions = new JCG.List<int?>(4); //
LUCENENET: marked readonly
+ private readonly IList<int> positions = new JCG.List<int>(4); //
LUCENENET: marked readonly
private int maxPosition = 0;
private int slop = 0;
@@ -115,7 +115,7 @@ namespace Lucene.Net.Search
int position = 0;
if (positions.Count > 0)
{
- position = (int)positions[positions.Count - 1] + 1;
+ position = positions[positions.Count - 1] + 1;
}
Add(term, position);
@@ -161,7 +161,7 @@ namespace Lucene.Net.Search
int[] result = new int[positions.Count];
for (int i = 0; i < positions.Count; i++)
{
- result[i] = (int)positions[i];
+ result[i] = positions[i];
}
return result;
}
@@ -372,7 +372,7 @@ namespace Lucene.Net.Search
// term does exist, but has no positions
throw IllegalStateException.Create("field \"" +
t.Field + "\" was indexed without position data; cannot run PhraseQuery (term="
+ t.Text + ")");
}
- postingsFreqs[i] = new PostingsAndFreq(postingsEnum,
te.DocFreq, (int)outerInstance.positions[i], t);
+ postingsFreqs[i] = new PostingsAndFreq(postingsEnum,
te.DocFreq, outerInstance.positions[i], t);
}
// sort by increasing docFreq order
@@ -455,7 +455,7 @@ namespace Lucene.Net.Search
string[] pieces = new string[maxPosition + 1];
for (int i = 0; i < terms.Count; i++)
{
- int pos = (int)positions[i];
+ int pos = positions[i];
string s = pieces[pos];
if (s == null)
{
diff --git a/src/Lucene.Net/Search/QueryRescorer.cs
b/src/Lucene.Net/Search/QueryRescorer.cs
index df53424..6fc885a 100644
--- a/src/Lucene.Net/Search/QueryRescorer.cs
+++ b/src/Lucene.Net/Search/QueryRescorer.cs
@@ -146,16 +146,16 @@ namespace Lucene.Net.Search
{
Explanation secondPassExplanation = searcher.Explain(query, docID);
- float? secondPassScore = secondPassExplanation.IsMatch ?
(float?)secondPassExplanation.Value : null;
+ float? secondPassScore = secondPassExplanation.IsMatch ?
secondPassExplanation.Value : null;
float score;
- if (secondPassScore == null)
+ if (!secondPassScore.HasValue)
{
score = Combine(firstPassExplanation.Value, false, 0.0f);
}
else
{
- score = Combine(firstPassExplanation.Value, true,
(float)secondPassScore);
+ score = Combine(firstPassExplanation.Value, true,
secondPassScore.Value);
}
Explanation result = new Explanation(score, "combined first and
second pass score using " + this.GetType());
@@ -165,13 +165,13 @@ namespace Lucene.Net.Search
result.AddDetail(first);
Explanation second;
- if (secondPassScore == null)
+ if (!secondPassScore.HasValue)
{
second = new Explanation(0.0f, "no second pass score");
}
else
{
- second = new Explanation((float)secondPassScore, "second pass
score");
+ second = new Explanation(secondPassScore.Value, "second pass
score");
}
second.AddDetail(secondPassExplanation);
result.AddDetail(second);
diff --git a/src/Lucene.Net/Search/SloppyPhraseScorer.cs
b/src/Lucene.Net/Search/SloppyPhraseScorer.cs
index 912615f..86ca76b 100644
--- a/src/Lucene.Net/Search/SloppyPhraseScorer.cs
+++ b/src/Lucene.Net/Search/SloppyPhraseScorer.cs
@@ -452,7 +452,7 @@ namespace Lucene.Net.Search
/// <summary>
/// Detect repetition groups. Done once - for first doc. </summary>
- private IList<IList<PhrasePositions>>
GatherRptGroups(JCG.LinkedDictionary<Term, int?> rptTerms)
+ private IList<IList<PhrasePositions>>
GatherRptGroups(JCG.LinkedDictionary<Term, int> rptTerms)
{
PhrasePositions[] rpp = RepeatingPPs(rptTerms);
IList<IList<PhrasePositions>> res = new
JCG.List<IList<PhrasePositions>>();
@@ -534,16 +534,16 @@ namespace Lucene.Net.Search
/// <summary>
/// Find repeating terms and assign them ordinal values </summary>
- private JCG.LinkedDictionary<Term, int?> RepeatingTerms()
+ private JCG.LinkedDictionary<Term, int> RepeatingTerms()
{
- JCG.LinkedDictionary<Term, int?> tord = new
JCG.LinkedDictionary<Term, int?>();
- Dictionary<Term, int?> tcnt = new Dictionary<Term, int?>();
+ JCG.LinkedDictionary<Term, int> tord = new
JCG.LinkedDictionary<Term, int>();
+ Dictionary<Term, int> tcnt = new Dictionary<Term, int>();
for (PhrasePositions pp = min, prev = null; prev != max; pp =
(prev = pp).next) // iterate cyclic list: done once handled max
{
foreach (Term t in pp.terms)
{
- tcnt.TryGetValue(t, out int? cnt0);
- int? cnt = cnt0 == null ? new int?(1) : new int?(1 +
(int)cnt0);
+ ;
+ int cnt = !tcnt.TryGetValue(t, out int cnt0) ? 1 : 1 +
cnt0;
tcnt[t] = cnt;
if (cnt == 2)
{
@@ -556,7 +556,7 @@ namespace Lucene.Net.Search
/// <summary>
/// Find repeating pps, and for each, if has multi-terms, update
this.hasMultiTermRpts </summary>
- private PhrasePositions[] RepeatingPPs(IDictionary<Term, int?>
rptTerms)
+ private PhrasePositions[] RepeatingPPs(IDictionary<Term, int> rptTerms)
{
IList<PhrasePositions> rp = new JCG.List<PhrasePositions>();
for (PhrasePositions pp = min, prev = null; prev != max; pp =
(prev = pp).next) // iterate cyclic list: done once handled max
@@ -576,7 +576,7 @@ namespace Lucene.Net.Search
/// <summary>
/// bit-sets - for each repeating pp, for each of its repeating terms,
the term ordinal values is set </summary>
- private static IList<FixedBitSet> PpTermsBitSets(PhrasePositions[]
rpp, IDictionary<Term, int?> tord) // LUCENENET: CA1822: Mark members as static
+ private static IList<FixedBitSet> PpTermsBitSets(PhrasePositions[]
rpp, IDictionary<Term, int> tord) // LUCENENET: CA1822: Mark members as static
{
IList<FixedBitSet> bb = new JCG.List<FixedBitSet>(rpp.Length);
foreach (PhrasePositions pp in rpp)
@@ -584,8 +584,8 @@ namespace Lucene.Net.Search
FixedBitSet b = new FixedBitSet(tord.Count);
foreach (var t in pp.terms)
{
- if (tord.TryGetValue(t, out int? ord) && ord.HasValue)
- b.Set((int)ord);
+ if (tord.TryGetValue(t, out int ord))
+ b.Set(ord);
}
bb.Add(b);
}
@@ -619,7 +619,7 @@ namespace Lucene.Net.Search
/// <summary>
/// Map each term to the single group that contains it </summary>
- private static IDictionary<Term, int>
TermGroups(JCG.LinkedDictionary<Term, int?> tord, IList<FixedBitSet> bb) //
LUCENENET: CA1822: Mark members as static
+ private static IDictionary<Term, int>
TermGroups(JCG.LinkedDictionary<Term, int> tord, IList<FixedBitSet> bb) //
LUCENENET: CA1822: Mark members as static
{
Dictionary<Term, int> tg = new Dictionary<Term, int>();
Term[] t = tord.Keys.ToArray(/*new Term[0]*/);
diff --git a/src/Lucene.Net/Util/Automaton/BasicOperations.cs
b/src/Lucene.Net/Util/Automaton/BasicOperations.cs
index 6366b69..e506a57 100644
--- a/src/Lucene.Net/Util/Automaton/BasicOperations.cs
+++ b/src/Lucene.Net/Util/Automaton/BasicOperations.cs
@@ -680,7 +680,7 @@ namespace Lucene.Net.Util.Automaton
internal PointTransitions[] points = new PointTransitions[5];
private const int HASHMAP_CUTOVER = 30;
- private readonly Dictionary<int?, PointTransitions> map = new
Dictionary<int?, PointTransitions>();
+ private readonly Dictionary<int, PointTransitions> map = new
Dictionary<int, PointTransitions>();
private bool useHash = false;
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -707,11 +707,10 @@ namespace Lucene.Net.Util.Automaton
{
if (useHash)
{
- int? pi = point;
- if (!map.TryGetValue(pi, out PointTransitions p))
+ if (!map.TryGetValue(point, out PointTransitions p))
{
p = Next(point);
- map[pi] = p;
+ map[point] = p;
}
return p;
}