This is an automated email from the ASF dual-hosted git repository.
nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git
The following commit(s) were added to refs/heads/master by this push:
new 93cd1e9 BREAKING: Removed (more) unnecessary nullable value types
(see #574 and #581) (#583)
93cd1e9 is described below
commit 93cd1e99be6b904b6374c4d15d74e5a8ee89eb1c
Author: Shad Storhaug <[email protected]>
AuthorDate: Tue Dec 21 01:09:23 2021 +0700
BREAKING: Removed (more) unnecessary nullable value types (see #574 and
#581) (#583)
* BREAKING: Lucene.Net.Queries.Function.ValueSources.EnumFieldSource:
Removed nullable value types from public API
* Lucene.Net.Queries: Removed unnecessary nullable value types
* SWEEP: Removed several unnecessary nullable value type declarations in
the test framework and in tests
* BREAKING: Lucene.Net.QueryParsers.Flexible: Removed unnecessary nullable
value types from ConfigurationKeys and configuration setters/getters in
StandardQueryParser. Added AbstractQueryConfig.TryGetValue() method to allow
retrieving value types so they can be defaulted properly.
---
.../Function/ValueSources/DoubleFieldSource.cs | 2 +-
.../Function/ValueSources/EnumFieldSource.cs | 34 ++++++++-----------
.../Function/ValueSources/FloatFieldSource.cs | 2 +-
.../Function/ValueSources/IntFieldSource.cs | 2 +-
.../Function/ValueSources/ShortFieldSource.cs | 2 +-
.../Flexible/Core/Config/AbstractQueryConfig.cs | 35 +++++++++++++++++--
.../Standard/Config/FieldBoostMapFCListener.cs | 10 ++----
.../Config/FieldDateResolutionFCListener.cs | 20 +++--------
.../Standard/Config/StandardQueryConfigHandler.cs | 18 +++++-----
.../Processors/AnalyzerQueryNodeProcessor.cs | 11 +++---
.../Standard/Processors/BoostQueryNodeProcessor.cs | 6 ++--
.../DefaultPhraseSlopQueryNodeProcessor.cs | 6 ++--
.../LowercaseExpandedTermsQueryNodeProcessor.cs | 5 ++-
.../Flexible/Standard/StandardQueryParser.cs | 21 ++++++------
.../Analysis/BaseTokenStreamTestCase.cs | 12 +++----
.../Codecs/RAMOnly/RAMOnlyPostingsFormat.cs | 4 +--
.../Index/BaseTermVectorsFormatTestCase.cs | 2 +-
.../Analysis/CharFilters/TestMappingCharFilter.cs | 4 +--
.../Analysis/Util/TestCharArraySet.cs | 6 ++--
.../SortedSet/TestSortedSetDocValuesFacets.cs | 12 +++----
.../Taxonomy/TestTaxonomyCombined.cs | 6 ++--
.../Taxonomy/TestTaxonomyFacetCounts.cs | 12 +++----
.../Taxonomy/TestTaxonomyFacetCounts2.cs | 38 ++++++++++-----------
.../Taxonomy/TestTaxonomyFacetSumValueSource.cs | 12 +++----
src/Lucene.Net.Tests.Facet/TestDrillSideways.cs | 10 +++---
.../Mlt/TestMoreLikeThis.cs | 39 ++++++++++------------
.../TestCustomScoreQuery.cs | 2 +-
.../Precedence/TestPrecedenceQueryParser.cs | 2 +-
.../Flexible/Standard/TestMultiFieldQPHelper.cs | 2 +-
.../Flexible/Standard/TestQPHelper.cs | 2 +-
src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs | 6 ++--
.../Prefix/TestRecursivePrefixTreeStrategy.cs | 6 ++--
.../Suggest/Analyzing/TestFreeTextSuggester.cs | 16 ++++-----
src/Lucene.Net.Tests/Index/TestDocTermOrds.cs | 6 ++--
.../Index/TestDocValuesWithThreads.cs | 12 +++----
src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs | 6 ++--
.../Index/TestDocumentsWriterDeleteQueue.cs | 12 +++----
.../Index/TestIndexWriterDelete.cs | 2 +-
src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs | 4 +--
src/Lucene.Net.Tests/Index/TestMixedCodecs.cs | 4 +--
src/Lucene.Net.Tests/Index/TestMultiDocValues.cs | 4 +--
src/Lucene.Net.Tests/Index/TestMultiFields.cs | 6 ++--
src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs | 6 ++--
src/Lucene.Net.Tests/Index/TestTermsEnum.cs | 4 +--
.../Search/TestCustomSearcherSort.cs | 12 +++----
src/Lucene.Net.Tests/Search/TestDocIdSet.cs | 6 ++--
.../Search/TestFieldCacheRangeFilter.cs | 28 ++++++++--------
src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs | 2 +-
src/Lucene.Net.Tests/Search/TestSubScorerFreqs.cs | 38 ++++++++++-----------
src/Lucene.Net.Tests/Util/TestBytesRefHash.cs | 4 +--
.../Util/TestDoubleBarrelLRUCache.cs | 4 +--
src/Lucene.Net.Tests/Util/TestMergedIterator.cs | 8 ++---
52 files changed, 267 insertions(+), 268 deletions(-)
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/DoubleFieldSource.cs
b/src/Lucene.Net.Queries/Function/ValueSources/DoubleFieldSource.cs
index 55dd2f8..0165b0f 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/DoubleFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/DoubleFieldSource.cs
@@ -100,7 +100,7 @@ namespace Lucene.Net.Queries.Function.ValueSources
public override int GetHashCode()
{
- int h = m_parser == null ? typeof(double?).GetHashCode() :
m_parser.GetType().GetHashCode();
+ int h = m_parser == null ? typeof(double).GetHashCode() :
m_parser.GetType().GetHashCode();
h += base.GetHashCode();
return h;
}
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs
b/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs
index 4981ca6..617e764 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs
@@ -31,17 +31,17 @@ namespace Lucene.Net.Queries.Function.ValueSources
/// <summary>
/// Obtains <see cref="int"/> field values from <see
cref="IFieldCache.GetInt32s(AtomicReader, string, FieldCache.IInt32Parser,
bool)"/> and makes
/// those values available as other numeric types, casting as needed.
- /// StrVal of the value is not the <see cref="int"/> value, but its <see
cref="string"/> (displayed) value
+ /// StrVal of the value is not the <see cref="int"/> value, but its <see
cref="string"/> (displayed) value.
/// </summary>
public class EnumFieldSource : FieldCacheSource
{
private const int DEFAULT_VALUE = -1;
private readonly FieldCache.IInt32Parser parser;
- private readonly IDictionary<int?, string> enumIntToStringMap;
- private readonly IDictionary<string, int?> enumStringToIntMap;
+ private readonly IDictionary<int, string> enumIntToStringMap;
+ private readonly IDictionary<string, int> enumStringToIntMap;
- public EnumFieldSource(string field, FieldCache.IInt32Parser parser,
IDictionary<int?, string> enumIntToStringMap, IDictionary<string, int?>
enumStringToIntMap)
+ public EnumFieldSource(string field, FieldCache.IInt32Parser parser,
IDictionary<int, string> enumIntToStringMap, IDictionary<string, int>
enumStringToIntMap)
: base(field)
{
this.parser = parser;
@@ -54,15 +54,11 @@ namespace Lucene.Net.Queries.Function.ValueSources
/// <summary>
/// NOTE: This was intValueToStringValue() in Lucene
/// </summary>
- private string Int32ValueToStringValue(int? intVal)
+ private string Int32ValueToStringValue(int intVal)
{
- if (intVal == null)
- {
- return null;
- }
+ // LUCENENET: null value not applicable for value types (it
defaults to 0 anyway)
- string enumString = enumIntToStringMap[intVal];
- if (enumString != null)
+ if (enumIntToStringMap.TryGetValue(intVal, out string enumString))
{
return enumString;
}
@@ -80,8 +76,7 @@ namespace Lucene.Net.Queries.Function.ValueSources
return null;
}
- int? enumInt = enumStringToIntMap[stringVal];
- if (enumInt != null) //enum int found for str
+ if (enumStringToIntMap.TryGetValue(stringVal, out int enumInt))
//enum int found for str
{
return enumInt;
}
@@ -91,8 +86,7 @@ namespace Lucene.Net.Queries.Function.ValueSources
{
intValue = DEFAULT_VALUE;
}
- string enumString = enumIntToStringMap[intValue];
- if (enumString != null) //has matching str
+ if (enumIntToStringMap.ContainsKey(intValue)) //has matching str
{
return intValue;
}
@@ -163,7 +157,7 @@ namespace Lucene.Net.Queries.Function.ValueSources
public override string StrVal(int doc)
{
- int? intValue = arr.Get(doc);
+ int intValue = arr.Get(doc);
return outerInstance.Int32ValueToStringValue(intValue);
}
@@ -255,11 +249,11 @@ namespace Lucene.Net.Queries.Function.ValueSources
// LUCENENET specific: must use
DictionaryEqualityComparer.Equals() to ensure values
// contained within the dictionaries are compared for equality
- if (!JCG.DictionaryEqualityComparer<int?,
string>.Default.Equals(enumIntToStringMap, that.enumIntToStringMap))
+ if (!JCG.DictionaryEqualityComparer<int,
string>.Default.Equals(enumIntToStringMap, that.enumIntToStringMap))
{
return false;
}
- if (!JCG.DictionaryEqualityComparer<string,
int?>.Default.Equals(enumStringToIntMap, that.enumStringToIntMap))
+ if (!JCG.DictionaryEqualityComparer<string,
int>.Default.Equals(enumStringToIntMap, that.enumStringToIntMap))
{
return false;
}
@@ -277,8 +271,8 @@ namespace Lucene.Net.Queries.Function.ValueSources
result = 31 * result + parser.GetHashCode();
// LUCENENET specific: must use
DictionaryEqualityComparer.GetHashCode() to ensure values
// contained within the dictionaries are compared for equality
- result = 31 * result + JCG.DictionaryEqualityComparer<int?,
string>.Default.GetHashCode(enumIntToStringMap);
- result = 31 * result + JCG.DictionaryEqualityComparer<string,
int?>.Default.GetHashCode(enumStringToIntMap);
+ result = 31 * result + JCG.DictionaryEqualityComparer<int,
string>.Default.GetHashCode(enumIntToStringMap);
+ result = 31 * result + JCG.DictionaryEqualityComparer<string,
int>.Default.GetHashCode(enumStringToIntMap);
return result;
}
}
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/FloatFieldSource.cs
b/src/Lucene.Net.Queries/Function/ValueSources/FloatFieldSource.cs
index d56c9d9..7a8a559 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/FloatFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/FloatFieldSource.cs
@@ -107,7 +107,7 @@ namespace Lucene.Net.Queries.Function.ValueSources
public override int GetHashCode()
{
- int h = m_parser == null ? typeof(float?).GetHashCode() :
m_parser.GetType().GetHashCode();
+ int h = m_parser == null ? typeof(float).GetHashCode() :
m_parser.GetType().GetHashCode();
h += base.GetHashCode();
return h;
}
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs
b/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs
index 457d73b..8cbc1cf 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs
@@ -147,7 +147,7 @@ namespace Lucene.Net.Queries.Function.ValueSources
public override int GetHashCode()
{
- int h = parser == null ? typeof(int?).GetHashCode() :
parser.GetType().GetHashCode();
+ int h = parser == null ? typeof(int).GetHashCode() :
parser.GetType().GetHashCode();
h += base.GetHashCode();
return h;
}
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/ShortFieldSource.cs
b/src/Lucene.Net.Queries/Function/ValueSources/ShortFieldSource.cs
index 6e219b2..c47720f 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/ShortFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/ShortFieldSource.cs
@@ -133,7 +133,7 @@ namespace Lucene.Net.Queries.Function.ValueSources
public override int GetHashCode()
{
- var h = parser == null ? typeof(short?).GetHashCode() :
parser.GetType().GetHashCode();
+ var h = parser == null ? typeof(short).GetHashCode() :
parser.GetType().GetHashCode();
h += base.GetHashCode();
return h;
}
diff --git
a/src/Lucene.Net.QueryParser/Flexible/Core/Config/AbstractQueryConfig.cs
b/src/Lucene.Net.QueryParser/Flexible/Core/Config/AbstractQueryConfig.cs
index f0ab7c7..879d5ff 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Core/Config/AbstractQueryConfig.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Core/Config/AbstractQueryConfig.cs
@@ -39,6 +39,32 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Config
}
/// <summary>
+ /// Gets the value associated with the specified key.
+ /// </summary>
+ /// <typeparam name="T">the value's type</typeparam>
+ /// <param name="key">the key, cannot be <c>null</c></param>
+ /// <param name="value">When this method returns, contains the value
associated with the specified key,
+ /// if the key is found; otherwise, the default value for the type of
the <paramref name="value"/> parameter.
+ /// This parameter is passed uninitialized.</param>
+ /// <returns><c>true</c> if the configuration contains an element with
the specified <paramref name="key"/>; otherwise, <c>false</c>.</returns>
+ // LUCENENET specific - using this method allows us to store
non-nullable value types
+ public virtual bool TryGetValue<T>(ConfigurationKey<T> key, out T
value)
+ {
+ if (key is null)
+ throw new ArgumentNullException(nameof(key), "key cannot be
null!");
+ if (this.configMap.TryGetValue(key, out object resultObj))
+ {
+ if (typeof(T).IsValueType)
+ value = ((T[])resultObj)[0]; // LUCENENET: Retrieve a 1
dimensional array for value types to avoid unboxing
+ else
+ value = (T)resultObj;
+ return true;
+ }
+ value = default;
+ return false;
+ }
+
+ /// <summary>
/// Returns the value held by the given key.
/// </summary>
/// <typeparam name="T">the value's type</typeparam>
@@ -50,8 +76,9 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Config
{
throw new ArgumentNullException(nameof(key), "key cannot be
null!"); // LUCENENET specific - changed from IllegalArgumentException to
ArgumentNullException (.NET convention)
}
- this.configMap.TryGetValue(key, out object result);
- return result == null ? default : (T)result;
+ return !this.configMap.TryGetValue(key, out object result) ||
result is null ? default :
+            // LUCENENET: Retrieve a 1 dimensional array for value types
to avoid unboxing
+ (typeof(T).IsValueType ? ((T[])result)[0] : (T)result);
}
/// <summary>
@@ -87,6 +114,10 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Config
{
Unset(key);
}
+ else if (typeof(T).IsValueType)
+ {
+ this.configMap[key] = new T[] { value }; // LUCENENET: Store a
1 dimensional array for value types to avoid boxing
+ }
else
{
this.configMap[key] = value;
diff --git
a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldBoostMapFCListener.cs
b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldBoostMapFCListener.cs
index 0679361..c9ba050 100644
---
a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldBoostMapFCListener.cs
+++
b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldBoostMapFCListener.cs
@@ -41,15 +41,9 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
public virtual void BuildFieldConfig(FieldConfig fieldConfig)
{
- IDictionary<string, float?> fieldBoostMap =
this.config.Get(ConfigurationKeys.FIELD_BOOST_MAP);
-
- if (fieldBoostMap != null)
- {
- if (fieldBoostMap.TryGetValue(fieldConfig.Field, out float?
boost) && boost != null)
- {
+ if (this.config.TryGetValue(ConfigurationKeys.FIELD_BOOST_MAP, out
IDictionary<string, float> fieldBoostMap)
+ && fieldBoostMap.TryGetValue(fieldConfig.Field, out float
boost))
fieldConfig.Set(ConfigurationKeys.BOOST, boost);
- }
- }
}
}
}
diff --git
a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs
b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs
index 322cedf..c6fdb1e 100644
---
a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs
+++
b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs
@@ -42,22 +42,12 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
public virtual void BuildFieldConfig(FieldConfig fieldConfig)
{
- DateResolution? dateRes = null;
- IDictionary<string, DateResolution?> dateResMap =
this.config.Get(ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP);
-
- if (dateResMap != null)
- {
- dateResMap.TryGetValue(fieldConfig.Field, out dateRes);
- }
-
- if (dateRes == null)
- {
- dateRes = this.config.Get(ConfigurationKeys.DATE_RESOLUTION);
- }
-
- if (dateRes != null)
+ // LUCENENET: Simplified logic using TryGetValue
+ if
((this.config.TryGetValue(ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP, out
IDictionary<string, DateResolution> dateResMap)
+ && dateResMap.TryGetValue(fieldConfig.Field, out
DateResolution dateRes))
+ || this.config.TryGetValue(ConfigurationKeys.DATE_RESOLUTION,
out dateRes))
{
- fieldConfig.Set(ConfigurationKeys.DATE_RESOLUTION,
dateRes.Value);
+ fieldConfig.Set(ConfigurationKeys.DATE_RESOLUTION, dateRes);
}
}
}
diff --git
a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/StandardQueryConfigHandler.cs
b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/StandardQueryConfigHandler.cs
index 51d0529..314c2b5 100644
---
a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/StandardQueryConfigHandler.cs
+++
b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/StandardQueryConfigHandler.cs
@@ -50,11 +50,11 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
Set(ConfigurationKeys.PHRASE_SLOP, 0); //default value 2.4
Set(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS, true); //default
value 2.4
Set(ConfigurationKeys.ENABLE_POSITION_INCREMENTS, false);
//default value 2.4
- Set(ConfigurationKeys.FIELD_BOOST_MAP, new
JCG.LinkedDictionary<string, float?>());
+ Set(ConfigurationKeys.FIELD_BOOST_MAP, new
JCG.LinkedDictionary<string, float>());
Set(ConfigurationKeys.FUZZY_CONFIG, new FuzzyConfig());
Set(ConfigurationKeys.LOCALE, null);
Set(ConfigurationKeys.MULTI_TERM_REWRITE_METHOD,
MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT);
- Set(ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP, new
JCG.Dictionary<string, DateResolution?>());
+ Set(ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP, new
JCG.Dictionary<string, DateResolution>());
}
/// <summary>
@@ -76,19 +76,19 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
/// Key used to set whether position increments is enabled
/// </summary>
/// <seealso cref="StandardQueryParser.EnablePositionIncrements"/>
- public readonly static ConfigurationKey<bool?>
ENABLE_POSITION_INCREMENTS = ConfigurationKey.NewInstance<bool?>();
+ public readonly static ConfigurationKey<bool>
ENABLE_POSITION_INCREMENTS = ConfigurationKey.NewInstance<bool>();
/// <summary>
/// Key used to set whether expanded terms should be lower-cased
/// </summary>
/// <seealso cref="StandardQueryParser.LowercaseExpandedTerms"/>
- public readonly static ConfigurationKey<bool?>
LOWERCASE_EXPANDED_TERMS = ConfigurationKey.NewInstance<bool?>();
+ public readonly static ConfigurationKey<bool> LOWERCASE_EXPANDED_TERMS
= ConfigurationKey.NewInstance<bool>();
/// <summary>
/// Key used to set whether leading wildcards are supported
/// </summary>
/// <seealso cref="StandardQueryParser.AllowLeadingWildcard"/>
- public readonly static ConfigurationKey<bool?> ALLOW_LEADING_WILDCARD
= ConfigurationKey.NewInstance<bool?>();
+ public readonly static ConfigurationKey<bool> ALLOW_LEADING_WILDCARD =
ConfigurationKey.NewInstance<bool>();
/// <summary>
/// Key used to set the <see cref="Analyzer"/> used for terms found in
the query
@@ -106,7 +106,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
/// Key used to set the default phrase slop
/// </summary>
/// <seealso cref="StandardQueryParser.PhraseSlop"/>
- public readonly static ConfigurationKey<int?> PHRASE_SLOP =
ConfigurationKey.NewInstance<int?>();
+ public readonly static ConfigurationKey<int> PHRASE_SLOP =
ConfigurationKey.NewInstance<int>();
/// <summary>
/// Key used to set the <see cref="CultureInfo">locale</see> used
when parsing the query
@@ -134,14 +134,14 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
/// Key used to set a field to boost map that is used to set the boost
for each field
/// </summary>
/// <seealso cref="StandardQueryParser.FieldsBoost"/>
- public readonly static ConfigurationKey<IDictionary<string, float?>>
FIELD_BOOST_MAP = ConfigurationKey.NewInstance<IDictionary<string, float?>>();
+ public readonly static ConfigurationKey<IDictionary<string, float>>
FIELD_BOOST_MAP = ConfigurationKey.NewInstance<IDictionary<string, float>>();
/// <summary>
/// Key used to set a field to <see cref="DateResolution"/> map that
is used
/// to normalize each date field value.
/// </summary>
/// <seealso cref="StandardQueryParser.DateResolutionMap"/>
- public readonly static ConfigurationKey<IDictionary<string,
DateResolution?>> FIELD_DATE_RESOLUTION_MAP =
ConfigurationKey.NewInstance<IDictionary<string, DateResolution?>>();
+ public readonly static ConfigurationKey<IDictionary<string,
DateResolution>> FIELD_DATE_RESOLUTION_MAP =
ConfigurationKey.NewInstance<IDictionary<string, DateResolution>>();
/// <summary>
/// Key used to set the <see cref="FuzzyConfig"/> used to create fuzzy
queries.
@@ -161,7 +161,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config
/// Key used to set the boost value in <see cref="FieldConfig"/>
objects.
/// </summary>
/// <seealso cref="StandardQueryParser.FieldsBoost"/>
- public readonly static ConfigurationKey<float?> BOOST =
ConfigurationKey.NewInstance<float?>();
+ public readonly static ConfigurationKey<float> BOOST =
ConfigurationKey.NewInstance<float>();
/// <summary>
/// Key used to set a field to its <see cref="NumericConfig"/>.
diff --git
a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs
b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs
index 65c48e4..e357b50 100644
---
a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs
+++
b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs
@@ -68,23 +68,22 @@ namespace
Lucene.Net.QueryParsers.Flexible.Standard.Processors
public override IQueryNode Process(IQueryNode queryTree)
{
- Analyzer analyzer =
GetQueryConfigHandler().Get(ConfigurationKeys.ANALYZER);
+ var queryConfigHandler = GetQueryConfigHandler();
+ Analyzer analyzer =
queryConfigHandler.Get(ConfigurationKeys.ANALYZER);
if (analyzer != null)
{
this.analyzer = analyzer;
this.positionIncrementsEnabled = false;
- bool? positionIncrementsEnabled =
GetQueryConfigHandler().Get(ConfigurationKeys.ENABLE_POSITION_INCREMENTS);
// LUCENENET specific - rather than using null, we are relying
on the behavior that the default
// value for an enum is 0 (OR in this case).
- //var defaultOperator =
GetQueryConfigHandler().Get(ConfigurationKeys.DEFAULT_OPERATOR);
- //this.defaultOperator = defaultOperator != null ?
defaultOperator.Value : Operator.OR;
this.defaultOperator =
GetQueryConfigHandler().Get(ConfigurationKeys.DEFAULT_OPERATOR);
- if (positionIncrementsEnabled != null)
+ // LUCENENET: Use TryGetValue() to determine if the value
exists
+ if
(GetQueryConfigHandler().TryGetValue(ConfigurationKeys.ENABLE_POSITION_INCREMENTS,
out bool positionIncrementsEnabled))
{
- this.positionIncrementsEnabled =
positionIncrementsEnabled.Value;
+ this.positionIncrementsEnabled = positionIncrementsEnabled;
}
if (this.analyzer != null)
diff --git
a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BoostQueryNodeProcessor.cs
b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BoostQueryNodeProcessor.cs
index 9a9342b..a51de03 100644
---
a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BoostQueryNodeProcessor.cs
+++
b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BoostQueryNodeProcessor.cs
@@ -48,11 +48,9 @@ namespace
Lucene.Net.QueryParsers.Flexible.Standard.Processors
if (fieldConfig != null)
{
- float? boost =
fieldConfig.Get(ConfigurationKeys.BOOST);
-
- if (boost != null)
+ if (fieldConfig.TryGetValue(ConfigurationKeys.BOOST,
out float boost))
{
- return new BoostQueryNode(node, boost.Value);
+ return new BoostQueryNode(node, boost);
}
}
}
diff --git
a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/DefaultPhraseSlopQueryNodeProcessor.cs
b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/DefaultPhraseSlopQueryNodeProcessor.cs
index adf9511..43ea864 100644
---
a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/DefaultPhraseSlopQueryNodeProcessor.cs
+++
b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/DefaultPhraseSlopQueryNodeProcessor.cs
@@ -51,11 +51,9 @@ namespace
Lucene.Net.QueryParsers.Flexible.Standard.Processors
if (queryConfig != null)
{
- int? defaultPhraseSlop =
queryConfig.Get(ConfigurationKeys.PHRASE_SLOP);
-
- if (defaultPhraseSlop != null)
+ if (queryConfig.TryGetValue(ConfigurationKeys.PHRASE_SLOP, out
int defaultPhraseSlop))
{
- this.defaultPhraseSlop = defaultPhraseSlop.Value;
+ this.defaultPhraseSlop = defaultPhraseSlop;
return base.Process(queryTree);
}
diff --git
a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/LowercaseExpandedTermsQueryNodeProcessor.cs
b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/LowercaseExpandedTermsQueryNodeProcessor.cs
index f91ee57..fb907e7 100644
---
a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/LowercaseExpandedTermsQueryNodeProcessor.cs
+++
b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/LowercaseExpandedTermsQueryNodeProcessor.cs
@@ -43,9 +43,8 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors
public override IQueryNode Process(IQueryNode queryTree)
{
- bool? lowercaseExpandedTerms =
GetQueryConfigHandler().Get(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS);
-
- if (lowercaseExpandedTerms != null && lowercaseExpandedTerms.Value)
+ if
(GetQueryConfigHandler().TryGetValue(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS,
out bool lowercaseExpandedTerms)
+ && lowercaseExpandedTerms)
{
return base.Process(queryTree);
}
diff --git
a/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs
b/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs
index 20b326d..52c6733 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs
@@ -160,7 +160,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
/// </summary>
public virtual Operator DefaultOperator
{
- get => QueryConfigHandler.Get(ConfigurationKeys.DEFAULT_OPERATOR);
+ get => QueryConfigHandler.Get(ConfigurationKeys.DEFAULT_OPERATOR);
// LUCENENET: The default value is OR, so we just rely on the compiler if it
doesn't exist
set => QueryConfigHandler.Set(ConfigurationKeys.DEFAULT_OPERATOR,
value);
}
@@ -175,7 +175,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
/// </summary>
public virtual bool LowercaseExpandedTerms
{
- get =>
QueryConfigHandler.Get(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS) ?? true;
+ get =>
QueryConfigHandler.TryGetValue(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS, out
bool value) ? value : true;
set =>
QueryConfigHandler.Set(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS, value);
}
@@ -190,7 +190,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
/// </summary>
public virtual bool AllowLeadingWildcard
{
- get =>
QueryConfigHandler.Get(ConfigurationKeys.ALLOW_LEADING_WILDCARD) ?? false;
+ get =>
QueryConfigHandler.Get(ConfigurationKeys.ALLOW_LEADING_WILDCARD); // LUCENENET:
The default value is false, so we just rely on the compiler if it doesn't exist
set =>
QueryConfigHandler.Set(ConfigurationKeys.ALLOW_LEADING_WILDCARD, value);
}
@@ -205,7 +205,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
/// </summary>
public virtual bool EnablePositionIncrements
{
- get =>
QueryConfigHandler.Get(ConfigurationKeys.ENABLE_POSITION_INCREMENTS) ?? false;
+ get =>
QueryConfigHandler.Get(ConfigurationKeys.ENABLE_POSITION_INCREMENTS); //
LUCENENET: The default value is false, so we just rely on the compiler if it
doesn't exist
set =>
QueryConfigHandler.Set(ConfigurationKeys.ENABLE_POSITION_INCREMENTS, value);
}
@@ -333,7 +333,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
/// </summary>
public virtual int PhraseSlop
{
- get => QueryConfigHandler.Get(ConfigurationKeys.PHRASE_SLOP) ?? 0;
+ get => QueryConfigHandler.Get(ConfigurationKeys.PHRASE_SLOP); //
LUCENENET: The default value is 0, so we just rely on the compiler if it
doesn't exist
set => QueryConfigHandler.Set(ConfigurationKeys.PHRASE_SLOP,
value);
}
@@ -369,7 +369,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
/// <summary>
/// Gets or Sets the field to boost map used to set boost for each
field.
/// </summary>
- public virtual IDictionary<string, float?> FieldsBoost
+ public virtual IDictionary<string, float> FieldsBoost
{
get => QueryConfigHandler.Get(ConfigurationKeys.FIELD_BOOST_MAP);
set => QueryConfigHandler.Set(ConfigurationKeys.FIELD_BOOST_MAP,
value);
@@ -390,22 +390,23 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
/// Gets the default <see cref="Documents.DateResolution"/> used for
certain field when
/// no <see cref="Documents.DateResolution"/> is defined for this
field.
/// </summary>
- public virtual DateResolution DateResolution =>
QueryConfigHandler.Get(ConfigurationKeys.DATE_RESOLUTION);
+ [ExceptionToNullableEnumConvention]
+ public virtual DateResolution? DateResolution =>
QueryConfigHandler.TryGetValue(ConfigurationKeys.DATE_RESOLUTION, out
DateResolution value) ? value : null;
/// <summary>
/// Sets the <see cref="Documents.DateResolution"/> used for each field
/// </summary>
/// <param name="dateRes">a collection that maps a field to its <see
cref="Documents.DateResolution"/></param>
[Obsolete("Use DateResolutionMap property instead.")]
- public virtual void SetDateResolution(IDictionary<string,
DateResolution?> dateRes)
+ public virtual void SetDateResolution(IDictionary<string,
DateResolution> dateRes)
{
DateResolutionMap = dateRes;
}
/// <summary>
- /// Gets or Sets the field to <see cref="T:DateResolution?"/> map used
to normalize each date field.
+ /// Gets or Sets the field to <see cref="Documents.DateResolution"/>
map used to normalize each date field.
/// </summary>
- public virtual IDictionary<string, DateResolution?> DateResolutionMap
+ public virtual IDictionary<string, DateResolution> DateResolutionMap
{
get =>
QueryConfigHandler.Get(ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP);
set =>
QueryConfigHandler.Set(ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP, value);
diff --git a/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs
b/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs
index 67e0fc0..0311b7f 100644
--- a/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs
@@ -199,8 +199,8 @@ namespace Lucene.Net.Analysis
// *********** End From Lucene 8.2.0 **************
// Maps position to the start/end offset:
- IDictionary<int?, int?> posToStartOffset = new
Dictionary<int?, int?>();
- IDictionary<int?, int?> posToEndOffset = new Dictionary<int?,
int?>();
+ IDictionary<int, int> posToStartOffset = new Dictionary<int,
int>();
+ IDictionary<int, int> posToEndOffset = new Dictionary<int,
int>();
ts.Reset();
int pos = -1;
@@ -309,7 +309,7 @@ namespace Lucene.Net.Analysis
int posLength = posLengthAtt.PositionLength;
- if (!posToStartOffset.TryGetValue(pos, out int?
oldStartOffset))
+ if (!posToStartOffset.TryGetValue(pos, out int
oldStartOffset))
{
// First time we've seen a token leaving from
this position:
posToStartOffset[pos] = startOffset;
@@ -320,12 +320,12 @@ namespace Lucene.Net.Analysis
// We've seen a token leaving from this
position
// before; verify the startOffset is the same:
//System.out.println(" + vs " + pos + " -> "
+ startOffset);
-
Assert.AreEqual(oldStartOffset.GetValueOrDefault(), startOffset, "pos=" + pos +
" posLen=" + posLength + " token=" + termAtt);
+ Assert.AreEqual(oldStartOffset, startOffset,
"pos=" + pos + " posLen=" + posLength + " token=" + termAtt);
}
int endPos = pos + posLength;
- if (!posToEndOffset.TryGetValue(endPos, out int?
oldEndOffset))
+ if (!posToEndOffset.TryGetValue(endPos, out int
oldEndOffset))
{
// First time we've seen a token arriving to
this position:
posToEndOffset[endPos] = endOffset;
@@ -336,7 +336,7 @@ namespace Lucene.Net.Analysis
// We've seen a token arriving to this position
// before; verify the endOffset is the same:
//System.out.println(" + ve " + endPos + " ->
" + endOffset);
-
Assert.AreEqual(oldEndOffset.GetValueOrDefault(), endOffset, "pos=" + pos + "
posLen=" + posLength + " token=" + termAtt);
+ Assert.AreEqual(oldEndOffset, endOffset,
"pos=" + pos + " posLen=" + posLength + " token=" + termAtt);
}
}
}
diff --git
a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
index 4f83a6f..9465e23 100644
--- a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
@@ -569,7 +569,7 @@ namespace Lucene.Net.Codecs.RAMOnly
}
// Holds all indexes created, keyed by the ID assigned in
fieldsConsumer
- private readonly IDictionary<int?, RAMPostings> state = new
Dictionary<int?, RAMPostings>();
+ private readonly IDictionary<int, RAMPostings> state = new
Dictionary<int, RAMPostings>();
private readonly AtomicInt64 nextID = new AtomicInt64();
@@ -650,7 +650,7 @@ namespace Lucene.Net.Codecs.RAMOnly
UninterruptableMonitor.Enter(state);
try
{
- return state[id];
+ return state.TryGetValue(id, out RAMPostings value) ? value :
null;
}
finally
{
diff --git
a/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
b/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
index 46f928c..7bd5613 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
@@ -828,7 +828,7 @@ namespace Lucene.Net.Index
RandomDocumentFactory docFactory = new RandomDocumentFactory(this,
5, 20);
int numDocs = AtLeast(100);
int numDeletes = Random.Next(numDocs);
- ISet<int?> deletes = new JCG.HashSet<int?>();
+ ISet<int> deletes = new JCG.HashSet<int>();
while (deletes.Count < numDeletes)
{
deletes.Add(Random.Next(numDocs));
diff --git
a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
index b6b6cea..fd0a5ae 100644
---
a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
+++
b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
@@ -344,7 +344,7 @@ namespace Lucene.Net.Analysis.CharFilters
StringBuilder output = new StringBuilder();
// Maps output offset to input offset:
- IList<int?> inputOffsets = new JCG.List<int?>();
+ IList<int> inputOffsets = new JCG.List<int>();
int cumDiff = 0;
int charIdx = 0;
@@ -446,7 +446,7 @@ namespace Lucene.Net.Analysis.CharFilters
MappingCharFilter mapFilter = new
MappingCharFilter(charMap, new StringReader(content));
StringBuilder actualBuilder = new StringBuilder();
- IList<int?> actualInputOffsets = new JCG.List<int?>();
+ IList<int> actualInputOffsets = new JCG.List<int>();
// Now consume the actual mapFilter, somewhat randomly:
while (true)
diff --git
a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharArraySet.cs
b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharArraySet.cs
index 8492e10..0101842 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharArraySet.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharArraySet.cs
@@ -70,16 +70,16 @@ namespace Lucene.Net.Analysis.Util
public virtual void TestObjectContains()
{
CharArraySet set = new CharArraySet(TEST_VERSION_CURRENT, 10,
true);
- int? val = Convert.ToInt32(1);
+ J2N.Numerics.Int32 val = J2N.Numerics.Int32.GetInstance(1);
set.Add(val);
assertTrue(set.Contains(val));
- assertTrue(set.Contains(new int?(1))); // another integer
+ assertTrue(set.Contains(J2N.Numerics.Int32.GetInstance(1))); //
another integer
assertTrue(set.Contains("1"));
assertTrue(set.Contains(new char[] { '1' }));
// test unmodifiable
set = CharArraySet.UnmodifiableSet(set);
assertTrue(set.Contains(val));
- assertTrue(set.Contains(new int?(1))); // another integer
+ assertTrue(set.Contains(J2N.Numerics.Int32.GetInstance(1))); //
another integer
assertTrue(set.Contains("1"));
assertTrue(set.Contains(new char[] { '1' }));
}
diff --git
a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
index 1e546ee..430db02 100644
--- a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
+++ b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
@@ -354,10 +354,10 @@ namespace Lucene.Net.Facet.SortedSet
Facets facets = new SortedSetDocValuesFacetCounts(state, fc);
// Slow, yet hopefully bug-free, faceting:
- var expectedCounts = new JCG.List<Dictionary<string, int?>>();
+ var expectedCounts = new JCG.List<Dictionary<string, int>>();
for (int i = 0; i < numDims; i++)
{
- expectedCounts.Add(new Dictionary<string, int?>());
+ expectedCounts.Add(new Dictionary<string, int>());
}
foreach (TestDoc doc in testDocs)
@@ -368,7 +368,7 @@ namespace Lucene.Net.Facet.SortedSet
{
if (doc.dims[j] != null)
{
- if
(!expectedCounts[j].TryGetValue(doc.dims[j], out int? v))
+ if
(!expectedCounts[j].TryGetValue(doc.dims[j], out int v))
{
expectedCounts[j][doc.dims[j]] = 1;
}
@@ -386,10 +386,10 @@ namespace Lucene.Net.Facet.SortedSet
{
JCG.List<LabelAndValue> labelValues = new
JCG.List<LabelAndValue>();
int totCount = 0;
- foreach (KeyValuePair<string, int?> ent in
expectedCounts[i])
+ foreach (KeyValuePair<string, int> ent in
expectedCounts[i])
{
- labelValues.Add(new LabelAndValue(ent.Key,
ent.Value.Value));
- totCount += ent.Value.Value;
+ labelValues.Add(new LabelAndValue(ent.Key, ent.Value));
+ totCount += ent.Value;
}
SortLabelValues(labelValues);
if (totCount > 0)
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
index 87cfab2..d66a473 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
@@ -592,7 +592,7 @@ namespace Lucene.Net.Facet.Taxonomy
{
// find expected children by looking at all expectedCategories
// for children
- JCG.List<int?> expectedChildren = new JCG.List<int?>();
+ JCG.List<int> expectedChildren = new JCG.List<int>();
for (int j = ExpectedCategories.Length - 1; j >= 0; j--)
{
if (ExpectedCategories[j].Length !=
ExpectedCategories[i].Length + 1)
@@ -622,11 +622,11 @@ namespace Lucene.Net.Facet.Taxonomy
else
{
int child = youngestChildArray[i];
- Assert.AreEqual((int)expectedChildren[0], child);
+ Assert.AreEqual(expectedChildren[0], child);
for (int j = 1; j < expectedChildren.Count; j++)
{
child = olderSiblingArray[child];
- Assert.AreEqual((int)expectedChildren[j], child);
+ Assert.AreEqual(expectedChildren[j], child);
// if child is INVALID_ORDINAL we should stop, but
// AssertEquals would fail in this case anyway.
}
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
index c13e000..b9caef7 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
@@ -833,10 +833,10 @@ namespace Lucene.Net.Facet.Taxonomy
Facets facets = GetTaxonomyFacetCounts(tr, config, fc);
// Slow, yet hopefully bug-free, faceting:
- var expectedCounts = new JCG.List<Dictionary<string, int?>>();
+ var expectedCounts = new JCG.List<Dictionary<string, int>>();
for (int i = 0; i < numDims; i++)
{
- expectedCounts.Add(new Dictionary<string, int?>());
+ expectedCounts.Add(new Dictionary<string, int>());
}
foreach (TestDoc doc in testDocs)
@@ -847,7 +847,7 @@ namespace Lucene.Net.Facet.Taxonomy
{
if (doc.dims[j] != null)
{
- if
(!expectedCounts[j].TryGetValue(doc.dims[j], out int? v) || v == null)
+ if
(!expectedCounts[j].TryGetValue(doc.dims[j], out int v))
{
expectedCounts[j][doc.dims[j]] = 1;
}
@@ -865,10 +865,10 @@ namespace Lucene.Net.Facet.Taxonomy
{
JCG.List<LabelAndValue> labelValues = new
JCG.List<LabelAndValue>();
int totCount = 0;
- foreach (KeyValuePair<string, int?> ent in
expectedCounts[i])
+ foreach (KeyValuePair<string, int> ent in
expectedCounts[i])
{
- labelValues.Add(new LabelAndValue(ent.Key,
ent.Value.Value));
- totCount += ent.Value.Value;
+ labelValues.Add(new LabelAndValue(ent.Key, ent.Value));
+ totCount += ent.Value;
}
SortLabelValues(labelValues);
if (totCount > 0)
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
index 035c20c..c7d1c5a 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
@@ -85,7 +85,7 @@ namespace Lucene.Net.Facet.Taxonomy
}
private static Net.Store.Directory indexDir, taxoDir;
- private static IDictionary<string, int?> allExpectedCounts,
termExpectedCounts;
+ private static IDictionary<string, int> allExpectedCounts,
termExpectedCounts;
[OneTimeTearDown]
public override void AfterClass() // LUCENENET specific - renamed from
AfterClassCountingFacetsAggregatorTest() to ensure calling order
@@ -170,7 +170,7 @@ namespace Lucene.Net.Facet.Taxonomy
indexWriter.Commit(); // flush a segment
}
- private static void IndexDocsWithFacetsNoTerms(IndexWriter
indexWriter, ITaxonomyWriter taxoWriter, IDictionary<string, int?>
expectedCounts)
+ private static void IndexDocsWithFacetsNoTerms(IndexWriter
indexWriter, ITaxonomyWriter taxoWriter, IDictionary<string, int>
expectedCounts)
{
Random random = Random;
int numDocs = AtLeast(random, 2);
@@ -184,7 +184,7 @@ namespace Lucene.Net.Facet.Taxonomy
indexWriter.Commit(); // flush a segment
}
- private static void IndexDocsWithFacetsAndTerms(IndexWriter
indexWriter, ITaxonomyWriter taxoWriter, IDictionary<string, int?>
expectedCounts)
+ private static void IndexDocsWithFacetsAndTerms(IndexWriter
indexWriter, ITaxonomyWriter taxoWriter, IDictionary<string, int>
expectedCounts)
{
Random random = Random;
int numDocs = AtLeast(random, 2);
@@ -199,7 +199,7 @@ namespace Lucene.Net.Facet.Taxonomy
indexWriter.Commit(); // flush a segment
}
- private static void IndexDocsWithFacetsAndSomeTerms(IndexWriter
indexWriter, ITaxonomyWriter taxoWriter, IDictionary<string, int?>
expectedCounts)
+ private static void IndexDocsWithFacetsAndSomeTerms(IndexWriter
indexWriter, ITaxonomyWriter taxoWriter, IDictionary<string, int>
expectedCounts)
{
Random random = Random;
int numDocs = AtLeast(random, 2);
@@ -219,9 +219,9 @@ namespace Lucene.Net.Facet.Taxonomy
}
// initialize expectedCounts w/ 0 for all categories
- private static IDictionary<string, int?> newCounts()
+ private static IDictionary<string, int> newCounts()
{
- IDictionary<string, int?> counts = new Dictionary<string, int?>();
+ IDictionary<string, int> counts = new Dictionary<string, int>();
counts[CP_A] = 0;
counts[CP_B] = 0;
counts[CP_C] = 0;
@@ -298,13 +298,13 @@ namespace Lucene.Net.Facet.Taxonomy
Assert.AreEqual(-1, (int)result.Value);
foreach (LabelAndValue labelValue in result.LabelValues)
{
- Assert.AreEqual(termExpectedCounts[CP_A + "/" +
labelValue.Label].GetValueOrDefault(), labelValue.Value);
+ Assert.AreEqual(termExpectedCounts[CP_A + "/" +
labelValue.Label], labelValue.Value);
}
result = facets.GetTopChildren(NUM_CHILDREN_CP_B, CP_B);
- Assert.AreEqual(termExpectedCounts[CP_B].GetValueOrDefault(),
result.Value);
+ Assert.AreEqual(termExpectedCounts[CP_B], result.Value);
foreach (LabelAndValue labelValue in result.LabelValues)
{
- Assert.AreEqual(termExpectedCounts[CP_B + "/" +
labelValue.Label].GetValueOrDefault(), labelValue.Value);
+ Assert.AreEqual(termExpectedCounts[CP_B + "/" +
labelValue.Label], labelValue.Value);
}
IOUtils.Dispose(indexReader, taxoReader);
@@ -327,17 +327,17 @@ namespace Lucene.Net.Facet.Taxonomy
int prevValue = int.MaxValue;
foreach (LabelAndValue labelValue in result.LabelValues)
{
- Assert.AreEqual(allExpectedCounts[CP_A + "/" +
labelValue.Label].GetValueOrDefault(), labelValue.Value);
+ Assert.AreEqual(allExpectedCounts[CP_A + "/" +
labelValue.Label], labelValue.Value);
Assert.IsTrue((int)labelValue.Value <= prevValue, "wrong sort
order of sub results: labelValue.value=" + labelValue.Value + " prevValue=" +
prevValue);
prevValue = (int)labelValue.Value;
}
result = facets.GetTopChildren(NUM_CHILDREN_CP_B, CP_B);
- Assert.AreEqual(allExpectedCounts[CP_B].GetValueOrDefault(),
result.Value);
+ Assert.AreEqual(allExpectedCounts[CP_B], result.Value);
prevValue = int.MaxValue;
foreach (LabelAndValue labelValue in result.LabelValues)
{
- Assert.AreEqual(allExpectedCounts[CP_B + "/" +
labelValue.Label].GetValueOrDefault(), labelValue.Value);
+ Assert.AreEqual(allExpectedCounts[CP_B + "/" +
labelValue.Label], labelValue.Value);
Assert.IsTrue((int)labelValue.Value <= prevValue, "wrong sort
order of sub results: labelValue.value=" + labelValue.Value + " prevValue=" +
prevValue);
prevValue = (int)labelValue.Value;
}
@@ -361,13 +361,13 @@ namespace Lucene.Net.Facet.Taxonomy
Assert.AreEqual(-1, (int)result.Value);
foreach (LabelAndValue labelValue in result.LabelValues)
{
- Assert.AreEqual(allExpectedCounts[CP_A + "/" +
labelValue.Label].GetValueOrDefault(), labelValue.Value);
+ Assert.AreEqual(allExpectedCounts[CP_A + "/" +
labelValue.Label], labelValue.Value);
}
result = facets.GetTopChildren(int.MaxValue, CP_B);
- Assert.AreEqual(allExpectedCounts[CP_B].GetValueOrDefault(),
result.Value);
+ Assert.AreEqual(allExpectedCounts[CP_B], result.Value);
foreach (LabelAndValue labelValue in result.LabelValues)
{
- Assert.AreEqual(allExpectedCounts[CP_B + "/" +
labelValue.Label].GetValueOrDefault(), labelValue.Value);
+ Assert.AreEqual(allExpectedCounts[CP_B + "/" +
labelValue.Label], labelValue.Value);
}
IOUtils.Dispose(indexReader, taxoReader);
@@ -386,16 +386,16 @@ namespace Lucene.Net.Facet.Taxonomy
Facets facets = GetTaxonomyFacetCounts(taxoReader, GetConfig(),
sfc);
FacetResult result = facets.GetTopChildren(NUM_CHILDREN_CP_C,
CP_C);
- Assert.AreEqual(allExpectedCounts[CP_C].GetValueOrDefault(),
result.Value);
+ Assert.AreEqual(allExpectedCounts[CP_C], result.Value);
foreach (LabelAndValue labelValue in result.LabelValues)
{
- Assert.AreEqual(allExpectedCounts[CP_C + "/" +
labelValue.Label].GetValueOrDefault(), labelValue.Value);
+ Assert.AreEqual(allExpectedCounts[CP_C + "/" +
labelValue.Label], labelValue.Value);
}
result = facets.GetTopChildren(NUM_CHILDREN_CP_D, CP_D);
- Assert.AreEqual(allExpectedCounts[CP_C].GetValueOrDefault(),
result.Value);
+ Assert.AreEqual(allExpectedCounts[CP_C], result.Value);
foreach (LabelAndValue labelValue in result.LabelValues)
{
- Assert.AreEqual(allExpectedCounts[CP_D + "/" +
labelValue.Label].GetValueOrDefault(), labelValue.Value);
+ Assert.AreEqual(allExpectedCounts[CP_D + "/" +
labelValue.Label], labelValue.Value);
}
IOUtils.Dispose(indexReader, taxoReader);
diff --git
a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
index 8202161..e0b44a7 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
@@ -555,10 +555,10 @@ namespace Lucene.Net.Facet.Taxonomy
Facets facets = new TaxonomyFacetSumValueSource(tr, config,
fc, values);
// Slow, yet hopefully bug-free, faceting:
- var expectedValues = new JCG.List<Dictionary<string,
float?>>(numDims);
+ var expectedValues = new JCG.List<Dictionary<string,
float>>(numDims);
for (int i = 0; i < numDims; i++)
{
- expectedValues.Add(new Dictionary<string, float?>());
+ expectedValues.Add(new Dictionary<string, float>());
}
foreach (TestDoc doc in testDocs)
@@ -569,7 +569,7 @@ namespace Lucene.Net.Facet.Taxonomy
{
if (doc.dims[j] != null)
{
- if
(!expectedValues[j].TryGetValue(doc.dims[j], out float? v) || v == null)
+ if
(!expectedValues[j].TryGetValue(doc.dims[j], out float v))
{
expectedValues[j][doc.dims[j]] = doc.value;
}
@@ -587,10 +587,10 @@ namespace Lucene.Net.Facet.Taxonomy
{
JCG.List<LabelAndValue> labelValues = new
JCG.List<LabelAndValue>();
float totValue = 0;
- foreach (KeyValuePair<string, float?> ent in
expectedValues[i])
+ foreach (KeyValuePair<string, float> ent in
expectedValues[i])
{
- labelValues.Add(new LabelAndValue(ent.Key,
ent.Value.Value));
- totValue += ent.Value.Value;
+ labelValues.Add(new LabelAndValue(ent.Key, ent.Value));
+ totValue += ent.Value;
}
SortLabelValues(labelValues);
if (totValue > 0)
diff --git a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
index f82ef09..b9f436b 100644
--- a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
+++ b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
@@ -796,7 +796,7 @@ namespace Lucene.Net.Facet
DrillSidewaysResult actual = ds.Search(ddq, filter, null,
numDocs, sort, true, true);
TopDocs hits = s.Search(baseQuery, numDocs);
- IDictionary<string, float?> scores = new Dictionary<string,
float?>();
+ IDictionary<string, float> scores = new Dictionary<string,
float>();
foreach (ScoreDoc sd in hits.ScoreDocs)
{
scores[s.Doc(sd.Doc).Get("id")] = sd.Score;
@@ -1135,7 +1135,7 @@ namespace Lucene.Net.Facet
}
//nextDocBreak:// Not referenced
- IDictionary<string, int?> idToDocID = new Dictionary<string,
int?>();
+ IDictionary<string, int> idToDocID = new Dictionary<string, int>();
for (int i = 0; i < s.IndexReader.MaxDoc; i++)
{
idToDocID[s.Doc(i).Get("id")] = i;
@@ -1171,7 +1171,7 @@ namespace Lucene.Net.Facet
return res;
}
- internal virtual void VerifyEquals(string[][] dimValues, IndexSearcher
s, TestFacetResult expected, DrillSidewaysResult actual, IDictionary<string,
float?> scores, bool isSortedSetDV)
+ internal virtual void VerifyEquals(string[][] dimValues, IndexSearcher
s, TestFacetResult expected, DrillSidewaysResult actual, IDictionary<string,
float> scores, bool isSortedSetDV)
{
if (Verbose)
{
@@ -1187,7 +1187,7 @@ namespace Lucene.Net.Facet
}
Assert.AreEqual(expected.Hits[i].id,
s.Doc(actual.Hits.ScoreDocs[i].Doc).Get("id"));
// Score should be IDENTICAL:
-
Assert.AreEqual(scores[expected.Hits[i].id].GetValueOrDefault(),
actual.Hits.ScoreDocs[i].Score, 0.0f);
+ Assert.AreEqual(scores[expected.Hits[i].id],
actual.Hits.ScoreDocs[i].Score, 0.0f);
}
for (int dim = 0; dim < expected.Counts.Length; dim++)
@@ -1201,7 +1201,7 @@ namespace Lucene.Net.Facet
}
int idx = 0;
- IDictionary<string, int?> actualValues = new
Dictionary<string, int?>();
+ IDictionary<string, int> actualValues = new Dictionary<string,
int>();
if (fr != null)
{
diff --git a/src/Lucene.Net.Tests.Queries/Mlt/TestMoreLikeThis.cs
b/src/Lucene.Net.Tests.Queries/Mlt/TestMoreLikeThis.cs
index 159443d..fcc5482 100644
--- a/src/Lucene.Net.Tests.Queries/Mlt/TestMoreLikeThis.cs
+++ b/src/Lucene.Net.Tests.Queries/Mlt/TestMoreLikeThis.cs
@@ -73,7 +73,7 @@ namespace Lucene.Net.Tests.Queries.Mlt
[Test]
public void TestBoostFactor()
{
- IDictionary<string, float?> originalValues = OriginalValues;
+ IDictionary<string, float> originalValues = GetOriginalValues();
MoreLikeThis mlt = new MoreLikeThis(reader);
mlt.Analyzer = new MockAnalyzer(Random, MockTokenizer.WHITESPACE,
false);
@@ -96,7 +96,7 @@ namespace Lucene.Net.Tests.Queries.Mlt
foreach (BooleanClause clause in clauses)
{
TermQuery tq = (TermQuery)clause.Query;
- float? termBoost = originalValues[tq.Term.Text];
+ float termBoost = originalValues[tq.Term.Text];
assertNotNull("Expected term " + tq.Term.Text, termBoost);
float totalBoost = (float) (termBoost * boostFactor);
@@ -105,28 +105,25 @@ namespace Lucene.Net.Tests.Queries.Mlt
}
}
- private IDictionary<string, float?> OriginalValues
+ private IDictionary<string, float> GetOriginalValues()
{
- get
- {
- IDictionary<string, float?> originalValues = new
Dictionary<string, float?>();
- MoreLikeThis mlt = new MoreLikeThis(reader);
- mlt.Analyzer = new MockAnalyzer(Random,
MockTokenizer.WHITESPACE, false);
- mlt.MinDocFreq = 1;
- mlt.MinTermFreq = 1;
- mlt.MinWordLen = 1;
- mlt.FieldNames = new[] { "text" };
- mlt.ApplyBoost = true;
- BooleanQuery query = (BooleanQuery)mlt.Like(new
StringReader("lucene release"), "text");
- IList<BooleanClause> clauses = query.Clauses;
+ IDictionary<string, float> originalValues = new Dictionary<string,
float>();
+ MoreLikeThis mlt = new MoreLikeThis(reader);
+ mlt.Analyzer = new MockAnalyzer(Random, MockTokenizer.WHITESPACE,
false);
+ mlt.MinDocFreq = 1;
+ mlt.MinTermFreq = 1;
+ mlt.MinWordLen = 1;
+ mlt.FieldNames = new[] { "text" };
+ mlt.ApplyBoost = true;
+ BooleanQuery query = (BooleanQuery)mlt.Like(new
StringReader("lucene release"), "text");
+ IList<BooleanClause> clauses = query.Clauses;
- foreach (BooleanClause clause in clauses)
- {
- TermQuery tq = (TermQuery)clause.Query;
- originalValues[tq.Term.Text] = tq.Boost;
- }
- return originalValues;
+ foreach (BooleanClause clause in clauses)
+ {
+ TermQuery tq = (TermQuery)clause.Query;
+ originalValues[tq.Term.Text] = tq.Boost;
}
+ return originalValues;
}
// LUCENE-3326
diff --git a/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs
b/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs
index 7c60bc0..c5c4765 100644
--- a/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs
+++ b/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs
@@ -395,7 +395,7 @@ namespace Lucene.Net.Tests.Queries
}
}
- private void LogResult(string msg, IndexSearcher s, Query q, int doc,
float? score1)
+ private void LogResult(string msg, IndexSearcher s, Query q, int doc,
float score1)
{
Log(msg + " " + score1);
Log("Explain by: " + q);
diff --git
a/src/Lucene.Net.Tests.QueryParser/Flexible/Precedence/TestPrecedenceQueryParser.cs
b/src/Lucene.Net.Tests.QueryParser/Flexible/Precedence/TestPrecedenceQueryParser.cs
index fc7a51d..e137308 100644
---
a/src/Lucene.Net.Tests.QueryParser/Flexible/Precedence/TestPrecedenceQueryParser.cs
+++
b/src/Lucene.Net.Tests.QueryParser/Flexible/Precedence/TestPrecedenceQueryParser.cs
@@ -483,7 +483,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Precedence
String hourField = "hour";
PrecedenceQueryParser qp = new PrecedenceQueryParser(new
MockAnalyzer(Random));
- IDictionary<string, DateResolution?> fieldMap = new
JCG.Dictionary<string, DateResolution?>();
+ IDictionary<string, DateResolution> fieldMap = new
JCG.Dictionary<string, DateResolution>();
// set a field specific date resolution
fieldMap.Put(monthField, DateResolution.MONTH);
#pragma warning disable 612, 618
diff --git
a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestMultiFieldQPHelper.cs
b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestMultiFieldQPHelper.cs
index fd2573a..9a31c2c 100644
---
a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestMultiFieldQPHelper.cs
+++
b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestMultiFieldQPHelper.cs
@@ -143,7 +143,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
[Test]
public void TestBoostsSimple()
{
- IDictionary<String, float?> boosts = new Dictionary<String,
float?>();
+ IDictionary<String, float> boosts = new Dictionary<String,
float>();
boosts.Put("b", 5);
boosts.Put("t", 10);
String[] fields = { "b", "t" };
diff --git a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs
b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs
index e4ba9a2..78ad0ea 100644
--- a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs
@@ -799,7 +799,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
String hourField = "hour";
StandardQueryParser qp = new StandardQueryParser();
- IDictionary<string, DateResolution?> dateRes = new
Dictionary<string, DateResolution?>();
+ IDictionary<string, DateResolution> dateRes = new
Dictionary<string, DateResolution>();
// set a field specific date resolution
dateRes.Put(monthField, DateResolution.MONTH);
diff --git a/src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs
b/src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs
index 5ee7789..5706bc5 100644
--- a/src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs
+++ b/src/Lucene.Net.Tests.Spatial/PortedSolr3Test.cs
@@ -187,14 +187,14 @@ namespace Lucene.Net.Spatial
assertEquals("" + shape, assertNumFound, results.numFound);
if (assertIds != null)
{
- ISet<int?> resultIds = new JCG.HashSet<int?>();
+ ISet<int> resultIds = new JCG.HashSet<int>();
foreach (SearchResult result in results.results)
{
- resultIds.add(int.Parse(result.document.Get("id"),
CultureInfo.InvariantCulture));
+ resultIds.Add(int.Parse(result.document.Get("id"),
CultureInfo.InvariantCulture));
}
foreach (int assertId in assertIds)
{
- assertTrue("has " + assertId,
resultIds.contains(assertId));
+ assertTrue("has " + assertId,
resultIds.Contains(assertId));
}
}
}
diff --git
a/src/Lucene.Net.Tests.Spatial/Prefix/TestRecursivePrefixTreeStrategy.cs
b/src/Lucene.Net.Tests.Spatial/Prefix/TestRecursivePrefixTreeStrategy.cs
index c3be98e..2db1688 100644
--- a/src/Lucene.Net.Tests.Spatial/Prefix/TestRecursivePrefixTreeStrategy.cs
+++ b/src/Lucene.Net.Tests.Spatial/Prefix/TestRecursivePrefixTreeStrategy.cs
@@ -113,14 +113,14 @@ namespace Lucene.Net.Spatial.Prefix
assertEquals("" + args, assertNumFound, got.numFound);
if (assertIds != null)
{
- ISet<int?> gotIds = new JCG.HashSet<int?>();
+ ISet<int> gotIds = new JCG.HashSet<int>();
foreach (SearchResult result in got.results)
{
- gotIds.add(int.Parse(result.document.Get("id"),
CultureInfo.InvariantCulture));
+ gotIds.Add(int.Parse(result.document.Get("id"),
CultureInfo.InvariantCulture));
}
foreach (int assertId in assertIds)
{
- assertTrue("has " + assertId, gotIds.contains(assertId));
+ assertTrue("has " + assertId, gotIds.Contains(assertId));
}
}
}
diff --git
a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
index 8c5fb98..a4e6682 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
@@ -420,14 +420,14 @@ namespace Lucene.Net.Search.Suggest.Analyzing
sug.Build(new TestRandomInputEnumerator(docs));
// Build inefficient but hopefully correct model:
- IList<IDictionary<string, int?>> gramCounts = new
JCG.List<IDictionary<string, int?>>(grams);
+ IList<IDictionary<string, int>> gramCounts = new
JCG.List<IDictionary<string, int>>(grams);
for (int gram = 0; gram < grams; gram++)
{
if (Verbose)
{
Console.WriteLine("TEST: build model for gram=" + gram);
}
- IDictionary<string, int?> model = new JCG.Dictionary<string,
int?>();
+ IDictionary<string, int> model = new JCG.Dictionary<string,
int>();
gramCounts.Add(model);
foreach (string[] doc in docs)
{
@@ -443,7 +443,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
b.append(doc[j]);
}
string token = b.toString();
- if (!model.TryGetValue(token, out int? curCount) ||
curCount == null)
+ if (!model.TryGetValue(token, out int curCount))
{
model.Put(token, 1);
}
@@ -453,7 +453,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
}
if (Verbose)
{
- Console.WriteLine(" add '" + token + "' ->
count=" + (model.TryGetValue(token, out int? count) ? (count.HasValue ?
count.ToString() : "null") : ""));
+ Console.WriteLine(" add '" + token + "' ->
count=" + (model.TryGetValue(token, out int count) ? count.ToString() : ""));
}
}
}
@@ -558,7 +558,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
{
//int? count = gramCounts.get(i - 1).get(context);
var gramCount = gramCounts[i - 1];
- if (!gramCount.TryGetValue(context, out int? count) ||
count == null)
+ if (!gramCount.TryGetValue(context, out int count))
{
// We never saw this context:
backoff *= FreeTextSuggester.ALPHA;
@@ -568,13 +568,13 @@ namespace Lucene.Net.Search.Suggest.Analyzing
}
continue;
}
- contextCount = count.GetValueOrDefault();
+ contextCount = count;
}
if (Verbose)
{
Console.WriteLine(" contextCount=" +
contextCount);
}
- IDictionary<string, int?> model = gramCounts[i];
+ IDictionary<string, int> model = gramCounts[i];
// First pass, gather all predictions for this model:
if (Verbose)
@@ -600,7 +600,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
}
string ngram = (context + " " + term).Trim();
//Integer count = model.get(ngram);
- if (model.TryGetValue(ngram, out int? count) &&
count != null)
+ if (model.TryGetValue(ngram, out int count))
{
// LUCENENET NOTE: We need to calculate this
as decimal because when using double it can sometimes
// return numbers that are greater than
long.MaxValue, which results in a negative long number.
diff --git a/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
b/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
index 22b3371..7c6138b 100644
--- a/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
@@ -131,7 +131,7 @@ namespace Lucene.Net.Index
RandomIndexWriter w = new RandomIndexWriter(Random, dir, conf);
int[][] idToOrds = new int[NUM_DOCS][];
- ISet<int?> ordsForDocSet = new JCG.HashSet<int?>();
+ ISet<int> ordsForDocSet = new JCG.HashSet<int>();
for (int id = 0; id < NUM_DOCS; id++)
{
@@ -244,7 +244,7 @@ namespace Lucene.Net.Index
RandomIndexWriter w = new RandomIndexWriter(Random, dir, conf);
int[][] idToOrds = new int[NUM_DOCS][];
- ISet<int?> ordsForDocSet = new JCG.HashSet<int?>();
+ ISet<int> ordsForDocSet = new JCG.HashSet<int>();
for (int id = 0; id < NUM_DOCS; id++)
{
@@ -296,7 +296,7 @@ namespace Lucene.Net.Index
for (int id = 0; id < NUM_DOCS; id++)
{
int[] docOrds = idToOrds[id];
- IList<int?> newOrds = new JCG.List<int?>();
+ IList<int> newOrds = new JCG.List<int>();
foreach (int ord in idToOrds[id])
{
if (StringHelper.StartsWith(termsArray[ord],
prefixRef))
diff --git a/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
index f1b65ab..587f411 100644
--- a/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
@@ -51,7 +51,7 @@ namespace Lucene.Net.Index
Directory dir = NewDirectory();
IndexWriter w = new IndexWriter(dir,
NewIndexWriterConfig(TEST_VERSION_CURRENT, new
MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy()));
- IList<long?> numbers = new JCG.List<long?>();
+ IList<long> numbers = new JCG.List<long>();
IList<BytesRef> binary = new JCG.List<BytesRef>();
IList<BytesRef> sorted = new JCG.List<BytesRef>();
int numDocs = AtLeast(100);
@@ -103,7 +103,7 @@ namespace Lucene.Net.Index
{
private readonly TestDocValuesWithThreads outerInstance;
- private readonly IList<long?> numbers;
+ private readonly IList<long> numbers;
private readonly IList<BytesRef> binary;
private readonly IList<BytesRef> sorted;
private readonly int numDocs;
@@ -111,7 +111,7 @@ namespace Lucene.Net.Index
private readonly CountdownEvent startingGun;
private readonly Random threadRandom;
- public ThreadAnonymousClass(TestDocValuesWithThreads
outerInstance, IList<long?> numbers, IList<BytesRef> binary, IList<BytesRef>
sorted, int numDocs, AtomicReader ar, CountdownEvent startingGun, Random
threadRandom)
+ public ThreadAnonymousClass(TestDocValuesWithThreads
outerInstance, IList<long> numbers, IList<BytesRef> binary, IList<BytesRef>
sorted, int numDocs, AtomicReader ar, CountdownEvent startingGun, Random
threadRandom)
{
this.outerInstance = outerInstance;
this.numbers = numbers;
@@ -143,16 +143,16 @@ namespace Lucene.Net.Index
{
#pragma warning disable 612, 618
case 0:
- Assert.AreEqual((long)(sbyte)numbers[docID],
(sbyte)FieldCache.DEFAULT.GetBytes(ar, "number", false).Get(docID));
+ Assert.AreEqual((sbyte)numbers[docID],
(sbyte)FieldCache.DEFAULT.GetBytes(ar, "number", false).Get(docID));
break;
case 1:
- Assert.AreEqual((long)(short)numbers[docID],
FieldCache.DEFAULT.GetInt16s(ar, "number", false).Get(docID));
+ Assert.AreEqual((short)numbers[docID],
FieldCache.DEFAULT.GetInt16s(ar, "number", false).Get(docID));
break;
#pragma warning restore 612, 618
case 2:
- Assert.AreEqual((long)(int)numbers[docID],
FieldCache.DEFAULT.GetInt32s(ar, "number", false).Get(docID));
+ Assert.AreEqual((int)numbers[docID],
FieldCache.DEFAULT.GetInt32s(ar, "number", false).Get(docID));
break;
case 3:
diff --git a/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
b/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
index b854216..7ac3501 100644
--- a/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
@@ -129,13 +129,13 @@ namespace Lucene.Net.Index
int numDocs = AtLeast(47);
int max = 1051;
int term = Random.Next(max);
- int?[][] positionsInDoc = new int?[numDocs][];
+ int[][] positionsInDoc = new int[numDocs][];
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.OmitNorms = true;
for (int i = 0; i < numDocs; i++)
{
Document doc = new Document();
- JCG.List<int?> positions = new JCG.List<int?>();
+ JCG.List<int> positions = new JCG.List<int>();
StringBuilder builder = new StringBuilder();
int num = AtLeast(131);
for (int j = 0; j < num; j++)
@@ -188,7 +188,7 @@ namespace Lucene.Net.Index
{
break;
}
- int?[] pos =
positionsInDoc[atomicReaderContext.DocBase + docID];
+ int[] pos = positionsInDoc[atomicReaderContext.DocBase
+ docID];
Assert.AreEqual(pos.Length, docsAndPosEnum.Freq);
// number of positions read should be random - don't
read all of them
// allways
diff --git a/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
b/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
index 593a618..a83254d 100644
--- a/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
@@ -45,7 +45,7 @@ namespace Lucene.Net.Index
{
DocumentsWriterDeleteQueue queue = new
DocumentsWriterDeleteQueue();
int size = 200 + Random.Next(500) * RandomMultiplier;
- int?[] ids = new int?[size];
+ int[] ids = new int[size];
for (int i = 0; i < ids.Length; i++)
{
ids[i] = Random.Next();
@@ -59,7 +59,7 @@ namespace Lucene.Net.Index
ISet<Term> uniqueValues = new JCG.HashSet<Term>();
for (int j = 0; j < ids.Length; j++)
{
- int? i = ids[j];
+ int i = ids[j];
// create an array here since we compare identity below
against tailItem
Term[] term = new Term[] { new Term("id", i.ToString()) };
uniqueValues.Add(term[0]);
@@ -135,7 +135,7 @@ namespace Lucene.Net.Index
return true;
}
- private void AssertAllBetween(int start, int end, BufferedUpdates
deletes, int?[] ids)
+ private void AssertAllBetween(int start, int end, BufferedUpdates
deletes, int[] ids)
{
for (int i = start; i <= end; i++)
{
@@ -255,7 +255,7 @@ namespace Lucene.Net.Index
DocumentsWriterDeleteQueue queue = new
DocumentsWriterDeleteQueue();
ISet<Term> uniqueValues = new JCG.HashSet<Term>();
int size = 10000 + Random.Next(500) * RandomMultiplier;
- int?[] ids = new int?[size];
+ int[] ids = new int[size];
for (int i = 0; i < ids.Length; i++)
{
ids[i] = Random.Next();
@@ -301,12 +301,12 @@ namespace Lucene.Net.Index
{
internal readonly DocumentsWriterDeleteQueue queue;
internal readonly AtomicInt32 index;
- internal readonly int?[] ids;
+ internal readonly int[] ids;
internal readonly DeleteSlice slice;
internal readonly BufferedUpdates deletes;
internal readonly CountdownEvent latch;
- protected internal UpdateThread(DocumentsWriterDeleteQueue queue,
AtomicInt32 index, int?[] ids, CountdownEvent latch)
+ protected internal UpdateThread(DocumentsWriterDeleteQueue queue,
AtomicInt32 index, int[] ids, CountdownEvent latch)
{
this.queue = queue;
this.index = index;
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
index dcbd2e3..3800db1 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
@@ -1114,7 +1114,7 @@ namespace Lucene.Net.Index
#endif
Random, dir);
int NUM_DOCS = AtLeast(1000);
- IList<int?> ids = new JCG.List<int?>(NUM_DOCS);
+ IList<int> ids = new JCG.List<int>(NUM_DOCS);
for (int id = 0; id < NUM_DOCS; id++)
{
ids.Add(id);
diff --git a/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs
b/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs
index 07ea845..80acf04 100644
--- a/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs
+++ b/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs
@@ -46,7 +46,7 @@ namespace Lucene.Net.Index
private Directory dir;
private IndexReader reader;
/* expected maxTermFrequency values for our documents */
- private readonly IList<int?> expected = new JCG.List<int?>();
+ private readonly IList<int> expected = new JCG.List<int>();
[SetUp]
public override void SetUp()
@@ -82,7 +82,7 @@ namespace Lucene.Net.Index
NumericDocValues fooNorms = MultiDocValues.GetNormValues(reader,
"foo");
for (int i = 0; i < reader.MaxDoc; i++)
{
- Assert.AreEqual((int)expected[i], fooNorms.Get(i) & 0xff);
+ Assert.AreEqual(expected[i], fooNorms.Get(i) & 0xff);
}
}
diff --git a/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs
b/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs
index 48ccd44..7a65c34 100644
--- a/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs
+++ b/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs
@@ -86,10 +86,10 @@ namespace Lucene.Net.Index
}
// Random delete half the docs:
- ISet<int?> deleted = new JCG.HashSet<int?>();
+ ISet<int> deleted = new JCG.HashSet<int>();
while (deleted.Count < NUM_DOCS / 2)
{
- int? toDelete = Random.Next(NUM_DOCS);
+ int toDelete = Random.Next(NUM_DOCS);
if (!deleted.Contains(toDelete))
{
deleted.Add(toDelete);
diff --git a/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs
b/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs
index f4e639f..bcf3d10 100644
--- a/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs
+++ b/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs
@@ -357,7 +357,7 @@ namespace Lucene.Net.Index
for (int i = 0; i < numDocs; i++)
{
single.SetDocument(i);
- IList<long?> expectedList = new JCG.List<long?>();
+ IList<long> expectedList = new JCG.List<long>();
long ord;
while ((ord = single.NextOrd()) !=
SortedSetDocValues.NO_MORE_ORDS)
{
@@ -368,7 +368,7 @@ namespace Lucene.Net.Index
int upto = 0;
while ((ord = multi.NextOrd()) !=
SortedSetDocValues.NO_MORE_ORDS)
{
- Assert.AreEqual((long)expectedList[upto], ord);
+ Assert.AreEqual(expectedList[upto], ord);
upto++;
}
Assert.AreEqual(expectedList.Count, upto);
diff --git a/src/Lucene.Net.Tests/Index/TestMultiFields.cs
b/src/Lucene.Net.Tests/Index/TestMultiFields.cs
index 01ea3e0..be65897 100644
--- a/src/Lucene.Net.Tests/Index/TestMultiFields.cs
+++ b/src/Lucene.Net.Tests/Index/TestMultiFields.cs
@@ -52,7 +52,7 @@ namespace Lucene.Net.Index
// we can do this because we use NoMergePolicy (and dont merge
to "nothing")
w.KeepFullyDeletedSegments = true;
- IDictionary<BytesRef, IList<int?>> docs = new
Dictionary<BytesRef, IList<int?>>();
+ IDictionary<BytesRef, IList<int>> docs = new
Dictionary<BytesRef, IList<int>>();
ISet<int?> deleted = new JCG.HashSet<int?>();
IList<BytesRef> terms = new JCG.List<BytesRef>();
@@ -82,9 +82,9 @@ namespace Lucene.Net.Index
{
string s = TestUtil.RandomUnicodeString(Random, 10);
BytesRef term = new BytesRef(s);
- if (!docs.TryGetValue(term, out IList<int?> docsTerm))
+ if (!docs.TryGetValue(term, out IList<int> docsTerm))
{
- docs[term] = docsTerm = new JCG.List<int?>();
+ docs[term] = docsTerm = new JCG.List<int>();
}
docsTerm.Add(i);
terms.Add(term);
diff --git a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
index d56832f..6a36639 100644
--- a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
+++ b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
@@ -242,7 +242,7 @@ namespace Lucene.Net.Index
public virtual void TestRandom()
{
// token -> docID -> tokens
- IDictionary<string, IDictionary<int?, IList<Token>>> actualTokens
= new Dictionary<string, IDictionary<int?, IList<Token>>>();
+ IDictionary<string, IDictionary<int, IList<Token>>> actualTokens =
new Dictionary<string, IDictionary<int, IList<Token>>>();
Directory dir = NewDirectory();
RandomIndexWriter w = new RandomIndexWriter(Random, dir, iwc);
@@ -301,9 +301,9 @@ namespace Lucene.Net.Index
int tokenOffset = Random.Next(5);
Token token = MakeToken(text, posIncr, offset + offIncr,
offset + offIncr + tokenOffset);
- if (!actualTokens.TryGetValue(text, out IDictionary<int?,
IList<Token>> postingsByDoc))
+ if (!actualTokens.TryGetValue(text, out IDictionary<int,
IList<Token>> postingsByDoc))
{
- actualTokens[text] = postingsByDoc = new
Dictionary<int?, IList<Token>>();
+ actualTokens[text] = postingsByDoc = new
Dictionary<int, IList<Token>>();
}
if (!postingsByDoc.TryGetValue(docCount, out IList<Token>
postings))
{
diff --git a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
index 7f2712c..31a9d05 100644
--- a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
+++ b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
@@ -204,7 +204,7 @@ namespace Lucene.Net.Index
docs.Dispose();
}
- private void AddDoc(RandomIndexWriter w, ICollection<string> terms,
IDictionary<BytesRef, int?> termToID, int id)
+ private void AddDoc(RandomIndexWriter w, ICollection<string> terms,
IDictionary<BytesRef, int> termToID, int id)
{
Document doc = new Document();
doc.Add(new Int32Field("id", id, Field.Store.NO));
@@ -248,7 +248,7 @@ namespace Lucene.Net.Index
ISet<string> terms = new JCG.HashSet<string>();
ICollection<string> pendingTerms = new JCG.List<string>();
- IDictionary<BytesRef, int?> termToID = new Dictionary<BytesRef,
int?>();
+ IDictionary<BytesRef, int> termToID = new Dictionary<BytesRef,
int>();
int id = 0;
while (terms.Count != numTerms)
{
diff --git a/src/Lucene.Net.Tests/Search/TestCustomSearcherSort.cs
b/src/Lucene.Net.Tests/Search/TestCustomSearcherSort.cs
index 24094cd..2661b91 100644
--- a/src/Lucene.Net.Tests/Search/TestCustomSearcherSort.cs
+++ b/src/Lucene.Net.Tests/Search/TestCustomSearcherSort.cs
@@ -126,7 +126,7 @@ namespace Lucene.Net.Search
// make a query without sorting first
ScoreDoc[] hitsByRank = searcher.Search(query, null,
int.MaxValue).ScoreDocs;
CheckHits(hitsByRank, "Sort by rank: "); // check for duplicates
- IDictionary<int?, int?> resultMap = new JCG.SortedDictionary<int?,
int?>();
+ IDictionary<int, int> resultMap = new JCG.SortedDictionary<int,
int>();
// store hits in TreeMap - TreeMap does not allow duplicates;
existing
// entries are silently overwritten
for (int hitid = 0; hitid < hitsByRank.Length; ++hitid)
@@ -142,7 +142,7 @@ namespace Lucene.Net.Search
// besides the sorting both sets of hits must be identical
for (int hitid = 0; hitid < resultSort.Length; ++hitid)
{
- int? idHitDate = Convert.ToInt32(resultSort[hitid].Doc); //
document ID
+ int idHitDate = Convert.ToInt32(resultSort[hitid].Doc); //
document ID
// from sorted
// search
if (!resultMap.ContainsKey(idHitDate))
@@ -175,13 +175,11 @@ namespace Lucene.Net.Search
{
if (hits != null)
{
- IDictionary<int?, int?> idMap = new JCG.SortedDictionary<int?,
int?>();
+ IDictionary<int, int> idMap = new JCG.SortedDictionary<int,
int>();
for (int docnum = 0; docnum < hits.Length; ++docnum)
{
- int? luceneId = null;
-
- luceneId = Convert.ToInt32(hits[docnum].Doc);
- if (idMap.TryGetValue(luceneId, out int? value))
+ int luceneId = Convert.ToInt32(hits[docnum].Doc);
+ if (idMap.TryGetValue(luceneId, out int value))
{
StringBuilder message = new StringBuilder(prefix);
message.Append("Duplicate key for hit index = ");
diff --git a/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
b/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
index db57a7a..6b66ebb 100644
--- a/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
+++ b/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
@@ -47,7 +47,7 @@ namespace Lucene.Net.Search
DocIdSet filteredSet = new FilteredDocIdSetAnonymousClass(this,
innerSet);
DocIdSetIterator iter = filteredSet.GetIterator();
- IList<int?> list = new JCG.List<int?>();
+ IList<int> list = new JCG.List<int>();
int doc = iter.Advance(3);
if (doc != DocIdSetIterator.NO_MORE_DOCS)
{
@@ -60,10 +60,10 @@ namespace Lucene.Net.Search
int[] docs = new int[list.Count];
int c = 0;
- IEnumerator<int?> intIter = list.GetEnumerator();
+ using IEnumerator<int> intIter = list.GetEnumerator();
while (intIter.MoveNext())
{
- docs[c++] = (int)intIter.Current;
+ docs[c++] = intIter.Current;
}
int[] answer = new int[] { 4, 6, 8 };
bool same = Arrays.Equals(answer, docs);
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs
b/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs
index c4ad846..b79bb05 100644
--- a/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs
+++ b/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
using NUnit.Framework;
using System;
using System.Globalization;
@@ -225,9 +225,9 @@ namespace Lucene.Net.Search
int numDocs = reader.NumDocs;
int medId = ((maxId - minId) / 2);
- short? minIdO = Convert.ToInt16((short)minId);
- short? maxIdO = Convert.ToInt16((short)maxId);
- short? medIdO = Convert.ToInt16((short)medId);
+ short minIdO = Convert.ToInt16((short)minId);
+ short maxIdO = Convert.ToInt16((short)maxId);
+ short medIdO = Convert.ToInt16((short)medId);
Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
@@ -317,9 +317,9 @@ namespace Lucene.Net.Search
int numDocs = reader.NumDocs;
int medId = ((maxId - minId) / 2);
- int? minIdO = Convert.ToInt32(minId);
- int? maxIdO = Convert.ToInt32(maxId);
- int? medIdO = Convert.ToInt32(medId);
+ int minIdO = Convert.ToInt32(minId);
+ int maxIdO = Convert.ToInt32(maxId);
+ int medIdO = Convert.ToInt32(medId);
Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
@@ -408,9 +408,9 @@ namespace Lucene.Net.Search
int numDocs = reader.NumDocs;
int medId = ((maxId - minId) / 2);
- long? minIdO = Convert.ToInt64(minId);
- long? maxIdO = Convert.ToInt64(maxId);
- long? medIdO = Convert.ToInt64(medId);
+ long minIdO = Convert.ToInt64(minId);
+ long maxIdO = Convert.ToInt64(maxId);
+ long medIdO = Convert.ToInt64(medId);
Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
@@ -500,8 +500,8 @@ namespace Lucene.Net.Search
IndexSearcher search = NewSearcher(reader);
int numDocs = reader.NumDocs;
- float? minIdO = Convert.ToSingle(minId + .5f);
- float? medIdO = Convert.ToSingle((float)minIdO + ((maxId - minId))
/ 2.0f);
+ float minIdO = Convert.ToSingle(minId + .5f);
+ float medIdO = Convert.ToSingle((float)minIdO + ((maxId - minId))
/ 2.0f);
ScoreDoc[] result;
Query q = new TermQuery(new Term("body", "body"));
@@ -529,8 +529,8 @@ namespace Lucene.Net.Search
IndexSearcher search = NewSearcher(reader);
int numDocs = reader.NumDocs;
- double? minIdO = Convert.ToDouble(minId + .5);
- double? medIdO = Convert.ToDouble((float)minIdO + ((maxId -
minId)) / 2.0);
+ double minIdO = Convert.ToDouble(minId + .5);
+ double medIdO = Convert.ToDouble((float)minIdO + ((maxId - minId))
/ 2.0);
ScoreDoc[] result;
Query q = new TermQuery(new Term("body", "body"));
diff --git a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
index f621a36..adc8147 100644
--- a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
+++ b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
@@ -349,7 +349,7 @@ namespace Lucene.Net.Search
internal readonly SortedSetDocValues dv;
internal readonly int maxDoc;
- internal readonly ISet<long?> ords = new JCG.HashSet<long?>();
+ internal readonly ISet<long> ords = new JCG.HashSet<long>();
internal readonly SimScorer[] sims;
internal readonly int minNrShouldMatch;
diff --git a/src/Lucene.Net.Tests/Search/TestSubScorerFreqs.cs
b/src/Lucene.Net.Tests/Search/TestSubScorerFreqs.cs
index 8766f0b..dfc981d 100644
--- a/src/Lucene.Net.Tests/Search/TestSubScorerFreqs.cs
+++ b/src/Lucene.Net.Tests/Search/TestSubScorerFreqs.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Index.Extensions;
using Lucene.Net.Store;
@@ -76,7 +76,7 @@ namespace Lucene.Net.Search
private readonly ICollector other;
private int docBase;
- public IDictionary<int?, IDictionary<Query, float?>> DocCounts {
get; } = new Dictionary<int?, IDictionary<Query, float?>>();
+ public IDictionary<int, IDictionary<Query, float>> DocCounts {
get; } = new Dictionary<int, IDictionary<Query, float>>();
private readonly IDictionary<Query, Scorer> subScorers = new
Dictionary<Query, Scorer>();
private readonly ISet<string> relationships;
@@ -113,7 +113,7 @@ namespace Lucene.Net.Search
public virtual void Collect(int doc)
{
- IDictionary<Query, float?> freqs = new Dictionary<Query,
float?>();
+ IDictionary<Query, float> freqs = new Dictionary<Query,
float>();
foreach (KeyValuePair<Query, Scorer> ent in subScorers)
{
Scorer value = ent.Value;
@@ -145,13 +145,13 @@ namespace Lucene.Net.Search
Assert.AreEqual(maxDocs, c.DocCounts.Count);
for (int i = 0; i < maxDocs; i++)
{
- IDictionary<Query, float?> doc0 = c.DocCounts[i];
+ IDictionary<Query, float> doc0 = c.DocCounts[i];
Assert.AreEqual(1, doc0.Count);
- Assert.AreEqual(4.0F, doc0[q].GetValueOrDefault(),
FLOAT_TOLERANCE);
+ Assert.AreEqual(4.0F, doc0[q], FLOAT_TOLERANCE);
- IDictionary<Query, float?> doc1 = c.DocCounts[++i];
+ IDictionary<Query, float> doc1 = c.DocCounts[++i];
Assert.AreEqual(1, doc1.Count);
- Assert.AreEqual(1.0F, doc1[q].GetValueOrDefault(),
FLOAT_TOLERANCE);
+ Assert.AreEqual(1.0F, doc1[q], FLOAT_TOLERANCE);
}
}
@@ -185,22 +185,22 @@ namespace Lucene.Net.Search
bool includeOptional = occur.Contains("SHOULD");
for (int i = 0; i < maxDocs; i++)
{
- IDictionary<Query, float?> doc0 = c.DocCounts[i];
+ IDictionary<Query, float> doc0 = c.DocCounts[i];
Assert.AreEqual(includeOptional ? 5 : 4, doc0.Count);
- Assert.AreEqual(1.0F, doc0[aQuery].GetValueOrDefault(),
FLOAT_TOLERANCE);
- Assert.AreEqual(4.0F, doc0[dQuery].GetValueOrDefault(),
FLOAT_TOLERANCE);
+ Assert.AreEqual(1.0F, doc0[aQuery], FLOAT_TOLERANCE);
+ Assert.AreEqual(4.0F, doc0[dQuery], FLOAT_TOLERANCE);
if (includeOptional)
{
- Assert.AreEqual(3.0F,
doc0[cQuery].GetValueOrDefault(), FLOAT_TOLERANCE);
+ Assert.AreEqual(3.0F, doc0[cQuery], FLOAT_TOLERANCE);
}
- IDictionary<Query, float?> doc1 = c.DocCounts[++i];
+ IDictionary<Query, float> doc1 = c.DocCounts[++i];
Assert.AreEqual(includeOptional ? 5 : 4, doc1.Count);
- Assert.AreEqual(1.0F, doc1[aQuery].GetValueOrDefault(),
FLOAT_TOLERANCE);
- Assert.AreEqual(1.0F, doc1[dQuery].GetValueOrDefault(),
FLOAT_TOLERANCE);
+ Assert.AreEqual(1.0F, doc1[aQuery], FLOAT_TOLERANCE);
+ Assert.AreEqual(1.0F, doc1[dQuery], FLOAT_TOLERANCE);
if (includeOptional)
{
- Assert.AreEqual(1.0F,
doc1[cQuery].GetValueOrDefault(), FLOAT_TOLERANCE);
+ Assert.AreEqual(1.0F, doc1[cQuery], FLOAT_TOLERANCE);
}
}
}
@@ -218,13 +218,13 @@ namespace Lucene.Net.Search
Assert.AreEqual(maxDocs, c.DocCounts.Count);
for (int i = 0; i < maxDocs; i++)
{
- IDictionary<Query, float?> doc0 = c.DocCounts[i];
+ IDictionary<Query, float> doc0 = c.DocCounts[i];
Assert.AreEqual(1, doc0.Count);
- Assert.AreEqual(2.0F, doc0[q].GetValueOrDefault(),
FLOAT_TOLERANCE);
+ Assert.AreEqual(2.0F, doc0[q], FLOAT_TOLERANCE);
- IDictionary<Query, float?> doc1 = c.DocCounts[++i];
+ IDictionary<Query, float> doc1 = c.DocCounts[++i];
Assert.AreEqual(1, doc1.Count);
- Assert.AreEqual(1.0F, doc1[q].GetValueOrDefault(),
FLOAT_TOLERANCE);
+ Assert.AreEqual(1.0F, doc1[q], FLOAT_TOLERANCE);
}
}
}
diff --git a/src/Lucene.Net.Tests/Util/TestBytesRefHash.cs
b/src/Lucene.Net.Tests/Util/TestBytesRefHash.cs
index 53cae51..8233b52 100644
--- a/src/Lucene.Net.Tests/Util/TestBytesRefHash.cs
+++ b/src/Lucene.Net.Tests/Util/TestBytesRefHash.cs
@@ -104,7 +104,7 @@ namespace Lucene.Net.Util
int num = AtLeast(2);
for (int j = 0; j < num; j++)
{
- IDictionary<string, int?> strings = new Dictionary<string,
int?>();
+ IDictionary<string, int> strings = new Dictionary<string,
int>();
int uniqueCount = 0;
for (int i = 0; i < 797; i++)
{
@@ -130,7 +130,7 @@ namespace Lucene.Net.Util
Assert.AreEqual(hash.Count, count);
}
}
- foreach (KeyValuePair<string, int?> entry in strings)
+ foreach (KeyValuePair<string, int> entry in strings)
{
@ref.CopyChars(entry.Key);
Assert.AreEqual(@ref, hash.Get((int)entry.Value, scratch));
diff --git a/src/Lucene.Net.Tests/Util/TestDoubleBarrelLRUCache.cs
b/src/Lucene.Net.Tests/Util/TestDoubleBarrelLRUCache.cs
index a819d4f..52fb432 100644
--- a/src/Lucene.Net.Tests/Util/TestDoubleBarrelLRUCache.cs
+++ b/src/Lucene.Net.Tests/Util/TestDoubleBarrelLRUCache.cs
@@ -201,9 +201,9 @@ namespace Lucene.Net.Util
protected internal class CloneableInteger :
DoubleBarrelLRUCache.CloneableKey
{
- internal int? value;
+ internal int value;
- public CloneableInteger(int? value)
+ public CloneableInteger(int value)
{
this.value = value;
}
diff --git a/src/Lucene.Net.Tests/Util/TestMergedIterator.cs
b/src/Lucene.Net.Tests/Util/TestMergedIterator.cs
index 55e9c06..3242f39 100644
--- a/src/Lucene.Net.Tests/Util/TestMergedIterator.cs
+++ b/src/Lucene.Net.Tests/Util/TestMergedIterator.cs
@@ -123,7 +123,7 @@ namespace Lucene.Net.Util
private void TestCase(int itrsWithVal, int specifiedValsOnItr, bool
removeDups)
{
// Build a random number of lists
- IList<int?> expected = new JCG.List<int?>();
+ IList<int> expected = new JCG.List<int>();
Random random = new J2N.Randomizer(Random.NextInt64());
int numLists = itrsWithVal + random.Next(1000 - itrsWithVal);
IList<int>[] lists = new IList<int>[numLists];
@@ -166,7 +166,7 @@ namespace Lucene.Net.Util
try
{
MergedEnumerator<int> mergedItr = new
MergedEnumerator<int>(removeDups, itrs);
- IEnumerator<int?> expectedItr = expected.GetEnumerator();
+ using IEnumerator<int> expectedItr = expected.GetEnumerator();
while (expectedItr.MoveNext())
{
Assert.IsTrue(mergedItr.MoveNext());
@@ -292,7 +292,7 @@ namespace Lucene.Net.Util
private void TestCaseIterator(int itrsWithVal, int specifiedValsOnItr,
bool removeDups)
{
// Build a random number of lists
- IList<int?> expected = new JCG.List<int?>();
+ IList<int> expected = new JCG.List<int>();
Random random = new J2N.Randomizer(Random.NextInt64());
int numLists = itrsWithVal + random.Next(1000 - itrsWithVal);
IList<int>[] lists = new IList<int>[numLists];
@@ -334,7 +334,7 @@ namespace Lucene.Net.Util
}
MergedIterator<int> mergedItr = new
MergedIterator<int>(removeDups, itrs);
- IEnumerator<int?> expectedItr = expected.GetEnumerator();
+ using IEnumerator<int> expectedItr = expected.GetEnumerator();
while (expectedItr.MoveNext())
{
Assert.IsTrue(mergedItr.MoveNext());