This is an automated email from the ASF dual-hosted git repository.
paulirwin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git
The following commit(s) were added to refs/heads/master by this push:
new d96dc9e2f Dispose of some disposables, #265, #615 (#1074)
d96dc9e2f is described below
commit d96dc9e2fb6aad76fb555f3dc88320e50a870798
Author: Paul Irwin <[email protected]>
AuthorDate: Thu Dec 26 08:43:11 2024 -0700
Dispose of some disposables, #265, #615 (#1074)
* Add using statements where possible, per CA2000, #265
* Fix file handle leaks in demo code, #615
* Add leaveOpen parameter to InputStreamDataInput, #265
* Add leaveOpen parameter to OutputStreamDataOutput, #265
---
.../Analysis/Hunspell/Dictionary.cs | 4 +-
.../Analysis/Synonym/SynonymMap.cs | 3 +
.../Dict/BinaryDictionary.cs | 8 +--
.../Dict/CharacterDefinition.cs | 2 +-
.../Dict/ConnectionCosts.cs | 2 +-
.../Tools/BinaryDictionaryWriter.cs | 6 +-
.../Tools/CharacterDefinitionWriter.cs | 2 +-
.../Tools/ConnectionCostsBuilder.cs | 2 +-
.../Tools/ConnectionCostsWriter.cs | 4 +-
.../Tools/TokenInfoDictionaryBuilder.cs | 10 +--
.../Memory/DirectPostingsFormat.cs | 2 +-
src/Lucene.Net.Demo/SearchFiles.cs | 71 ++++++++++++----------
.../Taxonomy/Directory/DirectoryTaxonomyWriter.cs | 5 +-
src/Lucene.Net.Misc/Index/IndexSplitter.cs | 2 +-
.../Index/MultiPassIndexSplitter.cs | 2 +-
src/Lucene.Net.Misc/Misc/GetTermInfo.cs | 40 ++++++------
src/Lucene.Net.Misc/Misc/HighFreqTerms.cs | 2 +-
src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs | 2 +-
src/Lucene.Net.Suggest/Suggest/Lookup.cs | 4 +-
src/Lucene.Net.TestFramework/Util/TestUtil.cs | 17 +++---
.../Suggest/Fst/LargeInputFST.cs | 31 +++++-----
.../Codecs/Lucene45/Lucene45DocValuesConsumer.cs | 2 +-
src/Lucene.Net/Index/FlushPolicy.cs | 4 +-
src/Lucene.Net/Store/InputStreamDataInput.cs | 29 ++++++++-
src/Lucene.Net/Store/LockStressTest.cs | 5 +-
src/Lucene.Net/Store/LockVerifyServer.cs | 7 ++-
src/Lucene.Net/Store/OutputStreamDataOutput.cs | 29 ++++++++-
src/Lucene.Net/Util/Constants.cs | 5 +-
src/Lucene.Net/Util/Fst/FST.cs | 2 +-
29 files changed, 186 insertions(+), 118 deletions(-)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
index c77fd262c..ceeb7eb53 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
@@ -295,7 +295,7 @@ namespace Lucene.Net.Analysis.Hunspell
[""] = 0
};
- var reader = new StreamReader(affixStream, decoder);
+ using var reader = new StreamReader(affixStream, decoder); //
LUCENENET specific - CA2000: Use using pattern to ensure reader is disposed
string line; // LUCENENET: Removed unnecessary null assignment
int lineNumber = 0;
while ((line = reader.ReadLine()) != null)
@@ -910,7 +910,7 @@ namespace Lucene.Net.Analysis.Hunspell
{
foreach (Stream dictionary in dictionaries)
{
- var lines = new StreamReader(dictionary, decoder);
+ using var lines = new StreamReader(dictionary, decoder);
// LUCENENET specific - CA2000: Use using pattern to ensure reader is disposed
string line = lines.ReadLine(); // first line is number of
entries (approximately, sometimes)
while ((line = lines.ReadLine()) != null)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
index 4f3f66ade..c319c7d79 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
@@ -348,6 +348,9 @@ namespace Lucene.Net.Analysis.Synonym
/// <summary>
/// Parse the given input, adding synonyms to the inherited <see
cref="Builder"/>. </summary>
/// <param name="in"> The input to parse </param>
+ /// <remarks>
+ /// LUCENENET NOTE: Implementations are expected to dispose of the
<paramref name="in"/> parameter.
+ /// </remarks>
public abstract void Parse(TextReader @in);
/// <summary>
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs
b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs
index 13c530afd..c4b6b77eb 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs
@@ -101,8 +101,8 @@ namespace Lucene.Net.Analysis.Ja.Dict
ByteBuffer buffer; // LUCENENET: IDE0059: Remove unnecessary value
assignment
using (Stream mapIS = GetResource(TARGETMAP_FILENAME_SUFFIX))
+ using (var @in = new InputStreamDataInput(mapIS, leaveOpen: true))
// LUCENENET: CA2000: Use using statement
{
- DataInput @in = new InputStreamDataInput(mapIS);
CodecUtil.CheckHeader(@in, TARGETMAP_HEADER, VERSION, VERSION);
targetMap = new int[@in.ReadVInt32()];
targetMapOffsets = new int[@in.ReadVInt32()];
@@ -124,8 +124,8 @@ namespace Lucene.Net.Analysis.Ja.Dict
}
using (Stream posIS = GetResource(POSDICT_FILENAME_SUFFIX))
+ using (var @in = new InputStreamDataInput(posIS, leaveOpen: true))
// LUCENENET: CA2000: Use using statement
{
- DataInput @in = new InputStreamDataInput(posIS);
CodecUtil.CheckHeader(@in, POSDICT_HEADER, VERSION, VERSION);
int posSize = @in.ReadVInt32();
posDict = new string[posSize];
@@ -151,9 +151,9 @@ namespace Lucene.Net.Analysis.Ja.Dict
ByteBuffer tmpBuffer;
using (Stream dictIS = GetResource(DICT_FILENAME_SUFFIX))
+ // no buffering here, as we load in one large buffer
+ using (var @in = new InputStreamDataInput(dictIS, leaveOpen:
true)) // LUCENENET: CA2000: Use using statement
{
- // no buffering here, as we load in one large buffer
- DataInput @in = new InputStreamDataInput(dictIS);
CodecUtil.CheckHeader(@in, DICT_HEADER, VERSION, VERSION);
int size = @in.ReadVInt32();
tmpBuffer = ByteBuffer.Allocate(size); // AllocateDirect..?
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs
b/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs
index 5288cd03d..c9fccc184 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs
@@ -61,7 +61,7 @@ namespace Lucene.Net.Analysis.Ja.Dict
private CharacterDefinition()
{
using Stream @is = BinaryDictionary.GetTypeResource(GetType(),
FILENAME_SUFFIX);
- DataInput @in = new InputStreamDataInput(@is);
+ using var @in = new InputStreamDataInput(@is, leaveOpen: true); //
LUCENENET: CA2000: Use using statement
CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION);
@in.ReadBytes(characterCategoryMap, 0,
characterCategoryMap.Length);
for (int i = 0; i < CLASS_COUNT; i++)
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs
b/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs
index 9065f1228..3095f3983 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs
@@ -39,8 +39,8 @@ namespace Lucene.Net.Analysis.Ja.Dict
short[][] costs = null;
using (Stream @is = BinaryDictionary.GetTypeResource(GetType(),
FILENAME_SUFFIX))
+ using (var @in = new InputStreamDataInput(@is, leaveOpen: true))
// LUCENENET: CA2000: Use using statement
{
- DataInput @in = new InputStreamDataInput(@is);
CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION);
int forwardSize = @in.ReadVInt32();
int backwardSize = @in.ReadVInt32();
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
index 2c7078b3a..16d2b5634 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
@@ -298,7 +298,7 @@ namespace Lucene.Net.Analysis.Ja.Util
//new File(filename).getParentFile().mkdirs();
System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
using Stream os = new FileStream(filename, FileMode.Create,
FileAccess.Write);
- DataOutput @out = new OutputStreamDataOutput(os);
+ using var @out = new OutputStreamDataOutput(os, leaveOpen: true);
// LUCENENET: CA2000: Use using statement
CodecUtil.WriteHeader(@out, BinaryDictionary.TARGETMAP_HEADER,
BinaryDictionary.VERSION);
int numSourceIds = lastSourceId + 1;
@@ -328,7 +328,7 @@ namespace Lucene.Net.Analysis.Ja.Util
//new File(filename).getParentFile().mkdirs();
System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
using Stream os = new FileStream(filename, FileMode.Create,
FileAccess.Write);
- DataOutput @out = new OutputStreamDataOutput(os);
+ using var @out = new OutputStreamDataOutput(os, leaveOpen: true);
// LUCENENET: CA2000: Use using statement
CodecUtil.WriteHeader(@out, BinaryDictionary.POSDICT_HEADER,
BinaryDictionary.VERSION);
@out.WriteVInt32(posDict.Count);
foreach (string s in posDict)
@@ -355,7 +355,7 @@ namespace Lucene.Net.Analysis.Ja.Util
//new File(filename).getParentFile().mkdirs();
System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
using Stream os = new FileStream(filename, FileMode.Create,
FileAccess.Write);
- DataOutput @out = new OutputStreamDataOutput(os);
+ using var @out = new OutputStreamDataOutput(os, leaveOpen: true);
// LUCENENET: CA2000: Use using statement
CodecUtil.WriteHeader(@out, BinaryDictionary.DICT_HEADER,
BinaryDictionary.VERSION);
@out.WriteVInt32(m_buffer.Position);
diff --git
a/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs
b/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs
index d963d1252..140349113 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs
@@ -80,7 +80,7 @@ namespace Lucene.Net.Analysis.Ja.Util
//new File(filename).getParentFile().mkdirs();
System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(baseDir));
using Stream os = new FileStream(filename, FileMode.Create,
FileAccess.Write);
- DataOutput @out = new OutputStreamDataOutput(os);
+ using var @out = new OutputStreamDataOutput(os, leaveOpen: true);
// LUCENENET: CA2000: Use using statement
CodecUtil.WriteHeader(@out, CharacterDefinition.HEADER,
CharacterDefinition.VERSION);
@out.WriteBytes(characterCategoryMap, 0,
characterCategoryMap.Length);
for (int i = 0; i < CharacterDefinition.CLASS_COUNT; i++)
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
index d15ddb9ae..5d5f1d4c7 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.Analysis.Ja.Util
public static ConnectionCostsWriter Build(string filename)
{
using Stream inputStream = new FileStream(filename, FileMode.Open,
FileAccess.Read);
- StreamReader streamReader = new StreamReader(inputStream,
Encoding.ASCII);
+ using StreamReader streamReader = new StreamReader(inputStream,
Encoding.ASCII, detectEncodingFromByteOrderMarks: true, bufferSize: 1024,
leaveOpen: true); // LUCENENET: CA2000: Use using statement
string line = streamReader.ReadLine();
string[] dimensions = whiteSpaceRegex.Split(line).TrimEnd();
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
index b6c6c1bd5..486b5b050 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
@@ -54,11 +54,11 @@ namespace Lucene.Net.Analysis.Ja.Util
// LUCENENET specific: we don't need to do a "classpath" output
directory, since we
// are changing the implementation to read files dynamically
instead of making the
// user recompile with the new files.
- string filename = System.IO.Path.Combine(baseDir,
typeof(ConnectionCosts).Name + CharacterDefinition.FILENAME_SUFFIX);
+ string filename = System.IO.Path.Combine(baseDir,
nameof(ConnectionCosts) + CharacterDefinition.FILENAME_SUFFIX);
//new File(filename).getParentFile().mkdirs();
System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
using Stream os = new FileStream(filename, FileMode.Create,
FileAccess.Write);
- DataOutput @out = new OutputStreamDataOutput(os);
+ using var @out = new OutputStreamDataOutput(os, leaveOpen: true);
// LUCENENET: CA2000: Use using statement
CodecUtil.WriteHeader(@out, ConnectionCosts.HEADER,
ConnectionCosts.VERSION);
@out.WriteVInt32(forwardSize);
@out.WriteVInt32(backwardSize);
diff --git
a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
index 3ba3ac3a5..737182961 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs
@@ -72,7 +72,7 @@ namespace Lucene.Net.Analysis.Ja.Util
{
using Stream inputStream = new FileStream(file, FileMode.Open,
FileAccess.Read);
Encoding decoder = Encoding.GetEncoding(encoding);
- TextReader reader = new StreamReader(inputStream, decoder);
+ using TextReader reader = new StreamReader(inputStream,
decoder, detectEncodingFromByteOrderMarks: true, bufferSize: 1024, leaveOpen:
true); // LUCENENET: CA2000: Use using statement
string line = null;
while ((line = reader.ReadLine()) != null)
@@ -159,10 +159,10 @@ namespace Lucene.Net.Analysis.Ja.Util
return dictionary;
}
-
+
/// <summary>
/// IPADIC features
- ///
+ ///
/// 0 - surface
/// 1 - left cost
/// 2 - right cost
@@ -171,9 +171,9 @@ namespace Lucene.Net.Analysis.Ja.Util
/// 10 - base form
/// 11 - reading
/// 12 - pronounciation
- ///
+ ///
/// UniDic features
- ///
+ ///
/// 0 - surface
/// 1 - left cost
/// 2 - right cost
diff --git a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
index c3f19ac73..b58e64511 100644
--- a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
+++ b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
@@ -401,7 +401,7 @@ namespace Lucene.Net.Codecs.Memory
Int32ArrayWriter scratch = new Int32ArrayWriter();
// Used for payloads, if any:
- RAMOutputStream ros = new RAMOutputStream();
+ using RAMOutputStream ros = new RAMOutputStream(); //
LUCENENET specific - dispose when done
// if (DEBUG) {
// System.out.println("\nLOAD terms seg=" +
state.segmentInfo.name + " field=" + field + " hasOffsets=" + hasOffsets + "
hasFreq=" + hasFreq + " hasPos=" + hasPos + " hasPayloads=" + hasPayloads);
diff --git a/src/Lucene.Net.Demo/SearchFiles.cs
b/src/Lucene.Net.Demo/SearchFiles.cs
index b001396bf..01e71eed2 100644
--- a/src/Lucene.Net.Demo/SearchFiles.cs
+++ b/src/Lucene.Net.Demo/SearchFiles.cs
@@ -112,57 +112,64 @@ namespace Lucene.Net.Demo
// :Post-Release-Update-Version.LUCENE_XY:
Analyzer analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48);
+ FileStream fileStream = null; // LUCENENET specific - keep track
of the FileStream so we can dispose of it
TextReader input = null;
if (queries != null)
{
- input = new StreamReader(new FileStream(queries,
FileMode.Open, FileAccess.Read), Encoding.UTF8);
+ fileStream = new FileStream(queries, FileMode.Open,
FileAccess.Read);
+ input = new StreamReader(fileStream, Encoding.UTF8);
}
else
{
input = Console.In;
}
- // :Post-Release-Update-Version.LUCENE_XY:
- QueryParser parser = new QueryParser(LuceneVersion.LUCENE_48,
field, analyzer);
- while (true)
+
+ using (fileStream) // LUCENENET specific - dispose of the
FileStream when we are done with it
{
- if (queries is null && queryString is null)
+ // :Post-Release-Update-Version.LUCENE_XY:
+ QueryParser parser = new QueryParser(LuceneVersion.LUCENE_48,
field, analyzer);
+ while (true)
{
- // prompt the user
- Console.WriteLine("Enter query (or press Enter to exit):
");
- }
+ if (queries is null && queryString is null)
+ {
+ // prompt the user
+ Console.WriteLine("Enter query (or press Enter to
exit): ");
+ }
- string line = queryString ?? input.ReadLine();
+ string line = queryString ?? input.ReadLine();
- if (line is null || line.Length == 0)
- {
- break;
- }
+ if (line is null || line.Length == 0)
+ {
+ break;
+ }
- line = line.Trim();
- if (line.Length == 0)
- {
- break;
- }
+ line = line.Trim();
+ if (line.Length == 0)
+ {
+ break;
+ }
- Query query = parser.Parse(line);
- Console.WriteLine("Searching for: " + query.ToString(field));
+ Query query = parser.Parse(line);
+ Console.WriteLine("Searching for: " +
query.ToString(field));
- if (repeat > 0) // repeat & time as benchmark
- {
- DateTime start = DateTime.UtcNow;
- for (int i = 0; i < repeat; i++)
+ if (repeat > 0) // repeat & time as benchmark
{
- searcher.Search(query, null, 100);
+ DateTime start = DateTime.UtcNow;
+ for (int i = 0; i < repeat; i++)
+ {
+ searcher.Search(query, null, 100);
+ }
+
+ DateTime end = DateTime.UtcNow;
+ Console.WriteLine("Time: " + (end -
start).TotalMilliseconds + "ms");
}
- DateTime end = DateTime.UtcNow;
- Console.WriteLine("Time: " + (end -
start).TotalMilliseconds + "ms");
- }
- DoPagingSearch(searcher, query, hitsPerPage, raw, queries is
null && queryString is null);
+ DoPagingSearch(searcher, query, hitsPerPage, raw, queries
is null && queryString is null);
- if (queryString != null)
- {
- break;
+ if (queryString != null)
+ {
+ break;
+ }
}
}
}
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index 857a30b96..8007532ad 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -1170,8 +1170,9 @@ namespace Lucene.Net.Facet.Taxonomy.Directory
}
AddDone(); // in case this wasn't previously called
- var ifs = new FileStream(tmpfile, FileMode.OpenOrCreate,
FileAccess.Read);
- using (var @in = new InputStreamDataInput(ifs))
+ // LUCENENET specific - dispose of resources
+ using (var ifs = new FileStream(tmpfile,
FileMode.OpenOrCreate, FileAccess.Read))
+ using (var @in = new InputStreamDataInput(ifs, leaveOpen:
true))
{
map = new int[@in.ReadInt32()];
// NOTE: The current code assumes here that the map is
complete,
diff --git a/src/Lucene.Net.Misc/Index/IndexSplitter.cs
b/src/Lucene.Net.Misc/Index/IndexSplitter.cs
index 26f51ed5f..5a842805d 100644
--- a/src/Lucene.Net.Misc/Index/IndexSplitter.cs
+++ b/src/Lucene.Net.Misc/Index/IndexSplitter.cs
@@ -167,7 +167,7 @@ namespace Lucene.Net.Index
public virtual void Split(DirectoryInfo destDir, ICollection<string>
segs) // LUCENENET specific - changed to ICollection to reduce copy operations
{
destDir.Create();
- FSDirectory destFSDir = FSDirectory.Open(destDir);
+ using FSDirectory destFSDir = FSDirectory.Open(destDir); //
LUCENENET specific - CA2000: dispose of destFSDir when finished
SegmentInfos destInfos = new SegmentInfos();
destInfos.Counter = Infos.Counter;
foreach (string n in segs)
diff --git a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
index 99910ec5a..a0495b7e2 100644
--- a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
+++ b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
@@ -73,7 +73,7 @@ namespace Lucene.Net.Index
// wrap a potentially read-only input
// this way we don't have to preserve original deletions because
neither
// deleteDocument(int) or undeleteAll() is applied to the wrapped
input index.
- FakeDeleteIndexReader input = new FakeDeleteIndexReader(@in);
+ using FakeDeleteIndexReader input = new
FakeDeleteIndexReader(@in); // LUCENENET: CA2000: Dispose FakeDeleteIndexReader
int maxDoc = input.MaxDoc;
int partLen = maxDoc / numParts;
for (int i = 0; i < numParts; i++)
diff --git a/src/Lucene.Net.Misc/Misc/GetTermInfo.cs
b/src/Lucene.Net.Misc/Misc/GetTermInfo.cs
index cd682a7b1..a5caf8d69 100644
--- a/src/Lucene.Net.Misc/Misc/GetTermInfo.cs
+++ b/src/Lucene.Net.Misc/Misc/GetTermInfo.cs
@@ -50,31 +50,37 @@ namespace Lucene.Net.Misc
/// <exception cref="ArgumentException">Thrown if the incorrect number
of arguments are provided</exception>
public static void Main(string[] args)
{
-
- FSDirectory dir; // LUCENENET: IDE0059: Remove unnecessary value
assignment
- string inputStr; // LUCENENET: IDE0059: Remove unnecessary value
assignment
- string field; // LUCENENET: IDE0059: Remove unnecessary value
assignment
-
- if (args.Length == 3)
+ // LUCENENET specific - CA2000: dispose of directory when finished
+ FSDirectory dir = null;
+ try
{
- dir = FSDirectory.Open(new DirectoryInfo(args[0]));
- field = args[1];
- inputStr = args[2];
+ string inputStr; // LUCENENET: IDE0059: Remove unnecessary
value assignment
+ string field; // LUCENENET: IDE0059: Remove unnecessary value
assignment
+ if (args.Length == 3)
+ {
+ dir = FSDirectory.Open(new DirectoryInfo(args[0]));
+ field = args[1];
+ inputStr = args[2];
+ }
+ else
+ {
+ // LUCENENET specific - our wrapper console shows the
correct usage
+ throw new ArgumentException("GetTermInfo requires 3
arguments", nameof(args));
+ //Usage();
+ //Environment.Exit(1);
+ }
+
+ TermInfo(dir, new Term(field, inputStr));
}
- else
+ finally
{
- // LUCENENET specific - our wrapper console shows the correct
usage
- throw new ArgumentException("GetTermInfo requires 3
arguments", nameof(args));
- //Usage();
- //Environment.Exit(1);
+ dir?.Dispose();
}
-
- TermInfo(dir, new Term(field, inputStr));
}
public static void TermInfo(Store.Directory dir, Term term)
{
- IndexReader reader = DirectoryReader.Open(dir);
+ using IndexReader reader = DirectoryReader.Open(dir);
Console.WriteLine("{0}:{1} \t totalTF = {2:#,##0} \t doc freq =
{3:#,##0} \n", term.Field, term.Text, reader.TotalTermFreq(term),
reader.DocFreq(term));
}
diff --git a/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
b/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
index 133f7fe82..afc30f40c 100644
--- a/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
+++ b/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
@@ -73,7 +73,7 @@ namespace Lucene.Net.Misc
//Environment.Exit(1);
}
- Store.Directory dir = FSDirectory.Open(new DirectoryInfo(args[0]));
+ using Store.Directory dir = FSDirectory.Open(new
DirectoryInfo(args[0]));
IComparer<TermStats> comparer = DocFreqComparer.Default;
diff --git a/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs
b/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs
index 42e16c7fd..b423ed483 100644
--- a/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs
+++ b/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs
@@ -181,7 +181,7 @@ namespace Lucene.Net.Spatial.Queries
throw new ArgumentNullException(nameof(body));
var map = new Dictionary<string, string>();
- StringTokenizer st = new StringTokenizer(body, " \n\t");
+ using StringTokenizer st = new StringTokenizer(body, " \n\t");
while (st.MoveNext())
{
diff --git a/src/Lucene.Net.Suggest/Suggest/Lookup.cs
b/src/Lucene.Net.Suggest/Suggest/Lookup.cs
index a644eece3..fb4969d2b 100644
--- a/src/Lucene.Net.Suggest/Suggest/Lookup.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Lookup.cs
@@ -274,14 +274,14 @@ namespace Lucene.Net.Search.Suggest
/// </summary>
public virtual bool Store(Stream output)
{
- DataOutput dataOut = new OutputStreamDataOutput(output);
+ var dataOut = new OutputStreamDataOutput(output, leaveOpen: true);
try
{
return Store(dataOut);
}
finally
{
- IOUtils.Dispose(output);
+ IOUtils.Dispose(dataOut, output); // LUCENENET specific -
dispose of dataOut
}
}
diff --git a/src/Lucene.Net.TestFramework/Util/TestUtil.cs
b/src/Lucene.Net.TestFramework/Util/TestUtil.cs
index 1c4bd1b6a..d270d069e 100644
--- a/src/Lucene.Net.TestFramework/Util/TestUtil.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestUtil.cs
@@ -99,7 +99,7 @@ namespace Lucene.Net.Util
/// <summary>
/// Convenience method unzipping <paramref name="zipFileStream"/> into
<paramref name="destDir"/>, cleaning up
- /// <paramref name="destDir"/> first.
+ /// <paramref name="destDir"/> first.
/// </summary>
public static void Unzip(Stream zipFileStream, DirectoryInfo destDir)
{
@@ -180,7 +180,7 @@ namespace Lucene.Net.Util
{
if (LuceneTestCase.UseInfoStream)
{
- checker.FlushInfoStream();
+ checker.FlushInfoStream();
Console.WriteLine(bos.ToString());
}
return indexStatus;
@@ -201,8 +201,9 @@ namespace Lucene.Net.Util
public static void CheckReader(AtomicReader reader, bool
crossCheckTermVectors)
{
- ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
- StreamWriter infoStream = new StreamWriter(bos, Encoding.UTF8);
+ // LUCENENET: dispose the StreamWriter and ByteArrayOutputStream
when done
+ using ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
+ using StreamWriter infoStream = new StreamWriter(bos,
Encoding.UTF8, leaveOpen: true, bufferSize: 1024);
reader.CheckIntegrity();
CheckIndex.Status.FieldNormStatus fieldNormStatus =
Index.CheckIndex.TestFieldNorms(reader, infoStream);
@@ -591,8 +592,8 @@ namespace Lucene.Net.Util
public static bool FieldSupportsHugeBinaryDocValues(string field)
{
string dvFormat = GetDocValuesFormat(field);
- if (dvFormat.Equals("Lucene40", StringComparison.Ordinal)
- || dvFormat.Equals("Lucene42", StringComparison.Ordinal)
+ if (dvFormat.Equals("Lucene40", StringComparison.Ordinal)
+ || dvFormat.Equals("Lucene42", StringComparison.Ordinal)
|| dvFormat.Equals("Memory", StringComparison.Ordinal))
{
return false;
@@ -868,7 +869,7 @@ namespace Lucene.Net.Util
/// Returns a valid (compiling) <see cref="Regex"/> instance with
random stuff inside. Be careful
/// when applying random patterns to longer strings as certain types
of patterns
/// may explode into exponential times in backtracking implementations
(such as Java's).
- /// </summary>
+ /// </summary>
public static Regex RandomRegex(Random random) // LUCENENET specific -
renamed from RandomPattern()
{
return
RandomizedTesting.Generators.RandomExtensions.NextRegex(random); // LUCENENET:
Moved general random data generation to RandomizedTesting.Generators
@@ -1059,4 +1060,4 @@ namespace Lucene.Net.Util
'\u3000'
};
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs
b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs
index aedaa40d1..19a6a4587 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs
@@ -32,30 +32,31 @@ namespace Lucene.Net.Search.Suggest.Fst
// LUCENENET specific - renaming from Main() because we must only have
1 entry point.
// Not sure why this utility is in a test project anyway - this seems
like something that should
// be in Lucene.Net.Suggest so we can put it into the lucene-cli tool.
- public static void Main2(string[] args)
+ public static void Main2(string[] args)
{
FileInfo input = new FileInfo("/home/dweiss/tmp/shuffled.dict");
- int buckets = 20;
- int shareMaxTail = 10;
+ const int buckets = 20;
+ const int shareMaxTail = 10;
ExternalRefSorter sorter = new ExternalRefSorter(new
OfflineSorter());
FSTCompletionBuilder builder = new FSTCompletionBuilder(buckets,
sorter, shareMaxTail);
- TextReader reader =
- new StreamReader(
- new FileStream(input.FullName, FileMode.Open),
Encoding.UTF8);
-
- BytesRef scratch = new BytesRef();
- string line;
- int count = 0;
- while ((line = reader.ReadLine()) != null)
+ // LUCENENET specific - dispose of fileStream and reader when done
+ using (FileStream fileStream = new FileStream(input.FullName,
FileMode.Open))
+ using (TextReader reader = new StreamReader(fileStream,
Encoding.UTF8, detectEncodingFromByteOrderMarks: true, bufferSize: 1024,
leaveOpen: true))
{
- scratch.CopyChars(line);
- builder.Add(scratch, count % buckets);
- if ((count++ % 100000) == 0)
+ BytesRef scratch = new BytesRef();
+ string line;
+ int count = 0;
+ while ((line = reader.ReadLine()) != null)
{
- Console.WriteLine("Line: " + count);
+ scratch.CopyChars(line);
+ builder.Add(scratch, count % buckets);
+ if ((count++ % 100000) == 0)
+ {
+ Console.WriteLine("Line: " + count);
+ }
}
}
diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
index d4a68a4e6..2351e4aa2 100644
--- a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
+++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
@@ -384,7 +384,7 @@ namespace Lucene.Net.Codecs.Lucene45
long startFP = data.Position; // LUCENENET specific: Renamed
from getFilePointer() to match FileStream
// currently, we have to store the delta from expected for
every 1/nth term
// we could avoid this, but its not much and less overall RAM
than the previous approach!
- RAMOutputStream addressBuffer = new RAMOutputStream();
+ using RAMOutputStream addressBuffer = new RAMOutputStream();
MonotonicBlockPackedWriter termAddresses = new
MonotonicBlockPackedWriter(addressBuffer, BLOCK_SIZE);
BytesRef lastTerm = new BytesRef();
long count = 0;
diff --git a/src/Lucene.Net/Index/FlushPolicy.cs
b/src/Lucene.Net/Index/FlushPolicy.cs
index 51280b06f..69b45c3c6 100644
--- a/src/Lucene.Net/Index/FlushPolicy.cs
+++ b/src/Lucene.Net/Index/FlushPolicy.cs
@@ -121,7 +121,7 @@ namespace Lucene.Net.Index
// the dwpt which needs to be flushed eventually
ThreadState maxRamUsingThreadState = perThreadState;
if (Debugging.AssertsEnabled)
Debugging.Assert(!perThreadState.flushPending, "DWPT should have flushed");
- IEnumerator<ThreadState> activePerThreadsIterator =
control.AllActiveThreadStates();
+ using IEnumerator<ThreadState> activePerThreadsIterator =
control.AllActiveThreadStates();
while (activePerThreadsIterator.MoveNext())
{
ThreadState next = activePerThreadsIterator.Current;
@@ -158,4 +158,4 @@ namespace Lucene.Net.Index
return clone;
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net/Store/InputStreamDataInput.cs
b/src/Lucene.Net/Store/InputStreamDataInput.cs
index 8b450561b..0a31c4540 100644
--- a/src/Lucene.Net/Store/InputStreamDataInput.cs
+++ b/src/Lucene.Net/Store/InputStreamDataInput.cs
@@ -27,13 +27,33 @@ namespace Lucene.Net.Store
public class InputStreamDataInput : DataInput, IDisposable
{
private readonly Stream _is;
- private int disposed = 0; // LUCENENET specific - allow double-dispose
+ private int disposed; // LUCENENET specific - allow double-dispose
+ private readonly bool leaveOpen; // LUCENENET specific - added to
allow the stream to be left open
+ /// <summary>
+ /// Initializes a new instance of <see cref="InputStreamDataInput"/>
with the specified <paramref name="is"/> (input stream).
+ /// </summary>
+ /// <param name="is">The input stream to read from.</param>
+ /// <exception cref="ArgumentNullException">If <paramref name="is"/>
is <c>null</c>.</exception>
public InputStreamDataInput(Stream @is)
{
this._is = @is ?? throw new ArgumentNullException(nameof(@is)); //
LUCENENET specific - added null guard clause
}
+ /// <inheritdoc cref="InputStreamDataInput(Stream)"/>
+ /// <summary>
+ /// Initializes a new instance of <see cref="InputStreamDataInput"/>
with the specified <paramref name="is"/> (input stream) and <paramref
name="leaveOpen"/> flag.
+ /// </summary>
+ /// <param name="leaveOpen">If <c>true</c>, the stream will not be
disposed when this instance is disposed.</param>
+ /// <remarks>
+ /// LUCENENET specific - added to allow the stream to be left open.
+ /// </remarks>
+ public InputStreamDataInput(Stream @is, bool leaveOpen)
+ : this(@is)
+ {
+ this.leaveOpen = leaveOpen;
+ }
+
public override byte ReadByte()
{
int v = _is.ReadByte();
@@ -71,8 +91,11 @@ namespace Lucene.Net.Store
if (disposing)
{
- _is.Dispose();
+ if (!leaveOpen)
+ {
+ _is.Dispose();
+ }
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net/Store/LockStressTest.cs
b/src/Lucene.Net/Store/LockStressTest.cs
index 2614a6434..1ef271052 100644
--- a/src/Lucene.Net/Store/LockStressTest.cs
+++ b/src/Lucene.Net/Store/LockStressTest.cs
@@ -4,6 +4,7 @@ using System.Globalization;
using System.IO;
using System.Net;
using System.Net.Sockets;
+using System.Text;
using System.Threading;
using Console = Lucene.Net.Util.SystemConsole;
@@ -144,8 +145,8 @@ namespace Lucene.Net.Store
socket.Connect(verifierHost, verifierPort);
using Stream stream = new NetworkStream(socket);
-            BinaryReader intReader = new BinaryReader(stream);
-            BinaryWriter intWriter = new BinaryWriter(stream);
+            using BinaryReader intReader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
+            using BinaryWriter intWriter = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true);
intWriter.Write(myID);
stream.Flush();
diff --git a/src/Lucene.Net/Store/LockVerifyServer.cs
b/src/Lucene.Net/Store/LockVerifyServer.cs
index cf087d754..876da8ebc 100644
--- a/src/Lucene.Net/Store/LockVerifyServer.cs
+++ b/src/Lucene.Net/Store/LockVerifyServer.cs
@@ -6,6 +6,7 @@ using System.Globalization;
using System.IO;
using System.Net;
using System.Net.Sockets;
+using System.Text;
using System.Threading;
using Console = Lucene.Net.Util.SystemConsole;
@@ -90,7 +91,7 @@ namespace Lucene.Net.Store
object localLock = new object();
int[] lockedID = new int[1];
lockedID[0] = -1;
-            CountdownEvent startingGun = new CountdownEvent(1);
+            using CountdownEvent startingGun = new CountdownEvent(1); // LUCENENET specific - dispose when finished
ThreadJob[] threads = new ThreadJob[maxClients];
for (int count = 0; count < maxClients; count++)
@@ -134,8 +135,8 @@ namespace Lucene.Net.Store
public override void Run()
{
using Stream stream = new NetworkStream(cs);
-                BinaryReader intReader = new BinaryReader(stream);
-                BinaryWriter intWriter = new BinaryWriter(stream);
+                using BinaryReader intReader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
+                using BinaryWriter intWriter = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true);
try
{
int id = intReader.ReadInt32();
diff --git a/src/Lucene.Net/Store/OutputStreamDataOutput.cs
b/src/Lucene.Net/Store/OutputStreamDataOutput.cs
index 7a96de6a8..6949ea481 100644
--- a/src/Lucene.Net/Store/OutputStreamDataOutput.cs
+++ b/src/Lucene.Net/Store/OutputStreamDataOutput.cs
@@ -27,13 +27,33 @@ namespace Lucene.Net.Store
public class OutputStreamDataOutput : DataOutput, IDisposable
{
private readonly Stream _os;
-        private int disposed = 0; // LUCENENET specific - allow double-dispose
+        private int disposed; // LUCENENET specific - allow double-dispose
+        private readonly bool leaveOpen; // LUCENENET specific - added to allow the stream to be left open
+        /// <summary>
+        /// Initializes a new instance of <see cref="OutputStreamDataOutput"/> with the specified <paramref name="os"/> (output stream).
+        /// </summary>
+        /// <param name="os">The output stream to write to.</param>
+        /// <exception cref="ArgumentNullException">If <paramref name="os"/> is <c>null</c>.</exception>
public OutputStreamDataOutput(Stream os)
{
             this._os = os ?? throw new ArgumentNullException(nameof(os)); // LUCENENET specific - added null guard clause
}
+        /// <inheritdoc cref="OutputStreamDataOutput(Stream)"/>
+        /// <summary>
+        /// Initializes a new instance of <see cref="OutputStreamDataOutput"/> with the specified <paramref name="os"/> (output stream) and <paramref name="leaveOpen"/> flag.
+        /// </summary>
+        /// <param name="leaveOpen">If <c>true</c>, the stream will not be disposed when this instance is disposed.</param>
+        /// <remarks>
+        /// LUCENENET specific - added to allow the stream to be left open.
+        /// </remarks>
+        public OutputStreamDataOutput(Stream os, bool leaveOpen)
+            : this(os)
+        {
+            this.leaveOpen = leaveOpen;
+        }
+
public override void WriteByte(byte b)
{
_os.WriteByte(b);
@@ -66,8 +86,11 @@ namespace Lucene.Net.Store
if (disposing)
{
-                _os.Dispose();
+                if (!leaveOpen)
+                {
+                    _os.Dispose();
+                }
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net/Util/Constants.cs b/src/Lucene.Net/Util/Constants.cs
index a49694c94..cd30a3e48 100644
--- a/src/Lucene.Net/Util/Constants.cs
+++ b/src/Lucene.Net/Util/Constants.cs
@@ -178,9 +178,10 @@ namespace Lucene.Net.Util
{
const string subkey = @"SOFTWARE\Microsoft\NET Framework
Setup\NDP\v4\Full\";
-            // As an alternative, if you know the computers you will query are running .NET Framework 4.5 
+            // As an alternative, if you know the computers you will query are running .NET Framework 4.5
             // or later, you can use:
-            using RegistryKey ndpKey = RegistryKey.OpenBaseKey(RegistryHive.LocalMachine, RegistryView.Registry32).OpenSubKey(subkey);
+            using RegistryKey baseKey = RegistryKey.OpenBaseKey(RegistryHive.LocalMachine, RegistryView.Registry32);
+            using RegistryKey ndpKey = baseKey.OpenSubKey(subkey);
object releaseValue;
if (ndpKey != null && (releaseValue = ndpKey.GetValue("Release"))
!= null)
{
diff --git a/src/Lucene.Net/Util/Fst/FST.cs b/src/Lucene.Net/Util/Fst/FST.cs
index c10f55aef..d3b85e873 100644
--- a/src/Lucene.Net/Util/Fst/FST.cs
+++ b/src/Lucene.Net/Util/Fst/FST.cs
@@ -511,7 +511,7 @@ namespace Lucene.Net.Util.Fst
@out.WriteByte(1);
// Serialize empty-string output:
- var ros = new RAMOutputStream();
+ using var ros = new RAMOutputStream();
Outputs.WriteFinalOutput(emptyOutput, ros);
                var emptyOutputBytes = new byte[(int)ros.Position]; // LUCENENET specific: Renamed from getFilePointer() to match FileStream