This is an automated email from the ASF dual-hosted git repository.

paulirwin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git


The following commit(s) were added to refs/heads/master by this push:
     new 3c14b14e2 Add comments to empty blocks to address csharpsquid:S108, 
#672 (#1032)
3c14b14e2 is described below

commit 3c14b14e27ccbb326819cdd0def36c48e9a67e2a
Author: Paul Irwin <[email protected]>
AuthorDate: Mon Nov 18 21:37:01 2024 -0700

    Add comments to empty blocks to address csharpsquid:S108, #672 (#1032)
---
 .../Miscellaneous/LimitTokenCountFilter.cs         | 17 +++----
 .../Miscellaneous/LimitTokenPositionFilter.cs      | 15 +++---
 .../Analysis/Miscellaneous/TrimFilter.cs           |  5 +-
 .../Analysis/Sinks/TeeSinkTokenFilter.cs           | 11 +++--
 .../Tartarus/Snowball/Ext/BasqueStemmer.cs         |  6 +--
 .../Tartarus/Snowball/Ext/CatalanStemmer.cs        |  4 +-
 .../Tartarus/Snowball/Ext/DutchStemmer.cs          |  8 ++--
 .../Tartarus/Snowball/Ext/EnglishStemmer.cs        |  6 +--
 .../Tartarus/Snowball/Ext/FrenchStemmer.cs         |  8 ++--
 .../Tartarus/Snowball/Ext/German2Stemmer.cs        |  8 ++--
 .../Tartarus/Snowball/Ext/GermanStemmer.cs         | 56 +++++++++++-----------
 .../Tartarus/Snowball/Ext/ItalianStemmer.cs        |  8 ++--
 .../Tartarus/Snowball/Ext/KpStemmer.cs             | 14 +++---
 .../Tartarus/Snowball/Ext/PorterStemmer.cs         |  6 +--
 .../Tartarus/Snowball/Ext/PortugueseStemmer.cs     |  6 +--
 .../Tartarus/Snowball/Ext/RomanianStemmer.cs       |  8 ++--
 .../Tartarus/Snowball/Ext/SpanishStemmer.cs        |  4 +-
 .../Tartarus/Snowball/Ext/TurkishStemmer.cs        |  4 +-
 .../Util/ToStringUtil.cs                           |  2 +-
 .../Language/Bm/PhoneticEngine.cs                  |  2 +-
 .../Language/Bm/Rule.cs                            |  1 +
 src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs |  7 ++-
 .../Hhmm/BigramDictionary.cs                       | 10 ++--
 .../Hhmm/WordDictionary.cs                         | 32 ++++++-------
 .../Egothor.Stemmer/MultiTrie2.cs                  |  6 ++-
 .../ByTask/Tasks/AnalyzerFactoryTask.cs            |  4 +-
 src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs |  6 +--
 src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs |  2 +
 .../Support/TagSoup/XMLWriter.cs                   |  1 +
 .../SimpleText/SimpleTextStoredFieldsWriter.cs     |  5 +-
 .../SimpleText/SimpleTextTermVectorsWriter.cs      |  3 +-
 .../Highlight/Highlighter.cs                       |  7 +--
 .../VectorHighlight/BaseFragmentsBuilder.cs        |  4 +-
 src/Lucene.Net.QueryParser/Classic/QueryParser.cs  |  1 +
 .../Standard/Parser/StandardSyntaxParser.cs        |  6 +--
 .../Parser/StandardSyntaxParserTokenManager.cs     |  2 +-
 .../Surround/Parser/QueryParser.cs                 |  2 +-
 .../Prefix/AbstractVisitingPrefixTreeFilter.cs     |  4 +-
 src/Lucene.Net.Suggest/Spell/SpellChecker.cs       |  7 ++-
 .../Suggest/Fst/FSTCompletionBuilder.cs            | 34 ++++++-------
 .../Codecs/Lucene3x/Lucene3xTermVectorsReader.cs   |  9 ++--
 src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs  |  3 +-
 .../Codecs/Lucene40/Lucene40StoredFieldsReader.cs  |  5 +-
 .../Codecs/Lucene40/Lucene40StoredFieldsWriter.cs  |  6 ++-
 .../Codecs/Lucene40/Lucene40TermVectorsWriter.cs   |  2 +
 src/Lucene.Net/Index/CheckIndex.cs                 |  5 ++
 src/Lucene.Net/Index/IndexWriter.cs                | 13 ++++-
 src/Lucene.Net/Index/TwoStoredFieldsConsumers.cs   |  5 +-
 src/Lucene.Net/Store/Directory.cs                  |  9 ++--
 src/Lucene.Net/Store/LockStressTest.cs             |  5 +-
 src/Lucene.Net/Store/NativeFSLockFactory.cs        |  4 +-
 src/Lucene.Net/Store/RAMOutputStream.cs            |  3 +-
 src/Lucene.Net/Util/Fst/FST.cs                     |  2 +
 src/Lucene.Net/Util/VirtualMethod.cs               |  5 +-
 54 files changed, 234 insertions(+), 184 deletions(-)

diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilter.cs
 
b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilter.cs
index 6b6792096..e731514f8 100644
--- 
a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilter.cs
+++ 
b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilter.cs
@@ -25,12 +25,12 @@ namespace Lucene.Net.Analysis.Miscellaneous
     /// a replacement for the maximum field length setting inside <see 
cref="Index.IndexWriter"/>.
     /// <para>
     /// By default, this filter ignores any tokens in the wrapped <see 
cref="TokenStream"/>
-    /// once the limit has been reached, which can result in <see 
cref="Reset"/> being 
-    /// called prior to <see cref="IncrementToken"/> returning <c>false</c>.  
For most 
-    /// <see cref="TokenStream"/> implementations this should be acceptable, 
and faster 
-    /// then consuming the full stream. If you are wrapping a <see 
cref="TokenStream"/> 
-    /// which requires that the full stream of tokens be exhausted in order to 
-    /// function properly, use the 
+    /// once the limit has been reached, which can result in <see 
cref="Reset"/> being
+    /// called prior to <see cref="IncrementToken"/> returning <c>false</c>.  
For most
+    /// <see cref="TokenStream"/> implementations this should be acceptable, 
and faster
+    /// then consuming the full stream. If you are wrapping a <see 
cref="TokenStream"/>
+    /// which requires that the full stream of tokens be exhausted in order to
+    /// function properly, use the
     /// <see 
cref="LimitTokenCountFilter.LimitTokenCountFilter(TokenStream,int,bool)"/> 
consumeAllTokens
     /// option.
     /// </para>
@@ -91,8 +91,9 @@ namespace Lucene.Net.Analysis.Miscellaneous
             }
             else
             {
-                while (consumeAllTokens && m_input.IncrementToken()) // NOOP
+                while (consumeAllTokens && m_input.IncrementToken())
                 {
+                    // NOOP
                 }
                 return false;
             }
@@ -105,4 +106,4 @@ namespace Lucene.Net.Analysis.Miscellaneous
             exhausted = false;
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilter.cs
 
b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilter.cs
index 6b680643a..f4942169c 100644
--- 
a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilter.cs
+++ 
b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilter.cs
@@ -26,12 +26,12 @@ namespace Lucene.Net.Analysis.Miscellaneous
     /// are not greater than the configured limit.
     /// <para>
     /// By default, this filter ignores any tokens in the wrapped <see 
cref="TokenStream"/>
-    /// once the limit has been exceeded, which can result in <see 
cref="Reset"/> being 
-    /// called prior to <see cref="IncrementToken"/> returning <c>false</c>.  
For most 
-    /// <see cref="TokenStream"/> implementations this should be acceptable, 
and faster 
+    /// once the limit has been exceeded, which can result in <see 
cref="Reset"/> being
+    /// called prior to <see cref="IncrementToken"/> returning <c>false</c>.  
For most
+    /// <see cref="TokenStream"/> implementations this should be acceptable, 
and faster
     /// then consuming the full stream. If you are wrapping a <see 
cref="TokenStream"/>
-    /// which requires that the full stream of tokens be exhausted in order to 
-    /// function properly, use the 
+    /// which requires that the full stream of tokens be exhausted in order to
+    /// function properly, use the
     /// <see cref="LimitTokenPositionFilter(TokenStream,int,bool)"/> 
consumeAllTokens
     /// option.
     /// </para>
@@ -91,8 +91,9 @@ namespace Lucene.Net.Analysis.Miscellaneous
                 }
                 else
                 {
-                    while (consumeAllTokens && m_input.IncrementToken()) // 
NOOP
+                    while (consumeAllTokens && m_input.IncrementToken())
                     {
+                        // NOOP
                     }
                     exhausted = true;
                     return false;
@@ -112,4 +113,4 @@ namespace Lucene.Net.Analysis.Miscellaneous
             exhausted = false;
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs 
b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs
index ee0dfdf80..b90e137f6 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs
@@ -39,7 +39,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
         /// <param name="version">       the Lucene match version </param>
         /// <param name="in">            the stream to consume </param>
         /// <param name="updateOffsets"> whether to update offsets </param>
-        /// @deprecated Offset updates are not supported anymore as of Lucene 
4.4. 
+        /// @deprecated Offset updates are not supported anymore as of Lucene 
4.4.
         [Obsolete("Offset updates are not supported anymore as of Lucene 
4.4.")]
         public TrimFilter(LuceneVersion version, TokenStream @in, bool 
updateOffsets)
             : base(@in)
@@ -84,6 +84,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             // eat the first characters
             for (start = 0; start < len && 
char.IsWhiteSpace(termBuffer[start]); start++)
             {
+                // LUCENENET: intentionally empty
             }
             // eat the end characters
             for (end = len; end >= start && char.IsWhiteSpace(termBuffer[end - 
1]); end--)
@@ -111,4 +112,4 @@ namespace Lucene.Net.Analysis.Miscellaneous
             return true;
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs 
b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
index 03f694210..14e3e2610 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
@@ -35,16 +35,16 @@ namespace Lucene.Net.Analysis.Sinks
     /// TeeSinkTokenFilter source1 = new TeeSinkTokenFilter(new 
WhitespaceTokenizer(version, reader1));
     /// TeeSinkTokenFilter.SinkTokenStream sink1 = 
source1.NewSinkTokenStream();
     /// TeeSinkTokenFilter.SinkTokenStream sink2 = 
source1.NewSinkTokenStream();
-    /// 
+    ///
     /// TeeSinkTokenFilter source2 = new TeeSinkTokenFilter(new 
WhitespaceTokenizer(version, reader2));
     /// source2.AddSinkTokenStream(sink1);
     /// source2.AddSinkTokenStream(sink2);
-    /// 
+    ///
     /// TokenStream final1 = new LowerCaseFilter(version, source1);
     /// TokenStream final2 = source2;
     /// TokenStream final3 = new EntityDetect(sink1);
     /// TokenStream final4 = new URLDetect(sink2);
-    /// 
+    ///
     /// d.Add(new TextField("f1", final1, Field.Store.NO));
     /// d.Add(new TextField("f2", final2, Field.Store.NO));
     /// d.Add(new TextField("f3", final3, Field.Store.NO));
@@ -131,6 +131,7 @@ namespace Lucene.Net.Analysis.Sinks
         {
             while (IncrementToken())
             {
+                // LUCENENET: intentionally empty
             }
         }
 
@@ -182,7 +183,7 @@ namespace Lucene.Net.Analysis.Sinks
         {
             /// <summary>
             /// Returns true, iff the current state of the passed-in <see 
cref="AttributeSource"/> shall be stored
-            /// in the sink. 
+            /// in the sink.
             /// </summary>
             public abstract bool Accept(AttributeSource source);
 
@@ -271,4 +272,4 @@ namespace Lucene.Net.Analysis.Sinks
             }
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/BasqueStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/BasqueStemmer.cs
index d3a2fba7d..ced24409d 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/BasqueStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/BasqueStemmer.cs
@@ -1007,7 +1007,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab2:
                 m_cursor = m_limit - v_2;
                 goto replab1;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab1:
             // repeat, line 143
@@ -1036,7 +1036,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab4:
                 m_cursor = m_limit - v_3;
                 goto replab3;
-                end_of_outer_loop_2: { }
+                end_of_outer_loop_2: { /* LUCENENET: intentionally empty */ }
             }
             replab3:
             // do, line 144
@@ -1066,4 +1066,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/CatalanStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/CatalanStemmer.cs
index bce4bc1fd..0e868af31 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/CatalanStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/CatalanStemmer.cs
@@ -840,7 +840,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -1162,4 +1162,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/DutchStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/DutchStemmer.cs
index 43942bf3f..c7425b40f 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/DutchStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/DutchStemmer.cs
@@ -191,7 +191,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_2;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             m_cursor = v_1;
@@ -301,7 +301,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab4:
                 m_cursor = v_4;
                 goto replab3;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab3:
             return true;
@@ -475,7 +475,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -1015,4 +1015,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/EnglishStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/EnglishStemmer.cs
index 849722981..857e02c2f 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/EnglishStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/EnglishStemmer.cs
@@ -321,7 +321,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     lab4:
                     m_cursor = v_4;
                     goto replab3;
-                    end_of_outer_loop: { }
+                    end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
                 }
                 replab3: {/* LUCENENET: intentionally blank */}
             } while (false);
@@ -1318,7 +1318,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -1553,4 +1553,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/FrenchStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/FrenchStemmer.cs
index c883fab5d..b53f1eb81 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/FrenchStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/FrenchStemmer.cs
@@ -395,7 +395,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -655,7 +655,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -1522,7 +1522,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     } while (false);
                     lab1:
                     goto replab0;
-                    end_of_outer_loop: { }
+                    end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
                 }
                 replab0:
                 if (v_1 > 0)
@@ -1773,4 +1773,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/German2Stemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/German2Stemmer.cs
index 0337fd758..2492e943a 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/German2Stemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/German2Stemmer.cs
@@ -210,7 +210,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_2;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             m_cursor = v_1;
@@ -292,7 +292,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab7:
                 m_cursor = v_5;
                 goto replab6;
-                end_of_outer_loop_2: { }
+                end_of_outer_loop_2: { /* LUCENENET: intentionally empty */ }
             }
             replab6:
             return true;
@@ -497,7 +497,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -893,4 +893,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/GermanStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/GermanStemmer.cs
index ef20dc014..9786a358f 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/GermanStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/GermanStemmer.cs
@@ -112,20 +112,20 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             // test, line 30
             v_1 = m_cursor;
             // repeat, line 30
-            
+
             while (true)
             {
                 v_2 = m_cursor;
-                
+
                 do
                 {
                     // (, line 30
                     // or, line 33
-                    
+
                     do
                     {
                         v_3 = m_cursor;
-                        
+
                         do
                         {
                             // (, line 31
@@ -160,29 +160,29 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     //continue replab0;
 
                     goto end_of_outer_loop;
-                    
+
                 } while (false);
                 lab1:
                 m_cursor = v_2;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             m_cursor = v_1;
             // repeat, line 36
-           
+
             while (true)
             {
                 v_4 = m_cursor;
-                
+
                 do
                 {
                     // goto, line 36
-                    
+
                     while (true)
                     {
                         v_5 = m_cursor;
-                        
+
                         do
                         {
                             // (, line 36
@@ -193,11 +193,11 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                             // [, line 37
                             m_bra = m_cursor;
                             // or, line 37
-                            
+
                             do
                             {
                                 v_6 = m_cursor;
-                                
+
                                 do
                                 {
                                     // (, line 37
@@ -254,12 +254,12 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     //continue replab4;
 
                     goto end_of_outer_loop_2;
-                    
+
                 } while (false);
                 lab5:
                 m_cursor = v_4;
                 goto replab4;
-                end_of_outer_loop_2: { }
+                end_of_outer_loop_2: { /* LUCENENET: intentionally empty */ }
             }
             replab4:
             return true;
@@ -287,10 +287,10 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             I_x = m_cursor;
             m_cursor = v_1;
             // gopast, line 49
-            
+
             while (true)
             {
-                
+
                 do
                 {
                     if (!(InGrouping(g_v, 97, 252)))
@@ -308,10 +308,10 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             }
             golab0:
             // gopast, line 49
-            
+
             while (true)
             {
-                
+
                 do
                 {
                     if (!(OutGrouping(g_v, 97, 252)))
@@ -331,7 +331,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             // setmark p1, line 49
             I_p1 = m_cursor;
             // try, line 50
-            
+
             do
             {
                 // (, line 50
@@ -343,7 +343,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             } while (false);
             lab4:
             // gopast, line 51
-            
+
             while (true)
             {
                 do
@@ -363,10 +363,10 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             }
             golab5:
             // gopast, line 51
-            
+
             while (true)
             {
-                
+
                 do
                 {
                     if (!(OutGrouping(g_v, 97, 252)))
@@ -393,11 +393,11 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             int among_var;
             int v_1;
             // repeat, line 55
-            
+
             while (true)
             {
                 v_1 = m_cursor;
-                
+
                 do
                 {
                     // (, line 55
@@ -458,12 +458,12 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     //continue replab0;
 
                     goto end_of_outer_loop;
-                    
+
                 } while (false);
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -777,7 +777,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return true;
         }
 
-        
+
                 public override bool Stem()
         {
             int v_1;
@@ -848,4 +848,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/ItalianStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/ItalianStemmer.cs
index 4f1c68819..b30c8ed8b 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/ItalianStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/ItalianStemmer.cs
@@ -364,7 +364,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_2;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             m_cursor = v_1;
@@ -452,7 +452,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab3:
                 m_cursor = v_3;
                 goto replab2;
-                end_of_outer_loop_2: { }
+                end_of_outer_loop_2: { /* LUCENENET: intentionally empty */ }
             }
             replab2:
             return true;
@@ -757,7 +757,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -1346,4 +1346,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/KpStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/KpStemmer.cs
index 170c04da5..3db3443e6 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/KpStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/KpStemmer.cs
@@ -1905,7 +1905,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     } while (false);
                     lab3:
                     goto replab2;
-                    end_of_outer_loop: { }
+                    end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
                 }
                 replab2:
                 // atleast, line 209
@@ -1953,7 +1953,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                         lab5:
                         m_cursor = v_5;
                         goto replab4;
-                        end_of_outer_loop_2: { }
+                        end_of_outer_loop_2: { /* LUCENENET: intentionally 
empty */ }
                     }
                     replab4:
                     if (v_4 > 0)
@@ -1988,7 +1988,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     } while (false);
                     lab9:
                     goto replab8;
-                    end_of_outer_loop: { }
+                    end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
                 }
                 replab8:
                 // atleast, line 210
@@ -2036,7 +2036,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                         lab11:
                         m_cursor = v_9;
                         goto replab10;
-                        end_of_outer_loop_2: { }
+                        end_of_outer_loop_2: { /* LUCENENET: intentionally 
empty */ }
                     }
                     replab10:
                     if (v_8 > 0)
@@ -2164,7 +2164,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     lab3:
                     m_cursor = v_3;
                     goto replab2;
-                    end_of_outer_loop: { }
+                    end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
                 }
                 replab2: {/* LUCENENET: intentionally blank */}
             } while (false);
@@ -2437,7 +2437,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     lab20:
                     m_cursor = v_19;
                     goto replab19;
-                    end_of_outer_loop_2: { }
+                    end_of_outer_loop_2: { /* LUCENENET: intentionally empty 
*/ }
                 }
                 replab19: {/* LUCENENET: intentionally blank */}
             } while (false);
@@ -2456,4 +2456,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PorterStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PorterStemmer.cs
index b0cc8acb0..d2554537d 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PorterStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PorterStemmer.cs
@@ -778,7 +778,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     lab3:
                     m_cursor = v_3;
                     goto replab2;
-                    end_of_outer_loop: { }
+                    end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
                 }
                 replab2: {/* LUCENENET: intentionally blank */}
             } while (false);
@@ -1033,7 +1033,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     lab25:
                     m_cursor = v_19;
                     goto replab24;
-                    end_of_outer_loop_2: { }
+                    end_of_outer_loop_2: { /* LUCENENET: intentionally empty 
*/ }
                 }
                 replab24: {/* LUCENENET: intentionally blank */}
             } while (false);
@@ -1052,4 +1052,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PortugueseStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PortugueseStemmer.cs
index f141f92b1..35ea03e21 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PortugueseStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PortugueseStemmer.cs
@@ -331,7 +331,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -636,7 +636,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -1269,4 +1269,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/RomanianStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/RomanianStemmer.cs
index 671558282..c7ebe4d10 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/RomanianStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/RomanianStemmer.cs
@@ -390,7 +390,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -695,7 +695,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -901,7 +901,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = m_limit - v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             // [, line 132
@@ -1186,4 +1186,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/SpanishStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/SpanishStemmer.cs
index 2392e8faa..c0fc4f2aa 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/SpanishStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/SpanishStemmer.cs
@@ -600,7 +600,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 lab1:
                 m_cursor = v_1;
                 goto replab0;
-                end_of_outer_loop: { }
+                end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
             }
             replab0:
             return true;
@@ -1320,4 +1320,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git 
a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/TurkishStemmer.cs 
b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/TurkishStemmer.cs
index a94f62503..7100948ad 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/TurkishStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/TurkishStemmer.cs
@@ -3518,7 +3518,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                     lab1:
                     m_cursor = v_3;
                     goto replab0;
-                    end_of_outer_loop: { }
+                    end_of_outer_loop: { /* LUCENENET: intentionally empty */ }
                 }
                 replab0:
                 if (v_2 > 0)
@@ -3739,4 +3739,4 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
             return this.GetType().FullName.GetHashCode();
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs 
b/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs
index 86de6d408..e25e6ffe7 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs
@@ -1391,7 +1391,7 @@ namespace Lucene.Net.Analysis.Ja.Util
                         builder.Append(ch);
                         break;
                 }
-                break_main: { }
+                break_main: { /* LUCENENET: intentionally empty */ }
             }
         }
     }
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs 
b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs
index 4db2ee385..3950e7559 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs
@@ -153,7 +153,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                         }
                     }
                 }
-            EXPR_break: { }
+            EXPR_break: { /* LUCENENET: intentionally empty */}
 
                 this.phonemes.Clear();
                 this.phonemes.UnionWith(newPhonemes);
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs 
b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
index fb2b53fbd..7e30152f7 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
@@ -338,6 +338,7 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm
                 nameTypes.TryGetValue(rt, out var ruleTypes) && ruleTypes != 
null &&
                 ruleTypes.TryGetValue(lang, out var rules) && rules != null)
             {
+                // LUCENENET: intentionally empty
             }
             else
             {
diff --git a/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs 
b/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs
index e8287a742..c4a27e420 100644
--- a/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs
@@ -76,9 +76,12 @@ namespace Lucene.Net.Analysis.Phonetic
                     phonetic = v;
                 }
             }
-            catch (Exception ignored) when (ignored.IsException()) { } // just use the direct text
+            catch (Exception ignored) when (ignored.IsException())
+            {
+                // just use the direct text
+            }
 
-                if (phonetic is null) return true;
+            if (phonetic is null) return true;
 
             if (!inject)
             {
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs 
b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs
index 457770bfa..b9d16273a 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs
@@ -43,7 +43,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         public const int PRIME_BIGRAM_LENGTH = 402137;
 
         /// <summary>
-        /// The word associations are stored as FNV1 hashcodes, which have a 
small probability of collision, but save memory.  
+        /// The word associations are stored as FNV1 hashcodes, which have a 
small probability of collision, but save memory.
         /// </summary>
         private long[] bigramHashTable;
 
@@ -107,11 +107,11 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         // The data in Lucene is stored in a proprietary binary format 
(similar to
         // .NET's BinarySerializer) that cannot be read back in .NET. 
Therefore, the
         // data was extracted using Java's DataOutputStream using the 
following Java code.
-        // It can then be read in using the LoadFromInputStream method below 
+        // It can then be read in using the LoadFromInputStream method below
         // (using a DataInputStream instead of a BinaryReader), and saved
         // in the correct (BinaryWriter) format by calling the SaveToObj 
method.
         // Alternatively, the data can be loaded from disk using the files
-        // here(https://issues.apache.org/jira/browse/LUCENE-1629) in the 
analysis.data.zip file, 
+        // here(https://issues.apache.org/jira/browse/LUCENE-1629) in the 
analysis.data.zip file,
         // which will automatically produce the .mem files.
 
         //public void saveToOutputStream(java.io.DataOutputStream stream) 
throws IOException
@@ -227,7 +227,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
 
             if (serialObj.Exists && LoadFromObj(serialObj))
             {
-
+                // LUCENENET: intentionally empty
             }
             else
             {
@@ -259,7 +259,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         public virtual void LoadFromFile(string dctFilePath)
         {
             int i, cnt, length, total = 0;
-            // The file only counted 6763 Chinese characters plus 5 reserved 
slots 3756~3760.  
+            // The file only counted 6763 Chinese characters plus 5 reserved 
slots 3756~3760.
             // The 3756th is used (as a header) to store information.
             int[]
             buffer = new int[3];
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs 
b/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs
index cb609ff1c..b8cd7cbbf 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs
@@ -45,9 +45,9 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         public const int PRIME_INDEX_LENGTH = 12071;
 
         /// <summary>
-        /// wordIndexTable guarantees to hash all Chinese characters in 
Unicode into 
-        /// PRIME_INDEX_LENGTH array. There will be conflict, but in reality 
this 
-        /// program only handles the 6768 characters found in GB2312 plus some 
+        /// wordIndexTable guarantees to hash all Chinese characters in 
Unicode into
+        /// PRIME_INDEX_LENGTH array. There will be conflict, but in reality 
this
+        /// program only handles the 6768 characters found in GB2312 plus some
         /// ASCII characters. Therefore in order to guarantee better 
precision, it is
         /// necessary to retain the original symbol in the charIndexTable.
         /// </summary>
@@ -56,13 +56,13 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         private char[] charIndexTable;
 
         /// <summary>
-        /// To avoid taking too much space, the data structure needed to store 
the 
+        /// To avoid taking too much space, the data structure needed to store 
the
         /// lexicon requires two multidimensional arrays to store word and 
frequency.
-        /// Each word is placed in a char[]. Each char represents a Chinese 
char or 
-        /// other symbol.  Each frequency is put into an int. These two arrays 
-        /// correspond to each other one-to-one. Therefore, one can use 
-        /// wordItem_charArrayTable[i][j] to look up word from lexicon, and 
-        /// wordItem_frequencyTable[i][j] to look up the corresponding 
frequency. 
+        /// Each word is placed in a char[]. Each char represents a Chinese 
char or
+        /// other symbol.  Each frequency is put into an int. These two arrays
+        /// correspond to each other one-to-one. Therefore, one can use
+        /// wordItem_charArrayTable[i][j] to look up word from lexicon, and
+        /// wordItem_frequencyTable[i][j] to look up the corresponding 
frequency.
         /// </summary>
         private char[][][] wordItem_charArrayTable;
 
@@ -119,7 +119,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
 
             if (serialObj.Exists && LoadFromObj(serialObj))
             {
-
+                // LUCENENET: intentionally empty
             }
             else
             {
@@ -179,11 +179,11 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         // The data in Lucene is stored in a proprietary binary format 
(similar to
         // .NET's BinarySerializer) that cannot be read back in .NET. 
Therefore, the
         // data was extracted using Java's DataOutputStream using the 
following Java code.
-        // It can then be read in using the LoadFromInputStream method below 
+        // It can then be read in using the LoadFromInputStream method below
         // (using a DataInputStream instead of a BinaryReader), and saved
         // in the correct (BinaryWriter) format by calling the SaveToObj 
method.
         // Alternatively, the data can be loaded from disk using the files
-        // here(https://issues.apache.org/jira/browse/LUCENE-1629) in the 
analysis.data.zip file, 
+        // here(https://issues.apache.org/jira/browse/LUCENE-1629) in the 
analysis.data.zip file,
         // which will automatically produce the .mem files.
 
         //public void saveToOutputStream(java.io.DataOutputStream stream) 
throws IOException
@@ -415,7 +415,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         }
 
         /// <summary>
-        /// The original lexicon puts all information with punctuation into a 
+        /// The original lexicon puts all information with punctuation into a
         /// chart (from 1 to 3755). Here it then gets expanded, separately 
being
         /// placed into the chart that has the corresponding symbol.
         /// </summary>
@@ -423,8 +423,8 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         {
             int i;
             int cnt;
-            // Punctuation then treating index 3755 as 1, 
-            // distribute the original punctuation corresponding dictionary 
into 
+            // Punctuation then treating index 3755 as 1,
+            // distribute the original punctuation corresponding dictionary 
into
             int delimiterIndex = 3755 + GB2312_FIRST_CHAR;
             i = 0;
             while (i < wordItem_charArrayTable[delimiterIndex].Length)
@@ -546,7 +546,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
         }
 
         /// <summary>
-        /// Calculate character <paramref name="c"/>'s position in hash table, 
+        /// Calculate character <paramref name="c"/>'s position in hash table,
         /// then initialize the value of that position in the address table.
         /// </summary>
         private bool SetTableIndex(char c, int j)
diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs 
b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs
index d84551ca8..58a0f9271 100644
--- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs
+++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie2.cs
@@ -144,7 +144,8 @@ namespace Egothor.Stemmer
                     }
                 }
             }
-            catch (Exception x) when (x.IsIndexOutOfBoundsException()) { }
+            catch (Exception x) when (x.IsIndexOutOfBoundsException()) { /* ignored */ }
+
             return result.ToString();
         }
 
@@ -202,7 +203,8 @@ namespace Egothor.Stemmer
                     }
                 }
             }
-            catch (Exception x) when (x.IsIndexOutOfBoundsException()) { }
+            catch (Exception x) when (x.IsIndexOutOfBoundsException()) { /* ignored */ }
+
             return result.ToString();
         }
 
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs 
b/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs
index 8b4892f7c..b950a4903 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs
@@ -371,7 +371,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
         /// <summary>
         /// Instantiates the given analysis factory class after pulling params 
from
         /// the given stream tokenizer, then stores the result in the 
appropriate
-        /// pipeline component list.        
+        /// pipeline component list.
         /// </summary>
         /// <param name="stok">Stream tokenizer from which to draw analysis 
factory params.</param>
         /// <param name="clazz">Analysis factory class to instantiate.</param>
@@ -468,7 +468,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks
                             }
                     }
                 }
-            WHILE_LOOP_BREAK: { }
+                WHILE_LOOP_BREAK: { /* LUCENENET: intentionally empty */ }
 
                 if (!argMap.ContainsKey("luceneMatchVersion"))
                 {
diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs 
b/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs
index 56162161a..fc917c995 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs
@@ -40,8 +40,8 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils
 
         /// <summary>
         /// Read algorithm from file.
-        /// Property examined: alt.tasks.packages == comma separated list of 
-        /// alternate Assembly names where tasks would be searched for, when 
not found 
+        /// Property examined: alt.tasks.packages == comma separated list of
+        /// alternate Assembly names where tasks would be searched for, when 
not found
         /// in the default Assembly (that of <see cref="PerfTask"/>).
         /// If the same task class appears in more than one Assembly, the 
Assembly
         /// indicated first in this list will be used.
@@ -163,7 +163,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils
                                     }
                                     stok.NextToken();
                                 }
-                                BALANCED_PARENS_BREAK: { }
+                                BALANCED_PARENS_BREAK: { /* LUCENENET: intentionally empty */ }
                             }
                             stok.EndOfLineIsSignificant = false;
                             string prm = @params.ToString().Trim();
diff --git a/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs 
b/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs
index e788e211e..3498f3920 100644
--- a/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs
+++ b/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs
@@ -962,6 +962,7 @@ namespace TagSoup
                 }
                 catch (Exception ioe) when (ioe.IsIOException())
                 {
+                    // ignored
                 } // Can't be thrown for root I believe.
             }
             if (Foreign(prefix, ns))
@@ -1094,6 +1095,7 @@ namespace TagSoup
                     }
                     catch (Exception)
                     {
+                        // ignored
                     }
                 }
             }
diff --git a/src/Lucene.Net.Benchmark/Support/TagSoup/XMLWriter.cs 
b/src/Lucene.Net.Benchmark/Support/TagSoup/XMLWriter.cs
index 35b30f587..9e77f92b7 100644
--- a/src/Lucene.Net.Benchmark/Support/TagSoup/XMLWriter.cs
+++ b/src/Lucene.Net.Benchmark/Support/TagSoup/XMLWriter.cs
@@ -1225,6 +1225,7 @@ namespace TagSoup
             }
             for (; prefix is null || nsSupport.GetUri(prefix) != null; prefix 
= "__NS" + ++prefixCounter)
             {
+                // LUCENENET: intentionally empty
             }
             nsSupport.DeclarePrefix(prefix, uri);
             doneDeclTable[uri] = prefix;
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsWriter.cs 
b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsWriter.cs
index dde879d23..61f2eb785 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsWriter.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsWriter.cs
@@ -112,7 +112,7 @@ namespace Lucene.Net.Codecs.SimpleText
 
             // LUCENENET specific - To avoid boxing/unboxing, we don't
             // call GetNumericValue(). Instead, we check the field.NumericType 
and then
-            // call the appropriate conversion method. 
+            // call the appropriate conversion method.
             if (field.NumericType != NumericFieldType.NONE)
             {
                 switch (field.NumericType)
@@ -192,6 +192,7 @@ namespace Lucene.Net.Codecs.SimpleText
             }
             catch (Exception ignored) when (ignored.IsThrowable())
             {
+                // ignored
             }
             IOUtils.DeleteFilesIgnoringExceptions(_directory,
                     IndexFileNames.SegmentFileName(_segment, "", 
FIELDS_EXTENSION));
@@ -238,4 +239,4 @@ namespace Lucene.Net.Codecs.SimpleText
             SimpleTextUtil.WriteNewline(_output);
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs 
b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs
index 801185317..284fd756e 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs
@@ -185,6 +185,7 @@ namespace Lucene.Net.Codecs.SimpleText
             }
             catch (Exception t) when (t.IsThrowable())
             {
+                // ignored
             }
             IOUtils.DeleteFilesIgnoringExceptions(_directory,
                     IndexFileNames.SegmentFileName(_segment, "", 
VECTORS_EXTENSION));
@@ -234,4 +235,4 @@ namespace Lucene.Net.Codecs.SimpleText
             SimpleTextUtil.WriteNewline(_output);
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs 
b/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs
index d73ef5869..fae80ddc6 100644
--- a/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs
@@ -256,7 +256,7 @@ namespace Lucene.Net.Search.Highlight
                     lastEndOffset = Math.Max(lastEndOffset, endOffset);
                 }
 
-                //Test what remains of the original text beyond the point 
where we stopped analyzing 
+                //Test what remains of the original text beyond the point 
where we stopped analyzing
                 if (
                     //                    if there is text beyond the last 
token considered..
                     (lastEndOffset < text.Length)
@@ -332,6 +332,7 @@ namespace Lucene.Net.Search.Highlight
                     }
                     catch (Exception e) when (e.IsException())
                     {
+                        // ignored
                     }
                 }
             }
@@ -340,7 +341,7 @@ namespace Lucene.Net.Search.Highlight
         /// <summary>
         /// Improves readability of a score-sorted list of TextFragments by 
merging any fragments
         /// that were contiguous in the original text into one larger fragment 
with the correct order.
-        /// This will leave a "null" in the array entry for the lesser scored 
fragment. 
+        /// This will leave a "null" in the array entry for the lesser scored 
fragment.
         /// </summary>
         /// <param name="frag">An array of document fragments in descending 
score</param>
         private static void MergeContiguousFragments(TextFragment[] frag) // 
LUCENENET: CA1822: Mark members as static
@@ -357,7 +358,7 @@ namespace Lucene.Net.Search.Highlight
                         {
                             continue;
                         }
-                        //merge any contiguous blocks 
+                        //merge any contiguous blocks
                         for (int x = 0; x < frag.Length; x++)
                         {
                             if (frag[x] is null)
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs 
b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
index 070a97ffc..6c4f0003c 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
@@ -339,7 +339,7 @@ namespace Lucene.Net.Search.VectorHighlight
                     WeightedFragInfo weightedFragInfo = new 
WeightedFragInfo(fragStart, fragEnd, subInfos, boost);
                     fieldNameToFragInfos[field.Name].Add(weightedFragInfo);
                 }
-            fragInfos_continue: { }
+                fragInfos_continue: { /* LUCENENET: intentionally empty */ }
             }
 
             JCG.List<WeightedFragInfo> result = new 
JCG.List<WeightedFragInfo>();
@@ -351,7 +351,7 @@ namespace Lucene.Net.Search.VectorHighlight
 
             return result;
         }
-        
+
         public virtual char MultiValuedSeparator
         {
             get => multiValuedSeparator;
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs 
b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
index c87d0acbc..adf028bef 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
@@ -934,6 +934,7 @@ namespace Lucene.Net.QueryParsers.Classic
                 }
                 catch (LookaheadSuccess)
                 {
+                    // ignored
                 }
             }
             jj_rescan = false;
diff --git 
a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs 
b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs
index 148b25ebe..e6ab1fa20 100644
--- 
a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs
+++ 
b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs
@@ -1201,7 +1201,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
                             jj_expentries.Add(jj_expentry);
                             goto jj_entries_loop_break;
                         }
-                        jj_entries_loop_continue: { }
+                        jj_entries_loop_continue: { /* LUCENENET: intentionally empty */ }
                     }
                     jj_entries_loop_break:
                         if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = 
kind;
@@ -1291,9 +1291,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
                         p = p.next;
                     } while (p != null);
                 }
-#pragma warning disable 168
-                catch (LookaheadSuccess ls) { }
-#pragma warning restore 168
+                catch (LookaheadSuccess) { /* ignored */ }
             }
             jj_rescan = false;
         }
diff --git 
a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs
 
b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs
index 660dad827..420f7674e 100644
--- 
a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs
+++ 
b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs
@@ -943,7 +943,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
                 }
                 throw new TokenMgrError(EOFSeen, curLexState, error_line, 
error_column, error_after, m_curChar, TokenMgrError.LEXICAL_ERROR);
 
-                EOFLoop_continue: { }
+                EOFLoop_continue: { /* LUCENENET: intentionally empty */ }
             }
         }
 
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs 
b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
index 36c511ee0..35c2a45ef 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
@@ -897,7 +897,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
                         p = p.next;
                     } while (p != null);
                 }
-                catch (LookaheadSuccess /*ls*/) { }
+                catch (LookaheadSuccess /*ls*/) { /* ignored */ }
             }
             jj_rescan = false;
         }
diff --git a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs 
b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs
index 534273dff..5bae4bd0d 100644
--- a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs
+++ b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs
@@ -239,7 +239,7 @@ namespace Lucene.Net.Spatial.Prefix
                     }
                 }
             }//main loop
-            main_break: { }
+            main_break: { /* LUCENENET: intentionally empty */ }
 
             return Finish();
         }
@@ -514,4 +514,4 @@ namespace Lucene.Net.Spatial.Prefix
         #endregion Nested type: VNode
 
     } //class VisitorTemplate
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Suggest/Spell/SpellChecker.cs 
b/src/Lucene.Net.Suggest/Spell/SpellChecker.cs
index 0f7b2d42b..d23a02f0a 100644
--- a/src/Lucene.Net.Suggest/Spell/SpellChecker.cs
+++ b/src/Lucene.Net.Suggest/Spell/SpellChecker.cs
@@ -458,7 +458,10 @@ namespace Lucene.Net.Search.Spell
                 var dir = this.spellIndex;
 #pragma warning disable 612, 618
                 using (var writer = new IndexWriter(dir, new 
IndexWriterConfig(LuceneVersion.LUCENE_CURRENT, null)
-                    { OpenMode = OpenMode.CREATE })) { }
+                           { OpenMode = OpenMode.CREATE }))
+                {
+                    // LUCENENET: intentionally empty, replaces .close()
+                }
 #pragma warning restore 612, 618
                 SwapSearcher(dir);
             }
@@ -765,4 +768,4 @@ namespace Lucene.Net.Search.Spell
         ///         disposed, otherwise <c>false</c>. </returns>
         internal virtual bool IsDisposed => disposed;
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs 
b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs
index 178f07b74..7e1adac67 100644
--- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs
@@ -24,9 +24,9 @@ namespace Lucene.Net.Search.Suggest.Fst
 
     /// <summary>
     /// Finite state automata based implementation of "autocomplete" 
functionality.
-    /// 
+    ///
     /// <h2>Implementation details</h2>
-    /// 
+    ///
     /// <para>
     /// The construction step in the object finalizer works as follows:
     /// <list type="bullet">
@@ -41,10 +41,10 @@ namespace Lucene.Net.Search.Suggest.Fst
     /// root node has arcs labeled with all possible weights. We cache all 
these
     /// arcs, highest-weight first.</description></item>
     /// </list>
-    /// 
+    ///
     /// </para>
     /// <para>
-    /// At runtime, in <see cref="FSTCompletion.DoLookup(string, int)"/>, 
+    /// At runtime, in <see cref="FSTCompletion.DoLookup(string, int)"/>,
     /// the automaton is utilized as follows:
     /// <list type="bullet">
     /// <item><description>For each possible term weight encoded in the 
automaton (cached arcs from
@@ -63,43 +63,43 @@ namespace Lucene.Net.Search.Suggest.Fst
     /// insufficient, we proceed to the next (smaller) weight leaving the root 
node
     /// and repeat the same algorithm again.</description></item>
     /// </list>
-    /// 
+    ///
     /// <h2>Runtime behavior and performance characteristic</h2>
-    /// 
+    ///
     /// The algorithm described above is optimized for finding suggestions to 
short
     /// prefixes in a top-weights-first order. This is probably the most 
common use
     /// case: it allows presenting suggestions early and sorts them by the 
global
     /// frequency (and then alphabetically).
-    /// 
+    ///
     /// </para>
     /// <para>
     /// If there is an exact match in the automaton, it is returned first on 
the
     /// results list (even with by-weight sorting).
-    /// 
+    ///
     /// </para>
     /// <para>
     /// Note that the maximum lookup time for <b>any prefix</b> is the time of
     /// descending to the subtree, plus traversal of the subtree up to the 
number of
     /// requested suggestions (because they are already presorted by weight on 
the
     /// root level and alphabetically at any node level).
-    /// 
+    ///
     /// </para>
     /// <para>
     /// To order alphabetically only (no ordering by priorities), use 
identical term
     /// weights for all terms. Alphabetical suggestions are returned even if
     /// non-constant weights are used, but the algorithm for doing this is
     /// suboptimal.
-    /// 
+    ///
     /// </para>
     /// <para>
     /// "alphabetically" in any of the documentation above indicates UTF-8
     /// representation order, nothing else.
-    /// 
+    ///
     /// </para>
     /// <para>
     /// <b>NOTE</b>: the FST file format is experimental and subject to 
suddenly
     /// change, requiring you to rebuild the FST suggest index.
-    /// 
+    ///
     /// </para>
     /// </summary>
     /// <seealso cref="FSTCompletion"/>
@@ -118,7 +118,7 @@ namespace Lucene.Net.Search.Suggest.Fst
         /// highly-weighted completions (because these are filled-in first), 
but will
         /// decrease significantly for low-weighted terms (but these should be
         /// infrequent, so it is all right).
-        /// 
+        ///
         /// <para>
         /// The number of buckets must be within [1, 255] range.
         /// </para>
@@ -173,7 +173,7 @@ namespace Lucene.Net.Search.Suggest.Fst
         /// </param>
         /// <param name="shareMaxTailLength">
         ///          Max shared suffix sharing length.
-        ///          
+        ///
         ///          See the description of this parameter in <see 
cref="Builder"/>'s constructor.
         ///          In general, for very large inputs you'll want to 
construct a non-minimal
         ///          automaton which will be larger, but the construction will 
take far less ram.
@@ -232,9 +232,9 @@ namespace Lucene.Net.Search.Suggest.Fst
             this.automaton = BuildAutomaton(sorter);
 
             // Dispose of it if it is a disposable
-            using (sorter as IDisposable)
+            if (sorter is IDisposable disposable)
             {
-
+                disposable.Dispose();
             }
 
             return new FSTCompletion(automaton);
@@ -269,4 +269,4 @@ namespace Lucene.Net.Search.Suggest.Fst
             return count == 0 ? null : builder.Finish();
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs 
b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
index 3e8a93783..4785a8f6f 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
@@ -183,9 +183,10 @@ namespace Lucene.Net.Codecs.Lucene3x
                     try
                     {
                         Dispose();
-                    } // keep our original exception
+                    }
                     catch (Exception t) when (t.IsThrowable())
                     {
+                        // keep our original exception
                     }
                 }
             }
@@ -223,7 +224,7 @@ namespace Lucene.Net.Codecs.Lucene3x
         }
 
         /// <summary>
-        /// The number of documents in the reader. 
+        /// The number of documents in the reader.
         /// <para/>
         /// NOTE: This was size() in Lucene.
         /// </summary>
@@ -477,7 +478,7 @@ namespace Lucene.Net.Codecs.Lucene3x
                     Array.Sort(termAndPostings, 
Comparer<TermAndPostings>.Create((left, right) => 
left.Term.CompareTo(right.Term)));
                 }
             }
-            
+
             private void ReadVectors()
             {
                 termAndPostings = new TermAndPostings[numTerms];
@@ -885,4 +886,4 @@ namespace Lucene.Net.Codecs.Lucene3x
         {
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs 
b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs
index 7dbdc1bea..c08a98893 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs
@@ -405,6 +405,7 @@ namespace Lucene.Net.Codecs.Lucene3x
 
             while (CompareAsUTF16(term, enumerator.Term()) > 0 && 
enumerator.Next())
             {
+                // LUCENENET: intentionally empty
             }
 
             if (CompareAsUTF16(term, enumerator.Term()) == 0)
@@ -440,4 +441,4 @@ namespace Lucene.Net.Codecs.Lucene3x
             return index is null ? 0 : index.RamBytesUsed();
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs 
b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
index 6d4b45c7c..b6736feb2 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
@@ -120,9 +120,10 @@ namespace Lucene.Net.Codecs.Lucene40
                     try
                     {
                         Dispose();
-                    } // ensure we throw our original exception
+                    }
                     catch (Exception t) when (t.IsThrowable())
                     {
+                        // ensure we throw our original exception
                     }
                 }
             }
@@ -317,4 +318,4 @@ namespace Lucene.Net.Codecs.Lucene40
         {
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs 
b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
index eacfee705..e3838768e 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
@@ -156,7 +156,9 @@ namespace Lucene.Net.Codecs.Lucene40
             }
             catch (Exception ignored) when (ignored.IsThrowable())
             {
+                // ignored
             }
+
             IOUtils.DeleteFilesIgnoringExceptions(directory, 
IndexFileNames.SegmentFileName(segment, "", FIELDS_EXTENSION), 
IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION));
         }
 
@@ -173,7 +175,7 @@ namespace Lucene.Net.Codecs.Lucene40
 
             // LUCENENET specific - To avoid boxing/unboxing, we don't
             // call GetNumericValue(). Instead, we check the field.NumericType 
and then
-            // call the appropriate conversion method. 
+            // call the appropriate conversion method.
             if (field.NumericType != NumericFieldType.NONE)
             {
                 switch (field.NumericType)
@@ -422,4 +424,4 @@ namespace Lucene.Net.Codecs.Lucene40
             return docCount;
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs 
b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
index 87a986487..3a2e6b229 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
@@ -336,7 +336,9 @@ namespace Lucene.Net.Codecs.Lucene40
             }
             catch (Exception ignored) when (ignored.IsThrowable())
             {
+                // ignored
             }
+
             IOUtils.DeleteFilesIgnoringExceptions(directory,
                 IndexFileNames.SegmentFileName(segment, "", 
Lucene40TermVectorsReader.VECTORS_INDEX_EXTENSION),
                 IndexFileNames.SegmentFileName(segment, "", 
Lucene40TermVectorsReader.VECTORS_DOCUMENTS_EXTENSION),
diff --git a/src/Lucene.Net/Index/CheckIndex.cs 
b/src/Lucene.Net/Index/CheckIndex.cs
index 31084de38..9b84b8bc3 100644
--- a/src/Lucene.Net/Index/CheckIndex.cs
+++ b/src/Lucene.Net/Index/CheckIndex.cs
@@ -669,6 +669,7 @@ namespace Lucene.Net.Index
             {
                 SegmentCommitInfo info = sis[i];
                 int segmentName = 0;
+
                 try
                 {
                     // LUCENENET: Optimized to not allocate a substring during 
the parse
@@ -676,15 +677,19 @@ namespace Lucene.Net.Index
                 }
                 catch
                 {
+                    // ignored
                 }
+
                 if (segmentName > result.MaxSegmentName)
                 {
                     result.MaxSegmentName = segmentName;
                 }
+
                 if (onlySegments != null && 
!onlySegments.Contains(info.Info.Name))
                 {
                     continue;
                 }
+
                 Status.SegmentInfoStatus segInfoStat = new 
Status.SegmentInfoStatus();
                 result.SegmentInfos.Add(segInfoStat);
                 Msg(infoStream, "  " + (1 + i) + " of " + numSegments + ": 
name=" + info.Info.Name + " docCount=" + info.Info.DocCount);
diff --git a/src/Lucene.Net/Index/IndexWriter.cs 
b/src/Lucene.Net/Index/IndexWriter.cs
index 443799b8d..1549e82d2 100644
--- a/src/Lucene.Net/Index/IndexWriter.cs
+++ b/src/Lucene.Net/Index/IndexWriter.cs
@@ -2873,6 +2873,7 @@ namespace Lucene.Net.Index
                             }
                             catch (Exception t) when (t.IsThrowable())
                             {
+                                // ignored
                             }
                         }
 
@@ -3411,6 +3412,7 @@ namespace Lucene.Net.Index
                                 }
                                 catch (Exception t) when (t.IsThrowable())
                                 {
+                                    // ignored
                                 }
                             }
                         }
@@ -3440,6 +3442,7 @@ namespace Lucene.Net.Index
                                     }
                                     catch (Exception t) when (t.IsThrowable())
                                     {
+                                        // ignored
                                     }
                                 }
                             }
@@ -3816,6 +3819,7 @@ namespace Lucene.Net.Index
                         }
                         catch (Exception t) when (t.IsThrowable())
                         {
+                            // ignored
                         }
                     }
                 }
@@ -6299,19 +6303,23 @@ namespace Lucene.Net.Index
                 else
                 {
                     IOUtils.DisposeWhileHandlingException(cfsDir);
+
                     try
                     {
                         directory.DeleteFile(fileName);
                     }
                     catch (Exception t) when (t.IsThrowable())
                     {
+                        // ignored
                     }
+
                     try
                     {
                         
directory.DeleteFile(Lucene.Net.Index.IndexFileNames.SegmentFileName(info.Name, 
"", Lucene.Net.Index.IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION));
                     }
                     catch (Exception t) when (t.IsThrowable())
                     {
+                        // ignored
                     }
                 }
             }
@@ -6471,7 +6479,10 @@ namespace Lucene.Net.Index
         {
             try
             {
-                using (var input = dir.OpenInput(fileName, IOContext.DEFAULT)) 
{ }
+                using (var input = dir.OpenInput(fileName, IOContext.DEFAULT))
+                {
+                    // LUCENENET: intentionally empty, replaces .close()
+                }
                 return true;
             }
             catch (Exception e) when 
(e.IsNoSuchFileExceptionOrFileNotFoundException())
diff --git a/src/Lucene.Net/Index/TwoStoredFieldsConsumers.cs 
b/src/Lucene.Net/Index/TwoStoredFieldsConsumers.cs
index ab5312859..4cac1d0f5 100644
--- a/src/Lucene.Net/Index/TwoStoredFieldsConsumers.cs
+++ b/src/Lucene.Net/Index/TwoStoredFieldsConsumers.cs
@@ -55,13 +55,16 @@ namespace Lucene.Net.Index
             }
             catch (Exception t) when (t.IsThrowable())
             {
+                // ignored
             }
+
             try
             {
                 second.Abort();
             }
             catch (Exception t) when (t.IsThrowable())
             {
+                // ignored
             }
         }
 
@@ -78,4 +81,4 @@ namespace Lucene.Net.Index
             second.FinishDocument();
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net/Store/Directory.cs 
b/src/Lucene.Net/Store/Directory.cs
index f58bfc864..0f8facc62 100644
--- a/src/Lucene.Net/Store/Directory.cs
+++ b/src/Lucene.Net/Store/Directory.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.Store
     /// deleted.  Random access is permitted both when reading and writing.
     /// <para/>
     /// .NET's i/o APIs not used directly, but rather all i/o is
-    /// through this API.  This permits things such as: 
+    /// through this API.  This permits things such as:
     /// <list type="bullet">
     ///     <item><description> implementation of RAM-based 
indices;</description></item>
     ///     <item><description> implementation indices stored in a 
database;</description></item>
@@ -100,7 +100,7 @@ namespace Lucene.Net.Store
         /// specified read buffer size.  The particular <see cref="Directory"/>
         /// implementation may ignore the buffer size.  Currently
         /// the only <see cref="Directory"/> implementations that respect this
-        /// parameter are <see cref="FSDirectory"/> and 
+        /// parameter are <see cref="FSDirectory"/> and
         /// <see cref="CompoundFileDirectory"/>.
         /// <para/>Throws <see cref="FileNotFoundException"/>
         /// if the file does not exist.
@@ -225,6 +225,7 @@ namespace Lucene.Net.Store
                         }
                         catch (Exception t) when (t.IsThrowable())
                         {
+                            // ignored
                         }
                     }
                 }
@@ -370,7 +371,7 @@ namespace Lucene.Net.Store
 
             /// <summary>
             /// Expert: implements seek.  Sets current position in this file, 
where
-            /// the next <see cref="ReadInternal(byte[], int, int)"/> will 
occur. 
+            /// the next <see cref="ReadInternal(byte[], int, int)"/> will 
occur.
             /// </summary>
             /// <seealso cref="ReadInternal(byte[], int, int)"/>
             protected override void SeekInternal(long pos)
@@ -411,4 +412,4 @@ namespace Lucene.Net.Store
             }
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net/Store/LockStressTest.cs 
b/src/Lucene.Net/Store/LockStressTest.cs
index dc0b218e5..d18ee0f41 100644
--- a/src/Lucene.Net/Store/LockStressTest.cs
+++ b/src/Lucene.Net/Store/LockStressTest.cs
@@ -169,10 +169,9 @@ namespace Lucene.Net.Store
                 {
                     obtained = l.Obtain(rnd.Next(100) + 10);
                 }
-#pragma warning disable 168
-                catch (LockObtainFailedException e)
-#pragma warning restore 168
+                catch (LockObtainFailedException /*e*/)
                 {
+                    // ignored
                 }
 
                 if (obtained)
diff --git a/src/Lucene.Net/Store/NativeFSLockFactory.cs 
b/src/Lucene.Net/Store/NativeFSLockFactory.cs
index a38d86617..ec061dcee 100644
--- a/src/Lucene.Net/Store/NativeFSLockFactory.cs
+++ b/src/Lucene.Net/Store/NativeFSLockFactory.cs
@@ -256,7 +256,7 @@ namespace Lucene.Net.Store
     // Note that using NativeFSLock would be ideal for all platforms. However, 
there is a
     // small chance that provoking lock/share exceptions will fail. In that 
rare case, we
     // fallback to this substandard implementation.
-    // 
+    //
     // Reference: https://stackoverflow.com/q/46380483
     internal class FallbackNativeFSLock : Lock
     {
@@ -599,6 +599,7 @@ namespace Lucene.Net.Store
                 {
                     using (var stream = GetLockFileStream(FileMode.Open))
                     {
+                        // LUCENENET: intentionally empty, replaces .close()
                     }
                     return false;
                 }
@@ -768,6 +769,7 @@ namespace Lucene.Net.Store
                             }
                             catch
                             {
+                                // ignored
                             }
                         }
                     }
diff --git a/src/Lucene.Net/Store/RAMOutputStream.cs 
b/src/Lucene.Net/Store/RAMOutputStream.cs
index eccab0bf4..6b6ffd5c6 100644
--- a/src/Lucene.Net/Store/RAMOutputStream.cs
+++ b/src/Lucene.Net/Store/RAMOutputStream.cs
@@ -146,6 +146,7 @@ namespace Lucene.Net.Store
             get => file.length;
             set
             {
+                // LUCENENET: intentionally empty
             }
         }
 
@@ -222,4 +223,4 @@ namespace Lucene.Net.Store
 
         public override long Checksum => crc.Value;
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net/Util/Fst/FST.cs b/src/Lucene.Net/Util/Fst/FST.cs
index 551f65c7d..c10f55aef 100644
--- a/src/Lucene.Net/Util/Fst/FST.cs
+++ b/src/Lucene.Net/Util/Fst/FST.cs
@@ -952,9 +952,11 @@ namespace Lucene.Net.Util.Fst
                         }
                         if (arc.Flag(FST.BIT_STOP_NODE))
                         {
+                            // LUCENENET: intentionally empty to match Lucene
                         }
                         else if (arc.Flag(FST.BIT_TARGET_NEXT))
                         {
+                            // LUCENENET: intentionally empty to match Lucene
                         }
                         else if (packed)
                         {
diff --git a/src/Lucene.Net/Util/VirtualMethod.cs 
b/src/Lucene.Net/Util/VirtualMethod.cs
index ef283859b..f6cc17ca9 100644
--- a/src/Lucene.Net/Util/VirtualMethod.cs
+++ b/src/Lucene.Net/Util/VirtualMethod.cs
@@ -188,6 +188,7 @@ namespace Lucene.Net.Util
                     // just to mimic the fact they were swallowing in Java 
when the method isn't found.
                     catch (AmbiguousMatchException)
                     {
+                        // ignored
                     }
                 }
 
@@ -202,7 +203,7 @@ namespace Lucene.Net.Util
 
         /// <summary>
         /// Utility method that compares the implementation/override distance 
of two methods. </summary>
-        /// <returns> 
+        /// <returns>
         /// <list type="bullet">
         ///     <item><description>&gt; 1, iff <paramref name="m1"/> is 
overridden/implemented in a subclass of the class overriding/declaring 
<paramref name="m2"/></description></item>
         ///     <item><description>&lt; 1, iff <paramref name="m2"/> is 
overridden in a subclass of the class overriding/declaring <paramref 
name="m1"/></description></item>
@@ -240,4 +241,4 @@ namespace Lucene.Net.Util
 #endif
         }
     }
-}
\ No newline at end of file
+}

Reply via email to