Lucene.Net.Codecs.Compressing: Fixed XML documentation comment warnings

Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/5dc5193a
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/5dc5193a
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/5dc5193a

Branch: refs/heads/master
Commit: 5dc5193a89e01c703095c653325f4b665aa386b9
Parents: 95b5d4b
Author: Shad Storhaug <[email protected]>
Authored: Mon Jun 5 15:08:06 2017 +0700
Committer: Shad Storhaug <[email protected]>
Committed: Tue Jun 6 06:58:42 2017 +0700

----------------------------------------------------------------------
 CONTRIBUTING.md                                 |  5 +-
 .../CompressingStoredFieldsFormat.cs            | 61 ++++++++++----------
 .../CompressingStoredFieldsIndexReader.cs       |  3 +-
 .../CompressingStoredFieldsIndexWriter.cs       | 49 ++++++++--------
 .../CompressingStoredFieldsReader.cs            |  9 +--
 .../CompressingStoredFieldsWriter.cs            |  5 +-
 .../Compressing/CompressingTermVectorsFormat.cs | 35 +++++------
 .../Compressing/CompressingTermVectorsReader.cs |  5 +-
 .../Compressing/CompressingTermVectorsWriter.cs |  9 +--
 .../Codecs/Compressing/CompressionMode.cs       |  9 +--
 src/Lucene.Net/Codecs/Compressing/Compressor.cs |  4 +-
 .../Codecs/Compressing/Decompressor.cs          | 22 +++----
 src/Lucene.Net/Codecs/Compressing/LZ4.cs        | 20 +++----
 13 files changed, 123 insertions(+), 113 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/CONTRIBUTING.md
----------------------------------------------------------------------
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 5c39dc4..36b780b 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -50,9 +50,8 @@ helpers to help with that, see for examples see our [Java 
style methods to avoid
 
 ### Documentation Comments == up for grabs:
 
-1. Lucene.Net.Core (project)
-   1. Codecs.Compressing (namespace)
-   2. Util.Packed (namespace)
+1. Lucene.Net (project)
+   1. Util.Packed (namespace)
 2. Lucene.Net.Codecs (project)
    1. Appending (namespace)
    2. BlockTerms (namespace)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsFormat.cs 
b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsFormat.cs
index 2577368..c88d8e7 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsFormat.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsFormat.cs
@@ -23,17 +23,18 @@ namespace Lucene.Net.Codecs.Compressing
     using SegmentInfo = Lucene.Net.Index.SegmentInfo;
 
     /// <summary>
-    /// A <seealso cref="StoredFieldsFormat"/> that is very similar to
-    /// <seealso cref="Lucene40StoredFieldsFormat"/> but compresses documents 
in chunks in
+    /// A <see cref="StoredFieldsFormat"/> that is very similar to
+    /// <see cref="Lucene40.Lucene40StoredFieldsFormat"/> but compresses 
documents in chunks in
     /// order to improve the compression ratio.
-    /// <p>
-    /// For a chunk size of <tt>chunkSize</tt> bytes, this <seealso 
cref="StoredFieldsFormat"/>
-    /// does not support documents larger than (<tt>2<sup>31</sup> - 
chunkSize</tt>)
+    /// <para/>
+    /// For a chunk size of <c>chunkSize</c> bytes, this <see 
cref="StoredFieldsFormat"/>
+    /// does not support documents larger than (<c>2<sup>31</sup> - 
chunkSize</c>)
     /// bytes. In case this is a problem, you should use another format, such 
as
-    /// <seealso cref="Lucene40StoredFieldsFormat"/>.
-    /// <p>
-    /// For optimal performance, you should use a <seealso 
cref="MergePolicy"/> that returns
+    /// <see cref="Lucene40.Lucene40StoredFieldsFormat"/>.
+    /// <para/>
+    /// For optimal performance, you should use a <see 
cref="Index.MergePolicy"/> that returns
     /// segments that have the biggest byte size first.
+    /// <para/>
     /// @lucene.experimental
     /// </summary>
     public class CompressingStoredFieldsFormat : StoredFieldsFormat
@@ -44,47 +45,47 @@ namespace Lucene.Net.Codecs.Compressing
         private readonly int chunkSize;
 
         /// <summary>
-        /// Create a new <seealso cref="CompressingStoredFieldsFormat"/> with 
an empty segment
+        /// Create a new <see cref="CompressingStoredFieldsFormat"/> with an 
empty segment
         /// suffix.
         /// </summary>
-        /// <seealso cref= 
CompressingStoredFieldsFormat#CompressingStoredFieldsFormat(String, String, 
CompressionMode, int) </seealso>
+        /// <seealso 
cref="CompressingStoredFieldsFormat.CompressingStoredFieldsFormat(string, 
string, CompressionMode, int)"/>
         public CompressingStoredFieldsFormat(string formatName, 
CompressionMode compressionMode, int chunkSize)
             : this(formatName, "", compressionMode, chunkSize)
         {
         }
 
         /// <summary>
-        /// Create a new <seealso cref="CompressingStoredFieldsFormat"/>.
-        /// <p>
-        /// <code>formatName</code> is the name of the format. this name will 
be used
+        /// Create a new <see cref="CompressingStoredFieldsFormat"/>.
+        /// <para/>
+        /// <paramref name="formatName"/> is the name of the format. This name 
will be used
         /// in the file formats to perform
-        /// <seealso cref="CodecUtil#checkHeader(Lucene.Net.Store.DataInput, 
String, int, int) codec header checks"/>.
-        /// <p>
-        /// <code>segmentSuffix</code> is the segment suffix. this suffix is 
added to
+        /// codec header checks (<see 
cref="CodecUtil.CheckHeader(Lucene.Net.Store.DataInput, string, int, int)"/>).
+        /// <para/>
+        /// <paramref name="segmentSuffix"/> is the segment suffix. This 
suffix is added to
         /// the result file name only if it's not the empty string.
-        /// <p>
-        /// The <code>compressionMode</code> parameter allows you to choose 
between
+        /// <para/>
+        /// The <paramref name="compressionMode"/> parameter allows you to 
choose between
         /// compression algorithms that have various compression and 
decompression
         /// speeds so that you can pick the one that best fits your indexing 
and
         /// searching throughput. You should never instantiate two
-        /// <seealso cref="CompressingStoredFieldsFormat"/>s that have the 
same name but
-        /// different <seealso cref="compressionMode"/>s.
-        /// <p>
-        /// <code>chunkSize</code> is the minimum byte size of a chunk of 
documents.
-        /// A value of <code>1</code> can make sense if there is redundancy 
across
+        /// <see cref="CompressingStoredFieldsFormat"/>s that have the same 
name but
+        /// different <see cref="compressionMode"/>s.
+        /// <para/>
+        /// <paramref name="chunkSize"/> is the minimum byte size of a chunk 
of documents.
+        /// A value of <c>1</c> can make sense if there is redundancy across
         /// fields. In that case, both performance and compression ratio 
should be
-        /// better than with <seealso cref="Lucene40StoredFieldsFormat"/> with 
compressed
+        /// better than with <see cref="Lucene40.Lucene40StoredFieldsFormat"/> 
with compressed
         /// fields.
-        /// <p>
-        /// Higher values of <code>chunkSize</code> should improve the 
compression
+        /// <para/>
+        /// Higher values of <paramref name="chunkSize"/> should improve the 
compression
         /// ratio but will require more memory at indexing time and might make 
document
         /// loading a little slower (depending on the size of your OS cache 
compared
         /// to the size of your index).
         /// </summary>
-        /// <param name="formatName"> the name of the <seealso 
cref="StoredFieldsFormat"/> </param>
-        /// <param name="compressionMode"> the <seealso 
cref="compressionMode"/> to use </param>
-        /// <param name="chunkSize"> the minimum number of bytes of a single 
chunk of stored documents </param>
-        /// <seealso cref= compressionMode </seealso>
+        /// <param name="formatName"> The name of the <see 
cref="StoredFieldsFormat"/>. </param>
+        /// <param name="compressionMode"> The <see cref="CompressionMode"/> 
to use. </param>
+        /// <param name="chunkSize"> The minimum number of bytes of a single 
chunk of stored documents. </param>
+        /// <seealso cref="CompressionMode"/>
         public CompressingStoredFieldsFormat(string formatName, string 
segmentSuffix, CompressionMode compressionMode, int chunkSize)
         {
             this.formatName = formatName;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
----------------------------------------------------------------------
diff --git 
a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs 
b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
index a27fc40..fb889e6 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
@@ -27,7 +27,8 @@ namespace Lucene.Net.Codecs.Compressing
     using SegmentInfo = Lucene.Net.Index.SegmentInfo;
 
     /// <summary>
-    /// Random-access reader for <seealso 
cref="CompressingStoredFieldsIndexWriter"/>.
+    /// Random-access reader for <see 
cref="CompressingStoredFieldsIndexWriter"/>.
+    /// <para/>
     /// @lucene.internal
     /// </summary>
     public sealed class CompressingStoredFieldsIndexReader

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
----------------------------------------------------------------------
diff --git 
a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs 
b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
index 981e476..588cc2b 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
@@ -24,45 +24,48 @@ namespace Lucene.Net.Codecs.Compressing
     using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
 
     /// <summary>
-    /// Efficient index format for block-based <seealso cref="Codec"/>s.
-    /// <p> this writer generates a file which can be loaded into memory using
+    /// Efficient index format for block-based <see cref="Codec"/>s.
+    /// <para/> This writer generates a file which can be loaded into memory 
using
     /// memory-efficient data structures to quickly locate the block that 
contains
     /// any document.
-    /// <p>In order to have a compact in-memory representation, for every 
block of
+    /// <para>In order to have a compact in-memory representation, for every 
block of
     /// 1024 chunks, this index computes the average number of bytes per
-    /// chunk and for every chunk, only stores the difference between<ul>
+    /// chunk and for every chunk, only stores the difference between
+    /// <list type="bullet">
     /// <li>${chunk number} * ${average length of a chunk}</li>
-    /// <li>and the actual start offset of the chunk</li></ul></p>
-    /// <p>Data is written as follows:</p>
-    /// <ul>
+    /// <li>and the actual start offset of the chunk</li>
+    /// </list>
+    /// </para>
+    /// <para>Data is written as follows:</para>
+    /// <list type="bullet">
     /// <li>PackedIntsVersion, &lt;Block&gt;<sup>BlockCount</sup>, 
BlocksEndMarker</li>
-    /// <li>PackedIntsVersion --&gt; <seealso 
cref="PackedInt32s#VERSION_CURRENT"/> as a <seealso cref="DataOutput#writeVInt 
VInt"/></li>
-    /// <li>BlocksEndMarker --&gt; <tt>0</tt> as a <seealso 
cref="DataOutput#writeVInt VInt"/>, this marks the end of blocks since blocks 
are not allowed to start with <tt>0</tt></li>
+    /// <li>PackedIntsVersion --&gt; <see 
cref="PackedInt32s.VERSION_CURRENT"/> as a VInt (<see 
cref="Store.DataOutput.WriteVInt32(int)"/>) </li>
+    /// <li>BlocksEndMarker --&gt; <tt>0</tt> as a VInt (<see 
cref="Store.DataOutput.WriteVInt32(int)"/>), this marks the end of blocks 
since blocks are not allowed to start with <tt>0</tt></li>
     /// <li>Block --&gt; BlockChunks, &lt;DocBases&gt;, 
&lt;StartPointers&gt;</li>
-    /// <li>BlockChunks --&gt; a <seealso cref="DataOutput#writeVInt VInt"/> 
which is the number of chunks encoded in the block</li>
+    /// <li>BlockChunks --&gt; a VInt (<see 
cref="Store.DataOutput.WriteVInt32(int)"/>) which is the number of chunks 
encoded in the block</li>
     /// <li>DocBases --&gt; DocBase, AvgChunkDocs, BitsPerDocBaseDelta, 
DocBaseDeltas</li>
-    /// <li>DocBase --&gt; first document ID of the block of chunks, as a 
<seealso cref="DataOutput#writeVInt VInt"/></li>
-    /// <li>AvgChunkDocs --&gt; average number of documents in a single chunk, 
as a <seealso cref="DataOutput#writeVInt VInt"/></li>
+    /// <li>DocBase --&gt; first document ID of the block of chunks, as a VInt 
(<see cref="Store.DataOutput.WriteVInt32(int)"/>) </li>
+    /// <li>AvgChunkDocs --&gt; average number of documents in a single chunk, 
as a VInt (<see cref="Store.DataOutput.WriteVInt32(int)"/>) </li>
     /// <li>BitsPerDocBaseDelta --&gt; number of bits required to represent a 
delta from the average using <a 
href="https://developers.google.com/protocol-buffers/docs/encoding#types";>ZigZag
 encoding</a></li>
-    /// <li>DocBaseDeltas --&gt; <seealso cref="PackedInt32s packed"/> array 
of BlockChunks elements of BitsPerDocBaseDelta bits each, representing the 
deltas from the average doc base using <a 
href="https://developers.google.com/protocol-buffers/docs/encoding#types";>ZigZag
 encoding</a>.</li>
+    /// <li>DocBaseDeltas --&gt; packed (<see cref="PackedInt32s"/>) array of 
BlockChunks elements of BitsPerDocBaseDelta bits each, representing the deltas 
from the average doc base using <a 
href="https://developers.google.com/protocol-buffers/docs/encoding#types";>ZigZag
 encoding</a>.</li>
     /// <li>StartPointers --&gt; StartPointerBase, AvgChunkSize, 
BitsPerStartPointerDelta, StartPointerDeltas</li>
-    /// <li>StartPointerBase --&gt; the first start pointer of the block, as a 
<seealso cref="DataOutput#writeVLong VLong"/></li>
-    /// <li>AvgChunkSize --&gt; the average size of a chunk of compressed 
documents, as a <seealso cref="DataOutput#writeVLong VLong"/></li>
+    /// <li>StartPointerBase --&gt; the first start pointer of the block, as a 
VLong (<see cref="Store.DataOutput.WriteVInt64(long)"/>) </li>
+    /// <li>AvgChunkSize --&gt; the average size of a chunk of compressed 
documents, as a VLong (<see cref="Store.DataOutput.WriteVInt64(long)"/>) </li>
     /// <li>BitsPerStartPointerDelta --&gt; number of bits required to 
represent a delta from the average using <a 
href="https://developers.google.com/protocol-buffers/docs/encoding#types";>ZigZag
 encoding</a></li>
-    /// <li>StartPointerDeltas --&gt; <seealso cref="PackedInt32s packed"/> 
array of BlockChunks elements of BitsPerStartPointerDelta bits each, 
representing the deltas from the average start pointer using <a 
href="https://developers.google.com/protocol-buffers/docs/encoding#types";>ZigZag
 encoding</a></li>
-    /// <li>Footer --&gt; <seealso cref="CodecUtil#writeFooter 
CodecFooter"/></li>
-    /// </ul>
-    /// <p>Notes</p>
-    /// <ul>
+    /// <li>StartPointerDeltas --&gt; packed (<see cref="PackedInt32s"/>) 
array of BlockChunks elements of BitsPerStartPointerDelta bits each, 
representing the deltas from the average start pointer using <a 
href="https://developers.google.com/protocol-buffers/docs/encoding#types";>ZigZag
 encoding</a></li>
+    /// <li>Footer --&gt; CodecFooter (<see 
cref="CodecUtil.WriteFooter(IndexOutput)"/>) </li>
+    /// </list>
+    /// <para>Notes</para>
+    /// <list type="bullet">
     /// <li>For any block, the doc base of the n-th chunk can be restored with
-    /// <code>DocBase + AvgChunkDocs * n + DocBaseDeltas[n]</code>.</li>
+    /// <c>DocBase + AvgChunkDocs * n + DocBaseDeltas[n]</c>.</li>
     /// <li>For any block, the start pointer of the n-th chunk can be restored 
with
-    /// <code>StartPointerBase + AvgChunkSize * n + 
StartPointerDeltas[n]</code>.</li>
+    /// <c>StartPointerBase + AvgChunkSize * n + 
StartPointerDeltas[n]</c>.</li>
     /// <li>Once data is loaded into memory, you can lookup the start pointer 
of any
     /// document by performing two binary searches: a first one based on the 
values
     /// of DocBase in order to find the right block, and then inside the block 
based
     /// on DocBaseDeltas (by reconstructing the doc bases for every 
chunk).</li>
-    /// </ul>
+    /// </list>
     /// @lucene.internal
     /// </summary>
     public sealed class CompressingStoredFieldsIndexWriter : IDisposable

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs 
b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
index 335aa08..b2fbb74 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
@@ -43,7 +43,8 @@ namespace Lucene.Net.Codecs.Compressing
     using StoredFieldVisitor = Lucene.Net.Index.StoredFieldVisitor;
 
     /// <summary>
-    /// <seealso cref="StoredFieldsReader"/> impl for <seealso 
cref="CompressingStoredFieldsFormat"/>.
+    /// <see cref="StoredFieldsReader"/> impl for <see 
cref="CompressingStoredFieldsFormat"/>.
+    /// <para/>
     /// @lucene.experimental
     /// </summary>
     public sealed class CompressingStoredFieldsReader : StoredFieldsReader
@@ -163,7 +164,7 @@ namespace Lucene.Net.Codecs.Compressing
             }
         }
 
-        /// <exception cref="ObjectDisposedException"> if this FieldsReader is 
closed </exception>
+        /// <exception cref="ObjectDisposedException"> If this FieldsReader is 
disposed. </exception>
         private void EnsureOpen()
         {
             if (closed)
@@ -173,7 +174,7 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// Close the underlying <seealso cref="IndexInput"/>s.
+        /// Dispose the underlying <see cref="IndexInput"/>s.
         /// </summary>
         protected override void Dispose(bool disposing)
         {
@@ -501,7 +502,7 @@ namespace Lucene.Net.Codecs.Compressing
             }
 
             /// <summary>
-            /// Go to the chunk containing the provided doc ID.
+            /// Go to the chunk containing the provided <paramref name="doc"/> 
ID.
             /// </summary>
             internal void Next(int doc)
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs 
b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs
index 4f8f949..465e1d1 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs
@@ -28,7 +28,8 @@ namespace Lucene.Net.Codecs.Compressing
      */
 
     /// <summary>
-    /// <seealso cref="StoredFieldsWriter"/> impl for <seealso 
cref="CompressingStoredFieldsFormat"/>.
+    /// <see cref="StoredFieldsWriter"/> impl for <see 
cref="CompressingStoredFieldsFormat"/>.
+    /// <para/>
     /// @lucene.experimental
     /// </summary>
     public sealed class CompressingStoredFieldsWriter : StoredFieldsWriter
@@ -167,7 +168,7 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// NOTE: This was saveInts() in Lucene
+        /// NOTE: This was saveInts() in Lucene.
         /// </summary>
         private static void SaveInt32s(int[] values, int length, DataOutput 
@out)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsFormat.cs 
b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsFormat.cs
index 8952cc5..7d4a22e 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsFormat.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsFormat.cs
@@ -24,8 +24,9 @@ namespace Lucene.Net.Codecs.Compressing
     using SegmentInfo = Lucene.Net.Index.SegmentInfo;
 
     /// <summary>
-    /// A <seealso cref="TermVectorsFormat"/> that compresses chunks of 
documents together in
+    /// A <see cref="TermVectorsFormat"/> that compresses chunks of documents 
together in
     /// order to improve the compression ratio.
+    /// <para/>
     /// @lucene.experimental
     /// </summary>
     public class CompressingTermVectorsFormat : TermVectorsFormat
@@ -36,30 +37,30 @@ namespace Lucene.Net.Codecs.Compressing
         private readonly int chunkSize;
 
         /// <summary>
-        /// Create a new <seealso cref="CompressingTermVectorsFormat"/>.
-        /// <p>
-        /// <code>formatName</code> is the name of the format. this name will 
be used
+        /// Create a new <see cref="CompressingTermVectorsFormat"/>.
+        /// <para/>
+        /// <paramref name="formatName"/> is the name of the format. This name 
will be used
         /// in the file formats to perform
-        /// <seealso cref="CodecUtil#checkHeader(Lucene.Net.Store.DataInput, 
String, int, int) codec header checks"/>.
-        /// <p>
-        /// The <code>compressionMode</code> parameter allows you to choose 
between
+        /// codec header checks (<see 
cref="CodecUtil.CheckHeader(Lucene.Net.Store.DataInput, string, int, int)"/>).
+        /// <para/>
+        /// The <paramref name="compressionMode"/> parameter allows you to 
choose between
         /// compression algorithms that have various compression and 
decompression
         /// speeds so that you can pick the one that best fits your indexing 
and
         /// searching throughput. You should never instantiate two
-        /// <seealso cref="CompressingTermVectorsFormat"/>s that have the same 
name but
-        /// different <seealso cref="compressionMode"/>s.
-        /// <p>
-        /// <code>chunkSize</code> is the minimum byte size of a chunk of 
documents.
-        /// Higher values of <code>chunkSize</code> should improve the 
compression
+        /// <see cref="CompressingTermVectorsFormat"/>s that have the same 
name but
+        /// different <see cref="CompressionMode"/>s.
+        /// <para/>
+        /// <paramref name="chunkSize"/> is the minimum byte size of a chunk 
of documents.
+        /// Higher values of <paramref name="chunkSize"/> should improve the 
compression
         /// ratio but will require more memory at indexing time and might make 
document
         /// loading a little slower (depending on the size of your OS cache 
compared
         /// to the size of your index).
         /// </summary>
-        /// <param name="formatName"> the name of the <seealso 
cref="StoredFieldsFormat"/> </param>
-        /// <param name="segmentSuffix"> a suffix to append to files created 
by this format </param>
-        /// <param name="compressionMode"> the <seealso 
cref="compressionMode"/> to use </param>
-        /// <param name="chunkSize"> the minimum number of bytes of a single 
chunk of stored documents </param>
-        /// <seealso cref= compressionMode </seealso>
+        /// <param name="formatName"> The name of the <see 
cref="StoredFieldsFormat"/>. </param>
+        /// <param name="segmentSuffix"> A suffix to append to files created 
by this format. </param>
+        /// <param name="compressionMode"> The <see cref="CompressionMode"/> 
to use. </param>
+        /// <param name="chunkSize"> The minimum number of bytes of a single 
chunk of stored documents. </param>
+        /// <seealso cref="CompressionMode"/>
         public CompressingTermVectorsFormat(string formatName, string 
segmentSuffix, CompressionMode compressionMode, int chunkSize)
         {
             this.formatName = formatName;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs 
b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs
index 2d8ea75..5b69663 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs
@@ -29,7 +29,8 @@ namespace Lucene.Net.Codecs.Compressing
      */
 
     /// <summary>
-    /// <seealso cref="TermVectorsReader"/> for <seealso 
cref="CompressingTermVectorsFormat"/>.
+    /// <see cref="TermVectorsReader"/> for <see 
cref="CompressingTermVectorsFormat"/>.
+    /// <para/>
     /// @lucene.experimental
     /// </summary>
     public sealed class CompressingTermVectorsReader : TermVectorsReader, 
IDisposable
@@ -174,7 +175,7 @@ namespace Lucene.Net.Codecs.Compressing
             }
         }
 
-        /// <exception cref="ObjectDisposedException"> if this 
TermVectorsReader is closed </exception>
+        /// <exception cref="ObjectDisposedException"> If this <see 
cref="TermVectorsReader"/> is disposed. </exception>
         private void EnsureOpen()
         {
             if (closed)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs 
b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs
index 042c319..2d23dbc 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs
@@ -48,7 +48,8 @@ namespace Lucene.Net.Codecs.Compressing
     using StringHelper = Lucene.Net.Util.StringHelper;
 
     /// <summary>
-    /// <seealso cref="TermVectorsWriter"/> for <seealso 
cref="CompressingTermVectorsFormat"/>.
+    /// <see cref="TermVectorsWriter"/> for <see 
cref="CompressingTermVectorsFormat"/>.
+    /// <para/>
     /// @lucene.experimental
     /// </summary>
     public sealed class CompressingTermVectorsWriter : TermVectorsWriter
@@ -84,7 +85,7 @@ namespace Lucene.Net.Codecs.Compressing
         private readonly int chunkSize;
 
         /// <summary>
-        /// a pending doc </summary>
+        /// A pending doc. </summary>
         private class DocData
         {
             private readonly CompressingTermVectorsWriter outerInstance;
@@ -152,7 +153,7 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// a pending field </summary>
+        /// A pending field. </summary>
         private class FieldData
         {
             private readonly CompressingTermVectorsWriter outerInstance;
@@ -441,7 +442,7 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// Returns a sorted array containing unique field numbers </summary>
+        /// Returns a sorted array containing unique field numbers. </summary>
         private int[] FlushFieldNums()
         {
             SortedSet<int> fieldNums = new SortedSet<int>();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs 
b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
index ce0857c..dda993a 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
@@ -30,6 +30,7 @@ namespace Lucene.Net.Codecs.Compressing
     /// <summary>
     /// A compression mode. Tells how much effort should be spent on 
compression and
     /// decompression of stored fields.
+    /// <para/>
     /// @lucene.experimental
     /// </summary>
     public abstract class CompressionMode
@@ -95,8 +96,8 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// this compression mode is similar to <seealso cref="#FAST"/> but it 
spends more time
-        /// compressing in order to improve the compression ratio. this 
compression
+        /// This compression mode is similar to <see cref="FAST"/> but it 
spends more time
+        /// compressing in order to improve the compression ratio. This 
compression
         /// mode is best used with indices that have a low update rate but 
should be
         /// able to load documents from disk quickly.
         /// </summary>
@@ -131,12 +132,12 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// Create a new <seealso cref="Compressor"/> instance.
+        /// Create a new <see cref="Compressor"/> instance.
         /// </summary>
         public abstract Compressor NewCompressor();
 
         /// <summary>
-        /// Create a new <seealso cref="Decompressor"/> instance.
+        /// Create a new <see cref="Decompressor"/> instance.
         /// </summary>
         public abstract Decompressor NewDecompressor();
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/Compressor.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Codecs/Compressing/Compressor.cs 
b/src/Lucene.Net/Codecs/Compressing/Compressor.cs
index 666e90a..0f0e523 100644
--- a/src/Lucene.Net/Codecs/Compressing/Compressor.cs
+++ b/src/Lucene.Net/Codecs/Compressing/Compressor.cs
@@ -31,8 +31,8 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// Compress bytes into <code>out</code>. It it the responsibility of 
the
-        /// compressor to add all necessary information so that a <seealso 
cref="Decompressor"/>
+        /// Compress bytes into <paramref name="out"/>. It is the 
responsibility of the
+        /// compressor to add all necessary information so that a <see 
cref="Decompressor"/>
         /// will know when to stop decompressing bytes from the stream.
         /// </summary>
         public abstract void Compress(byte[] bytes, int off, int len, 
DataOutput @out);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/Decompressor.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Codecs/Compressing/Decompressor.cs 
b/src/Lucene.Net/Codecs/Compressing/Decompressor.cs
index d1e0641..726841d 100644
--- a/src/Lucene.Net/Codecs/Compressing/Decompressor.cs
+++ b/src/Lucene.Net/Codecs/Compressing/Decompressor.cs
@@ -33,18 +33,18 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// Decompress bytes that were stored between offsets 
<code>offset</code> and
-        /// <code>offset+length</code> in the original stream from the 
compressed
-        /// stream <code>in</code> to <code>bytes</code>. After returning, the 
length
-        /// of <code>bytes</code> (<code>bytes.length</code>) must be equal to
-        /// <code>length</code>. Implementations of this method are free to 
resize
-        /// <code>bytes</code> depending on their needs.
+        /// Decompress bytes that were stored between offsets <paramref 
name="offset"/> and
+        /// <c>offset+length</c> in the original stream from the compressed
+        /// stream <paramref name="in"/> to <paramref name="bytes"/>. After 
returning, the length
+        /// of <paramref name="bytes"/> (<c>bytes.Length</c>) must be equal to
+        /// <paramref name="length"/>. Implementations of this method are free 
to resize
+        /// <paramref name="bytes"/> depending on their needs.
         /// </summary>
-        /// <param name="in"> the input that stores the compressed stream 
</param>
-        /// <param name="originalLength"> the length of the original data 
(before compression) </param>
-        /// <param name="offset"> bytes before this offset do not need to be 
decompressed </param>
-        /// <param name="length"> bytes after <code>offset+length</code> do 
not need to be decompressed </param>
-        /// <param name="bytes"> a <seealso cref="BytesRef"/> where to store 
the decompressed data </param>
+        /// <param name="in"> The input that stores the compressed stream. 
</param>
+        /// <param name="originalLength"> The length of the original data 
(before compression). </param>
+        /// <param name="offset"> Bytes before this offset do not need to be 
decompressed. </param>
+        /// <param name="length"> Bytes after <c>offset+length</c> do not need 
to be decompressed. </param>
+        /// <param name="bytes"> A <see cref="BytesRef"/> where to store the 
decompressed data. </param>
         public abstract void Decompress(DataInput @in, int originalLength, int 
offset, int length, BytesRef bytes);
 
         public abstract object Clone();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/5dc5193a/src/Lucene.Net/Codecs/Compressing/LZ4.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net/Codecs/Compressing/LZ4.cs 
b/src/Lucene.Net/Codecs/Compressing/LZ4.cs
index cf05994..0c4e38d 100644
--- a/src/Lucene.Net/Codecs/Compressing/LZ4.cs
+++ b/src/Lucene.Net/Codecs/Compressing/LZ4.cs
@@ -27,7 +27,7 @@ namespace Lucene.Net.Codecs.Compressing
 
     /// <summary>
     /// LZ4 compression and decompression routines.
-    ///
+    /// <para/>
     /// http://code.google.com/p/lz4/
     /// http://fastcompression.blogspot.fr/p/lz4.html
     /// </summary>
@@ -56,7 +56,7 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// NOTE: This was readInt() in Lucene
+        /// NOTE: This was readInt() in Lucene.
         /// </summary>
         private static int ReadInt32(byte[] buf, int i)
         {
@@ -65,7 +65,7 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// NOTE: This was readIntEquals() in Lucene
+        /// NOTE: This was readIntEquals() in Lucene.
         /// </summary>
         private static bool ReadInt32Equals(byte[] buf, int i, int j)
         {
@@ -94,8 +94,8 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// Decompress at least <code>decompressedLen</code> bytes into
-        /// <code>dest[dOff:]</code>. Please note that <code>dest</code> must 
be large
+        /// Decompress at least <paramref name="decompressedLen"/> bytes into
+        /// <c>dest[dOff:]</c>. Please note that <paramref name="dest"/> must 
be large
         /// enough to be able to hold <b>all</b> decompressed data (meaning 
that you
         /// need to know the total decompressed length).
         /// </summary>
@@ -241,8 +241,8 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// Compress <code>bytes[off:off+len]</code> into <code>out</code> 
using
-        /// at most 16KB of memory. <code>ht</code> shouldn't be shared across 
threads
+        /// Compress <c>bytes[off:off+len]</c> into <paramref name="out"/> 
using
+        /// at most 16KB of memory. <paramref name="ht"/> shouldn't be shared 
across threads
         /// but can safely be reused.
         /// </summary>
         public static void Compress(byte[] bytes, int off, int len, DataOutput 
@out, HashTable ht)
@@ -475,12 +475,12 @@ namespace Lucene.Net.Codecs.Compressing
         }
 
         /// <summary>
-        /// Compress <code>bytes[off:off+len]</code> into <code>out</code>. 
Compared to
-        /// <seealso cref="LZ4#compress(byte[], int, int, DataOutput, 
HashTable)"/>, this method
+        /// Compress <c>bytes[off:off+len]</c> into <paramref name="out"/>. 
Compared to
+        /// <see cref="LZ4.Compress(byte[], int, int, DataOutput, 
HashTable)"/>, this method
         /// is slower and uses more memory (~ 256KB per thread) but should 
provide
         /// better compression ratios (especially on large inputs) because it 
chooses
         /// the best match among up to 256 candidates and then performs 
trade-offs to
-        /// fix overlapping matches. <code>ht</code> shouldn't be shared 
across threads
+        /// fix overlapping matches. <paramref name="ht"/> shouldn't be shared 
across threads
         /// but can safely be reused.
         /// </summary>
         public static void CompressHC(byte[] src, int srcOff, int srcLen, 
DataOutput @out, HCHashTable ht)

Reply via email to