http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/Terms.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/Terms.cs b/src/Lucene.Net/Index/Terms.cs index 215a505..69e2ca0 100644 --- a/src/Lucene.Net/Index/Terms.cs +++ b/src/Lucene.Net/Index/Terms.cs @@ -24,7 +24,8 @@ namespace Lucene.Net.Index using CompiledAutomaton = Lucene.Net.Util.Automaton.CompiledAutomaton; /// <summary> - /// Access to the terms in a specific field. See <seealso cref="Fields"/>. + /// Access to the terms in a specific field. See <see cref="Fields"/>. + /// <para/> /// @lucene.experimental /// </summary> #if FEATURE_SERIALIZABLE @@ -34,7 +35,7 @@ namespace Lucene.Net.Index { /// <summary> /// Sole constructor. (For invocation by subclass - /// constructors, typically implicit.) + /// constructors, typically implicit.) /// </summary> protected Terms() { @@ -42,25 +43,25 @@ namespace Lucene.Net.Index /// <summary> /// Returns an iterator that will step through all - /// terms. this method will not return null. If you have - /// a previous TermsEnum, for example from a different - /// field, you can pass it for possible reuse if the - /// implementation can do so. + /// terms. This method will not return <c>null</c>. If you have + /// a previous <see cref="TermsEnum"/>, for example from a different + /// field, you can pass it for possible reuse if the + /// implementation can do so. /// </summary> public abstract TermsEnum GetIterator(TermsEnum reuse); /// <summary> - /// Returns a TermsEnum that iterates over all terms that - /// are accepted by the provided {@link - /// CompiledAutomaton}. If the <code>startTerm</code> is - /// provided then the returned enum will only accept terms - /// > <code>startTerm</code>, but you still must call - /// next() first to get to the first term. Note that the - /// provided <code>startTerm</code> must be accepted by - /// the automaton. 
+ /// Returns a <see cref="TermsEnum"/> that iterates over all terms that + /// are accepted by the provided + /// <see cref="CompiledAutomaton"/>. If the <paramref name="startTerm"/> is + /// provided then the returned enum will only accept terms + /// > <paramref name="startTerm"/>, but you still must call + /// <see cref="TermsEnum.Next()"/> first to get to the first term. Note that the + /// provided <paramref name="startTerm"/> must be accepted by + /// the automaton. /// - /// <p><b>NOTE</b>: the returned TermsEnum cannot - /// seek</p>. + /// <para><b>NOTE</b>: the returned <see cref="TermsEnum"/> cannot + /// seek</para>. /// </summary> public virtual TermsEnum Intersect(CompiledAutomaton compiled, BytesRef startTerm) { @@ -105,11 +106,11 @@ namespace Lucene.Net.Index } /// <summary> - /// Return the BytesRef Comparer used to sort terms - /// provided by the iterator. this method may return null - /// if there are no terms. this method may be invoked - /// many times; it's best to cache a single instance & - /// reuse it. + /// Return the <see cref="T:IComparer{BytesRef}"/> used to sort terms + /// provided by the iterator. This method may return <c>null</c> + /// if there are no terms. This method may be invoked + /// many times; it's best to cache a single instance & + /// reuse it. /// </summary> public abstract IComparer<BytesRef> Comparer { get; } @@ -123,53 +124,53 @@ namespace Lucene.Net.Index public abstract long Count { get; } /// <summary> - /// Returns the sum of <seealso cref="TermsEnum#totalTermFreq"/> for - /// all terms in this field, or -1 if this measure isn't - /// stored by the codec (or if this fields omits term freq - /// and positions). Note that, just like other term - /// measures, this measure does not take deleted documents - /// into account. 
+ /// Returns the sum of <see cref="TermsEnum.TotalTermFreq"/> for + /// all terms in this field, or -1 if this measure isn't + /// stored by the codec (or if this fields omits term freq + /// and positions). Note that, just like other term + /// measures, this measure does not take deleted documents + /// into account. /// </summary> - public abstract long SumTotalTermFreq { get; } + public abstract long SumTotalTermFreq { get; } // LUCENENET TODO: API Make GetSumTotalTermFreq() (conversion) /// <summary> - /// Returns the sum of <seealso cref="TermsEnum#docFreq()"/> for - /// all terms in this field, or -1 if this measure isn't - /// stored by the codec. Note that, just like other term - /// measures, this measure does not take deleted documents - /// into account. + /// Returns the sum of <see cref="TermsEnum.DocFreq"/> for + /// all terms in this field, or -1 if this measure isn't + /// stored by the codec. Note that, just like other term + /// measures, this measure does not take deleted documents + /// into account. /// </summary> - public abstract long SumDocFreq { get; } + public abstract long SumDocFreq { get; } // LUCENENET TODO: API Make GetSumDocFreq() (conversion) /// <summary> /// Returns the number of documents that have at least one - /// term for this field, or -1 if this measure isn't - /// stored by the codec. Note that, just like other term - /// measures, this measure does not take deleted documents - /// into account. + /// term for this field, or -1 if this measure isn't + /// stored by the codec. Note that, just like other term + /// measures, this measure does not take deleted documents + /// into account. /// </summary> public abstract int DocCount { get; } /// <summary> /// Returns true if documents in this field store - /// per-document term frequency (<seealso cref="DocsEnum#freq"/>). + /// per-document term frequency (<see cref="DocsEnum.Freq"/>). 
/// </summary> public abstract bool HasFreqs { get; } /// <summary> - /// Returns true if documents in this field store offsets. </summary> + /// Returns <c>true</c> if documents in this field store offsets. </summary> public abstract bool HasOffsets { get; } /// <summary> - /// Returns true if documents in this field store positions. </summary> + /// Returns <c>true</c> if documents in this field store positions. </summary> public abstract bool HasPositions { get; } /// <summary> - /// Returns true if documents in this field store payloads. </summary> + /// Returns <c>true</c> if documents in this field store payloads. </summary> public abstract bool HasPayloads { get; } /// <summary> - /// Zero-length array of <seealso cref="Terms"/>. </summary> + /// Zero-length array of <see cref="Terms"/>. </summary> public static readonly Terms[] EMPTY_ARRAY = new Terms[0]; } } \ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/TermsEnum.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/TermsEnum.cs b/src/Lucene.Net/Index/TermsEnum.cs index 41733c5..61b6fc1 100644 --- a/src/Lucene.Net/Index/TermsEnum.cs +++ b/src/Lucene.Net/Index/TermsEnum.cs @@ -28,10 +28,10 @@ namespace Lucene.Net.Index /// <summary> /// Iterator to seek (<see cref="SeekCeil(BytesRef)"/>, /// <see cref="SeekExact(BytesRef)"/>) or step through - /// (<see cref="Next"/> terms to obtain frequency information + /// (<see cref="Next()"/> terms to obtain frequency information /// (<see cref="DocFreq"/>), <see cref="DocsEnum"/> or /// <see cref="DocsAndPositionsEnum"/> for the current term - /// (<see cref="Docs"/>). + /// (<see cref="Docs(IBits, DocsEnum)"/>). /// /// <para/>Term enumerations are always ordered by /// <see cref="Comparer"/>. Each term in the enumeration is @@ -40,7 +40,7 @@ namespace Lucene.Net.Index /// <para/>The <see cref="TermsEnum"/> is unpositioned when you first obtain it /// and you must first successfully call <see cref="Next"/> or one /// of the <c>Seek</c> methods. - /// + /// <para/> /// @lucene.experimental /// </summary> #if FEATURE_SERIALIZABLE @@ -95,9 +95,9 @@ namespace Lucene.Net.Index /// <summary> /// Attempts to seek to the exact term, returning - /// true if the term is found. If this returns false, the - /// enum is unpositioned. For some codecs, <see cref="SeekExact"/> may - /// be substantially faster than <see cref="SeekCeil"/>. + /// <c>true</c> if the term is found. If this returns <c>false</c>, the + /// enum is unpositioned. For some codecs, <see cref="SeekExact(BytesRef)"/> may + /// be substantially faster than <see cref="SeekCeil(BytesRef)"/>. 
/// </summary> public virtual bool SeekExact(BytesRef text) { @@ -157,8 +157,8 @@ namespace Lucene.Net.Index public abstract BytesRef Term { get; } /// <summary> - /// Returns ordinal position for current term. this is an - /// optional method (the codec may throw <see cref="NotSupportedException"/>. + /// Returns ordinal position for current term. This is an + /// optional property (the codec may throw <see cref="NotSupportedException"/>. /// Do not call this when the enum is unpositioned. /// </summary> public abstract long Ord { get; } // LUCENENET NOTE: Although this isn't a great candidate for a property, did so to make API consistent @@ -172,22 +172,22 @@ namespace Lucene.Net.Index /// <summary> /// Returns the total number of occurrences of this term - /// across all documents (the sum of the freq() for each - /// doc that has this term). this will be -1 if the - /// codec doesn't support this measure. Note that, like - /// other term measures, this measure does not take - /// deleted documents into account. + /// across all documents (the sum of the Freq for each + /// doc that has this term). This will be -1 if the + /// codec doesn't support this measure. Note that, like + /// other term measures, this measure does not take + /// deleted documents into account. /// </summary> public abstract long TotalTermFreq { get; } // LUCENENET NOTE: Although this isn't a great candidate for a property, did so to make API consistent /// <summary> /// Get <see cref="DocsEnum"/> for the current term. Do not /// call this when the enum is unpositioned. This method - /// will not return null. + /// will not return <c>null</c>. 
/// </summary> - /// <param name="liveDocs"> unset bits are documents that should not + /// <param name="liveDocs"> Unset bits are documents that should not /// be returned </param> - /// <param name="reuse"> pass a prior <see cref="DocsEnum"/> for possible reuse </param> + /// <param name="reuse"> Pass a prior <see cref="DocsEnum"/> for possible reuse </param> public DocsEnum Docs(IBits liveDocs, DocsEnum reuse) { return Docs(liveDocs, reuse, DocsFlags.FREQS); @@ -197,26 +197,26 @@ namespace Lucene.Net.Index /// Get <see cref="DocsEnum"/> for the current term, with /// control over whether freqs are required. Do not /// call this when the enum is unpositioned. This method - /// will not return null. + /// will not return <c>null</c>. /// </summary> - /// <param name="liveDocs"> unset bits are documents that should not + /// <param name="liveDocs"> Unset bits are documents that should not /// be returned </param> - /// <param name="reuse"> pass a prior DocsEnum for possible reuse </param> - /// <param name="flags"> specifies which optional per-document values + /// <param name="reuse"> Pass a prior DocsEnum for possible reuse </param> + /// <param name="flags"> Specifies which optional per-document values /// you require; <see cref="DocsFlags"/></param> /// <seealso cref="Docs(IBits, DocsEnum)"/> public abstract DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags); /// <summary> - /// Get <seealso cref="DocsAndPositionsEnum"/> for the current term. - /// Do not call this when the enum is unpositioned. this - /// method will return null if positions were not - /// indexed. + /// Get <see cref="DocsAndPositionsEnum"/> for the current term. + /// Do not call this when the enum is unpositioned. This + /// method will return <c>null</c> if positions were not + /// indexed. 
/// </summary> - /// <param name="liveDocs"> unset bits are documents that should not - /// be returned </param> - /// <param name="reuse"> pass a prior DocsAndPositionsEnum for possible reuse </param> - /// <seealso cref= #docsAndPositions(Bits, DocsAndPositionsEnum, int) </seealso> + /// <param name="liveDocs"> Unset bits are documents that should not + /// be returned </param> + /// <param name="reuse"> Pass a prior DocsAndPositionsEnum for possible reuse </param> + /// <seealso cref="DocsAndPositions(IBits, DocsAndPositionsEnum, DocsAndPositionsFlags)"/> public DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse) { return DocsAndPositions(liveDocs, reuse, DocsAndPositionsFlags.OFFSETS | DocsAndPositionsFlags.PAYLOADS); @@ -228,20 +228,20 @@ namespace Lucene.Net.Index /// required. Some codecs may be able to optimize their /// implementation when offsets and/or payloads are not required. /// Do not call this when the enum is unpositioned. This - /// will return null if positions were not indexed. + /// will return <c>null</c> if positions were not indexed. /// </summary> - /// <param name="liveDocs"> unset bits are documents that should not + /// <param name="liveDocs"> Unset bits are documents that should not /// be returned </param> - /// <param name="reuse"> pass a prior DocsAndPositionsEnum for possible reuse </param> - /// <param name="flags"> specifies which optional per-position values you + /// <param name="reuse"> Pass a prior DocsAndPositionsEnum for possible reuse </param> + /// <param name="flags"> Specifies which optional per-position values you /// require; see <see cref="DocsAndPositionsFlags"/>. 
</param> public abstract DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags); /// <summary> - /// Expert: Returns the TermsEnums internal state to position the <see cref="TermsEnum"/> + /// Expert: Returns the <see cref="TermsEnum"/>s internal state to position the <see cref="TermsEnum"/> /// without re-seeking the term dictionary. /// <para/> - /// NOTE: A seek by <see cref="GetTermState"/> might not capture the + /// NOTE: A seek by <see cref="GetTermState()"/> might not capture the /// <see cref="AttributeSource"/>'s state. Callers must maintain the /// <see cref="AttributeSource"/> states separately /// </summary> @@ -272,7 +272,7 @@ namespace Lucene.Net.Index /// in <see cref="Lucene.Net.Search.MultiTermQuery"/> /// <para/><em>Please note:</em> this enum should be unmodifiable, /// but it is currently possible to add Attributes to it. - /// this should not be a problem, as the enum is always empty and + /// This should not be a problem, as the enum is always empty and /// the existence of unused Attributes does not matter. /// </summary> public static readonly TermsEnum EMPTY = new TermsEnumAnonymousInnerClassHelper(); http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/TermsHash.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/TermsHash.cs b/src/Lucene.Net/Index/TermsHash.cs index 77c2d6e..d0a856c 100644 --- a/src/Lucene.Net/Index/TermsHash.cs +++ b/src/Lucene.Net/Index/TermsHash.cs @@ -26,13 +26,13 @@ namespace Lucene.Net.Index using Int32BlockPool = Lucene.Net.Util.Int32BlockPool; /// <summary> - /// this class implements <seealso cref="InvertedDocConsumer"/>, which - /// is passed each token produced by the analyzer on each - /// field. It stores these tokens in a hash table, and - /// allocates separate byte streams per token. 
Consumers of - /// this class, eg <seealso cref="FreqProxTermsWriter"/> and {@link - /// TermVectorsConsumer}, write their own byte streams - /// under each term. + /// This class implements <see cref="InvertedDocConsumer"/>, which + /// is passed each token produced by the analyzer on each + /// field. It stores these tokens in a hash table, and + /// allocates separate byte streams per token. Consumers of + /// this class, eg <see cref="FreqProxTermsWriter"/> and + /// <see cref="TermVectorsConsumer"/>, write their own byte streams + /// under each term. /// </summary> #if FEATURE_SERIALIZABLE [Serializable] http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/TermsHashConsumerPerField.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/TermsHashConsumerPerField.cs b/src/Lucene.Net/Index/TermsHashConsumerPerField.cs index 3888c01..f5972df 100644 --- a/src/Lucene.Net/Index/TermsHashConsumerPerField.cs +++ b/src/Lucene.Net/Index/TermsHashConsumerPerField.cs @@ -21,10 +21,10 @@ namespace Lucene.Net.Index */ /// <summary> - /// Implement this class to plug into the TermsHash - /// processor, which inverts & stores Tokens into a hash - /// table and provides an API for writing bytes into - /// multiple streams for each unique Token. + /// Implement this class to plug into the <see cref="TermsHash"/> + /// processor, which inverts & stores <see cref="Analysis.Token"/>s into a hash + /// table and provides an API for writing bytes into + /// multiple streams for each unique <see cref="Analysis.Token"/>. 
/// </summary> #if FEATURE_SERIALIZABLE [Serializable] http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/TermsHashPerField.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/TermsHashPerField.cs b/src/Lucene.Net/Index/TermsHashPerField.cs index 77aa688..1769e38 100644 --- a/src/Lucene.Net/Index/TermsHashPerField.cs +++ b/src/Lucene.Net/Index/TermsHashPerField.cs @@ -121,7 +121,7 @@ namespace Lucene.Net.Index } /// <summary> - /// Collapse the hash table & sort in-place. </summary> + /// Collapse the hash table & sort in-place. </summary> public int[] SortPostings(IComparer<BytesRef> termComp) { return bytesHash.Sort(termComp); @@ -152,9 +152,11 @@ namespace Lucene.Net.Index return doCall || doNextCall; } - // Secondary entry point (for 2nd & subsequent TermsHash), - // because token text has already been "interned" into - // textStart, so we hash by textStart + /// <summary> + /// Secondary entry point (for 2nd & subsequent <see cref="TermsHash"/>), + /// because token text has already been "interned" into + /// <paramref name="textStart"/>, so we hash by <paramref name="textStart"/> + /// </summary> public void Add(int textStart) { int termID = bytesHash.AddByPoolOffset(textStart); http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs index 9d0d5e3..34b49ba 100644 --- a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs +++ b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs @@ -26,14 +26,14 @@ namespace Lucene.Net.Index using ThreadState = Lucene.Net.Index.DocumentsWriterPerThreadPool.ThreadState; //javadoc /// <summary> - /// A <seealso 
cref="DocumentsWriterPerThreadPool"/> implementation that tries to assign an - /// indexing thread to the same <seealso cref="ThreadState"/> each time the thread tries to - /// obtain a <seealso cref="ThreadState"/>. Once a new <seealso cref="ThreadState"/> is created it is + /// A <see cref="DocumentsWriterPerThreadPool"/> implementation that tries to assign an + /// indexing thread to the same <see cref="ThreadState"/> each time the thread tries to + /// obtain a <see cref="ThreadState"/>. Once a new <see cref="ThreadState"/> is created it is /// associated with the creating thread. Subsequently, if the threads associated - /// <seealso cref="ThreadState"/> is not in use it will be associated with the requesting - /// thread. Otherwise, if the <seealso cref="ThreadState"/> is used by another thread - /// <seealso cref="ThreadAffinityDocumentsWriterThreadPool"/> tries to find the currently - /// minimal contended <seealso cref="ThreadState"/>. + /// <see cref="ThreadState"/> is not in use it will be associated with the requesting + /// thread. Otherwise, if the <see cref="ThreadState"/> is used by another thread + /// <see cref="ThreadAffinityDocumentsWriterThreadPool"/> tries to find the currently - /// minimal contended <seealso cref="ThreadState"/>. + /// minimal contended <see cref="ThreadState"/>. /// </summary> #if FEATURE_SERIALIZABLE [Serializable] @@ -43,7 +43,7 @@ namespace Lucene.Net.Index private IDictionary<Thread, ThreadState> threadBindings = new ConcurrentDictionary<Thread, ThreadState>(); /// <summary> - /// Creates a new <seealso cref="ThreadAffinityDocumentsWriterThreadPool"/> with a given maximum of <seealso cref="ThreadState"/>s. + /// Creates a new <see cref="ThreadAffinityDocumentsWriterThreadPool"/> with a given maximum of <see cref="ThreadState"/>s. 
/// </summary> public ThreadAffinityDocumentsWriterThreadPool(int maxNumPerThreads) : base(maxNumPerThreads) http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/TieredMergePolicy.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/TieredMergePolicy.cs b/src/Lucene.Net/Index/TieredMergePolicy.cs index f41f6c6..1fa5a72 100644 --- a/src/Lucene.Net/Index/TieredMergePolicy.cs +++ b/src/Lucene.Net/Index/TieredMergePolicy.cs @@ -57,7 +57,7 @@ namespace Lucene.Net.Index /// of the segments, always pro-rates by percent deletes, /// and does not apply any maximum segment size during /// forceMerge (unlike <see cref="LogByteSizeMergePolicy"/>). - /// + /// <para/> /// @lucene.experimental /// </summary> http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/TrackingIndexWriter.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/TrackingIndexWriter.cs b/src/Lucene.Net/Index/TrackingIndexWriter.cs index 5789685..8d02cf7 100644 --- a/src/Lucene.Net/Index/TrackingIndexWriter.cs +++ b/src/Lucene.Net/Index/TrackingIndexWriter.cs @@ -27,15 +27,15 @@ namespace Lucene.Net.Index /// <summary> /// Class that tracks changes to a delegated - /// IndexWriter, used by {@link - /// ControlledRealTimeReopenThread} to ensure specific - /// changes are visible. Create this class (passing your - /// IndexWriter), and then pass this class to {@link - /// ControlledRealTimeReopenThread}. - /// Be sure to make all changes via the - /// TrackingIndexWriter, otherwise {@link - /// ControlledRealTimeReopenThread} won't know about the changes. - /// + /// <see cref="Index.IndexWriter"/>, used by + /// <see cref="Search.ControlledRealTimeReopenThread{T}"/> to ensure specific + /// changes are visible. 
Create this class (passing your + /// <see cref="Index.IndexWriter"/>), and then pass this class to + /// <see cref="Search.ControlledRealTimeReopenThread{T}"/>. + /// Be sure to make all changes via the + /// <see cref="TrackingIndexWriter"/>, otherwise + /// <see cref="Search.ControlledRealTimeReopenThread{T}"/> won't know about the changes. + /// <para/> /// @lucene.experimental /// </summary> #if FEATURE_SERIALIZABLE @@ -47,8 +47,8 @@ namespace Lucene.Net.Index private readonly AtomicInt64 indexingGen = new AtomicInt64(1); /// <summary> - /// Create a {@code TrackingIndexWriter} wrapping the - /// provided <seealso cref="IndexWriter"/>. + /// Create a <see cref="TrackingIndexWriter"/> wrapping the + /// provided <see cref="Index.IndexWriter"/>. /// </summary> public TrackingIndexWriter(IndexWriter writer) { @@ -56,9 +56,9 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls {@link - /// IndexWriter#updateDocument(Term,Iterable,Analyzer)} - /// and returns the generation that reflects this change. + /// Calls + /// <see cref="IndexWriter.UpdateDocument(Term, IEnumerable{IIndexableField}, Analyzer)"/> + /// and returns the generation that reflects this change. /// </summary> public virtual long UpdateDocument(Term t, IEnumerable<IIndexableField> d, Analyzer a) { @@ -68,9 +68,9 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls {@link - /// IndexWriter#updateDocument(Term,Iterable)} and - /// returns the generation that reflects this change. + /// Calls + /// <see cref="IndexWriter.UpdateDocument(Term, IEnumerable{IIndexableField})"/> and + /// returns the generation that reflects this change. /// </summary> public virtual long UpdateDocument(Term t, IEnumerable<IIndexableField> d) { @@ -80,9 +80,9 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls {@link - /// IndexWriter#updateDocuments(Term,Iterable,Analyzer)} - /// and returns the generation that reflects this change. 
+ /// Calls + /// <see cref="IndexWriter.UpdateDocuments(Term, IEnumerable{IEnumerable{IIndexableField}}, Analyzer)"/> + /// and returns the generation that reflects this change. /// </summary> public virtual long UpdateDocuments(Term t, IEnumerable<IEnumerable<IIndexableField>> docs, Analyzer a) { @@ -92,9 +92,9 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls {@link - /// IndexWriter#updateDocuments(Term,Iterable)} and returns - /// the generation that reflects this change. + /// Calls + /// <see cref="IndexWriter.UpdateDocuments(Term, IEnumerable{IEnumerable{IIndexableField}})"/> and returns + /// the generation that reflects this change. /// </summary> public virtual long UpdateDocuments(Term t, IEnumerable<IEnumerable<IIndexableField>> docs) { @@ -104,8 +104,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls <seealso cref="IndexWriter#deleteDocuments(Term)"/> and - /// returns the generation that reflects this change. + /// Calls <see cref="IndexWriter.DeleteDocuments(Term)"/> and + /// returns the generation that reflects this change. /// </summary> public virtual long DeleteDocuments(Term t) { @@ -115,8 +115,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls <seealso cref="IndexWriter#deleteDocuments(Term...)"/> and - /// returns the generation that reflects this change. + /// Calls <see cref="IndexWriter.DeleteDocuments(Term[])"/> and + /// returns the generation that reflects this change. /// </summary> public virtual long DeleteDocuments(params Term[] terms) { @@ -126,8 +126,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls <seealso cref="IndexWriter#deleteDocuments(Query)"/> and - /// returns the generation that reflects this change. + /// Calls <see cref="IndexWriter.DeleteDocuments(Query)"/> and + /// returns the generation that reflects this change. 
/// </summary> public virtual long DeleteDocuments(Query q) { @@ -137,8 +137,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls <seealso cref="IndexWriter#deleteDocuments(Query...)"/> - /// and returns the generation that reflects this change. + /// Calls <see cref="IndexWriter.DeleteDocuments(Query[])"/> + /// and returns the generation that reflects this change. /// </summary> public virtual long DeleteDocuments(params Query[] queries) { @@ -148,8 +148,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls <seealso cref="IndexWriter#deleteAll"/> and returns the - /// generation that reflects this change. + /// Calls <see cref="IndexWriter.DeleteAll()"/> and returns the + /// generation that reflects this change. /// </summary> public virtual long DeleteAll() { @@ -159,9 +159,9 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls {@link - /// IndexWriter#addDocument(Iterable,Analyzer)} and - /// returns the generation that reflects this change. + /// Calls + /// <see cref="IndexWriter.AddDocument(IEnumerable{IIndexableField}, Analyzer)"/> and + /// returns the generation that reflects this change. /// </summary> public virtual long AddDocument(IEnumerable<IIndexableField> d, Analyzer a) { @@ -171,9 +171,9 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls {@link - /// IndexWriter#addDocuments(Iterable,Analyzer)} and - /// returns the generation that reflects this change. + /// Calls + /// <see cref="IndexWriter.AddDocuments(IEnumerable{IEnumerable{IIndexableField}}, Analyzer)"/> and + /// returns the generation that reflects this change. /// </summary> public virtual long AddDocuments(IEnumerable<IEnumerable<IIndexableField>> docs, Analyzer a) { @@ -183,8 +183,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls <seealso cref="IndexWriter#addDocument(Iterable)"/> - /// and returns the generation that reflects this change. 
+ /// Calls <see cref="IndexWriter.AddDocument(IEnumerable{IIndexableField})"/> + /// and returns the generation that reflects this change. /// </summary> public virtual long AddDocument(IEnumerable<IIndexableField> d) { @@ -194,8 +194,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls <seealso cref="IndexWriter#addDocuments(Iterable)"/> and - /// returns the generation that reflects this change. + /// Calls <see cref="IndexWriter.AddDocuments(IEnumerable{IEnumerable{IIndexableField}})"/> and + /// returns the generation that reflects this change. /// </summary> public virtual long AddDocuments(IEnumerable<IEnumerable<IIndexableField>> docs) { @@ -205,8 +205,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls <seealso cref="IndexWriter#addIndexes(Directory...)"/> and - /// returns the generation that reflects this change. + /// Calls <see cref="IndexWriter.AddIndexes(Directory[])"/> and + /// returns the generation that reflects this change. /// </summary> public virtual long AddIndexes(params Directory[] dirs) { @@ -216,8 +216,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Calls <seealso cref="IndexWriter#addIndexes(IndexReader...)"/> - /// and returns the generation that reflects this change. + /// Calls <see cref="IndexWriter.AddIndexes(IndexReader[])"/> + /// and returns the generation that reflects this change. /// </summary> public virtual long AddIndexes(params IndexReader[] readers) { @@ -237,7 +237,7 @@ namespace Lucene.Net.Index } /// <summary> - /// Return the wrapped <seealso cref="IndexWriter"/>. </summary> + /// Return the wrapped <see cref="Index.IndexWriter"/>. </summary> public virtual IndexWriter IndexWriter { get @@ -248,7 +248,7 @@ namespace Lucene.Net.Index /// <summary> /// Return and increment current gen. 
- /// + /// <para/> /// @lucene.internal /// </summary> public virtual long GetAndIncrementGeneration() { @@ -257,9 +257,9 @@ namespace Lucene.Net.Index } /// <summary> - /// Cals {@link - /// IndexWriter#tryDeleteDocument(IndexReader,int)} and - /// returns the generation that reflects this change. + /// Calls + /// <see cref="IndexWriter.TryDeleteDocument(IndexReader, int)"/> and + /// returns the generation that reflects this change. /// </summary> public virtual long TryDeleteDocument(IndexReader reader, int docID) { http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/TwoPhaseCommit.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/TwoPhaseCommit.cs b/src/Lucene.Net/Index/TwoPhaseCommit.cs index 0fef4a6..af896b5 100644 --- a/src/Lucene.Net/Index/TwoPhaseCommit.cs +++ b/src/Lucene.Net/Index/TwoPhaseCommit.cs @@ -19,9 +19,9 @@ namespace Lucene.Net.Index /// <summary> /// An interface for implementations that support 2-phase commit. You can use - /// <seealso cref="TwoPhaseCommitTool"/> to execute a 2-phase commit algorithm over several - /// <seealso cref="ITwoPhaseCommit"/>s. - /// + /// <see cref="TwoPhaseCommitTool"/> to execute a 2-phase commit algorithm over several + /// <see cref="ITwoPhaseCommit"/>s. + /// <para/> /// @lucene.experimental /// </summary> public interface ITwoPhaseCommit { @@ -29,14 +29,14 @@ namespace Lucene.Net.Index /// <summary> /// The first stage of a 2-phase commit. Implementations should do as much work /// as possible in this method, but avoid actual committing changes. If the - /// 2-phase commit fails, <seealso cref="#rollback()"/> is called to discard all changes + /// 2-phase commit fails, <see cref="Rollback()"/> is called to discard all changes /// since last successful commit. /// </summary> void PrepareCommit(); /// <summary> /// The second phase of a 2-phase commit. 
Implementations should ideally do - /// very little work in this method (following <seealso cref="#prepareCommit()"/>, and + /// very little work in this method (following <see cref="PrepareCommit()"/>, and /// after it returns, the caller can assume that the changes were successfully /// committed to the underlying storage. /// </summary> @@ -44,8 +44,8 @@ namespace Lucene.Net.Index /// <summary> /// Discards any changes that have occurred since the last commit. In a 2-phase - /// commit algorithm, where one of the objects failed to <seealso cref="#commit()"/> or - /// <seealso cref="#prepareCommit()"/>, this method is used to roll all other objects + /// commit algorithm, where one of the objects failed to <see cref="Commit()"/> or + /// <see cref="PrepareCommit()"/>, this method is used to roll all other objects /// back to their previous state. /// </summary> void Rollback(); http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/TwoPhaseCommitTool.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/TwoPhaseCommitTool.cs b/src/Lucene.Net/Index/TwoPhaseCommitTool.cs index 2b60ce6..06f0e76 100644 --- a/src/Lucene.Net/Index/TwoPhaseCommitTool.cs +++ b/src/Lucene.Net/Index/TwoPhaseCommitTool.cs @@ -24,9 +24,10 @@ namespace Lucene.Net.Index /// <summary> /// A utility for executing 2-phase commit on several objects. + /// <para/> + /// @lucene.experimental /// </summary> - /// <seealso cref= ITwoPhaseCommit - /// @lucene.experimental </seealso> + /// <seealso cref="ITwoPhaseCommit"/> #if FEATURE_SERIALIZABLE [Serializable] #endif @@ -39,8 +40,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Thrown by <seealso cref="TwoPhaseCommitTool#execute(TwoPhaseCommit...)"/> when an - /// object fails to prepareCommit(). + /// Thrown by <see cref="TwoPhaseCommitTool.Execute(ITwoPhaseCommit[])"/> when an + /// object fails to <see cref="ITwoPhaseCommit.PrepareCommit()"/>. 
/// </summary> // LUCENENET: All exeption classes should be marked serializable #if FEATURE_SERIALIZABLE @@ -76,8 +77,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Thrown by <seealso cref="TwoPhaseCommitTool#execute(TwoPhaseCommit...)"/> when an - /// object fails to commit(). + /// Thrown by <see cref="TwoPhaseCommitTool.Execute(ITwoPhaseCommit[])"/> when an + /// object fails to <see cref="ITwoPhaseCommit.Commit()"/>. /// </summary> // LUCENENET: All exeption classes should be marked serializable #if FEATURE_SERIALIZABLE @@ -112,7 +113,7 @@ namespace Lucene.Net.Index } /// <summary> - /// rollback all objects, discarding any exceptions that occur. </summary> + /// Rollback all objects, discarding any exceptions that occur. </summary> private static void Rollback(params ITwoPhaseCommit[] objects) { foreach (ITwoPhaseCommit tpc in objects) @@ -136,24 +137,24 @@ namespace Lucene.Net.Index /// <summary> /// Executes a 2-phase commit algorithm by first - /// <seealso cref="ITwoPhaseCommit#prepareCommit()"/> all objects and only if all succeed, - /// it proceeds with <seealso cref="ITwoPhaseCommit#commit()"/>. If any of the objects + /// <see cref="ITwoPhaseCommit.PrepareCommit()"/> all objects and only if all succeed, + /// it proceeds with <see cref="ITwoPhaseCommit.Commit()"/>. If any of the objects /// fail on either the preparation or actual commit, it terminates and - /// <seealso cref="ITwoPhaseCommit#rollback()"/> all of them. - /// <p> - /// <b>NOTE:</b> it may happen that an object fails to commit, after few have - /// already successfully committed. this tool will still issue a rollback + /// <see cref="ITwoPhaseCommit.Rollback()"/> all of them. + /// <para/> + /// <b>NOTE:</b> It may happen that an object fails to commit, after few have + /// already successfully committed. This tool will still issue a rollback /// instruction on them as well, but depending on the implementation, it may /// not have any effect. 
- /// <p> - /// <b>NOTE:</b> if any of the objects are {@code null}, this method simply + /// <para/> + /// <b>NOTE:</b> if any of the objects are <c>null</c>, this method simply /// skips over them. /// </summary> /// <exception cref="PrepareCommitFailException"> /// if any of the objects fail to - /// <seealso cref="ITwoPhaseCommit#prepareCommit()"/> </exception> + /// <see cref="ITwoPhaseCommit.PrepareCommit()"/> </exception> /// <exception cref="CommitFailException"> - /// if any of the objects fail to <seealso cref="ITwoPhaseCommit#commit()"/> </exception> + /// if any of the objects fail to <see cref="ITwoPhaseCommit.Commit()"/> </exception> public static void Execute(params ITwoPhaseCommit[] objects) { ITwoPhaseCommit tpc = null; http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/TwoStoredFieldsConsumers.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/TwoStoredFieldsConsumers.cs b/src/Lucene.Net/Index/TwoStoredFieldsConsumers.cs index 5f4e59c..7be6a56 100644 --- a/src/Lucene.Net/Index/TwoStoredFieldsConsumers.cs +++ b/src/Lucene.Net/Index/TwoStoredFieldsConsumers.cs @@ -20,7 +20,7 @@ namespace Lucene.Net.Index */ /// <summary> - /// Just switches between two <seealso cref="DocFieldConsumer"/>s. </summary> + /// Just switches between two <see cref="DocFieldConsumer"/>s. 
</summary> #if FEATURE_SERIALIZABLE [Serializable] #endif http://git-wip-us.apache.org/repos/asf/lucenenet/blob/646db0ce/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs ---------------------------------------------------------------------- diff --git a/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs b/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs index d536b5e..7ec2912 100644 --- a/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs +++ b/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs @@ -23,39 +23,42 @@ namespace Lucene.Net.Index using Constants = Lucene.Net.Util.Constants; /// <summary> - /// this <seealso cref="MergePolicy"/> is used for upgrading all existing segments of - /// an index when calling <seealso cref="IndexWriter#forceMerge(int)"/>. - /// All other methods delegate to the base {@code MergePolicy} given to the constructor. - /// this allows for an as-cheap-as possible upgrade of an older index by only upgrading segments that - /// are created by previous Lucene versions. forceMerge does no longer really merge; - /// it is just used to "forceMerge" older segment versions away. - /// <p>In general one would use <seealso cref="IndexUpgrader"/>, but for a fully customizeable upgrade, - /// you can use this like any other {@code MergePolicy} and call <seealso cref="IndexWriter#forceMerge(int)"/>: - /// <pre class="prettyprint lang-java"> - /// IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_XX, new KeywordAnalyzer()); - /// iwc.setMergePolicy(new UpgradeIndexMergePolicy(iwc.getMergePolicy())); - /// IndexWriter w = new IndexWriter(dir, iwc); - /// w.forceMerge(1); - /// w.Dispose(); - /// </pre> - /// <p><b>Warning:</b> this merge policy may reorder documents if the index was partially - /// upgraded before calling forceMerge (e.g., documents were added). If your application relies + /// This <see cref="MergePolicy"/> is used for upgrading all existing segments of + /// an index when calling <see cref="IndexWriter.ForceMerge(int)"/>. 
+ /// All other methods delegate to the base <see cref="MergePolicy"/> given to the constructor. + /// This allows for an as-cheap-as possible upgrade of an older index by only upgrading segments that + /// are created by previous Lucene versions. ForceMerge no longer really merges; + /// it is just used to "ForceMerge" older segment versions away. + /// <para/>In general one would use <see cref="IndexUpgrader"/>, but for a fully customizable upgrade, + /// you can use this like any other <see cref="MergePolicy"/> and call <see cref="IndexWriter.ForceMerge(int)"/>: + /// <code> + /// IndexWriterConfig iwc = new IndexWriterConfig(LuceneVersion.LUCENE_XX, new KeywordAnalyzer()); + /// iwc.MergePolicy = new UpgradeIndexMergePolicy(iwc.MergePolicy); + /// using (IndexWriter w = new IndexWriter(dir, iwc)) + /// { + /// w.ForceMerge(1); + /// } + /// </code> + /// <para/><b>Warning:</b> this merge policy may reorder documents if the index was partially + /// upgraded before calling <see cref="IndexWriter.ForceMerge(int)"/> (e.g., documents were added). If your application relies /// on "monotonicity" of doc IDs (which means that the order in which the documents - /// were added to the index is preserved), do a forceMerge(1) instead. Please note, the - /// delegate {@code MergePolicy} may also reorder documents. - /// @lucene.experimental </summary> - /// <seealso cref= IndexUpgrader </seealso> + /// were added to the index is preserved), do a <c>ForceMerge(1)</c> instead. Please note, the + /// delegate <see cref="MergePolicy"/> may also reorder documents. + /// <para/> + /// @lucene.experimental + /// </summary> + /// <seealso cref="IndexUpgrader"/> #if FEATURE_SERIALIZABLE [Serializable] #endif public class UpgradeIndexMergePolicy : MergePolicy { /// <summary> - /// Wrapped <seealso cref="MergePolicy"/>. </summary> + /// Wrapped <see cref="MergePolicy"/>. 
</summary> protected readonly MergePolicy m_base; /// <summary> - /// Wrap the given <seealso cref="MergePolicy"/> and intercept forceMerge requests to + /// Wrap the given <see cref="MergePolicy"/> and intercept <see cref="IndexWriter.ForceMerge(int)"/> requests to /// only upgrade segments written with previous Lucene versions. /// </summary> public UpgradeIndexMergePolicy(MergePolicy @base) @@ -64,8 +67,8 @@ namespace Lucene.Net.Index } /// <summary> - /// Returns if the given segment should be upgraded. The default implementation - /// will return {@code !Constants.LUCENE_MAIN_VERSION.equals(si.getVersion())}, + /// Returns <c>true</c> if the given segment should be upgraded. The default implementation + /// will return <c>!Constants.LUCENE_MAIN_VERSION.Equals(si.Version)</c>, /// so all segments created with a different version number than this Lucene version will /// get upgraded. /// </summary>
