http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b5cae3f3/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs b/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs
index cd8f315..bbd141c 100644
--- a/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs
+++ b/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs
@@ -106,7 +106,7 @@ namespace Lucene.Net.Analysis
             Analyzer a = new MockAnalyzer(Random(), single, false);
             AssertAnalyzesTo(a, "foobar", new string[] { "fo", "ob", "ar" }, new int[] { 0, 2, 4 }, new int[] { 2, 4, 6 });
             // make sure when last term is a "partial" match that End() is correct
-            AssertTokenStreamContents(a.TokenStream("bogus", new StringReader("fooba")), new string[] { "fo", "ob" }, new int[] { 0, 2 }, new int[] { 2, 4 }, new int[] { 1, 1 }, new int?(5));
+            AssertTokenStreamContents(a.GetTokenStream("bogus", new StringReader("fooba")), new string[] { "fo", "ob" }, new int[] { 0, 2 }, new int[] { 2, 4 }, new int[] { 1, 1 }, new int?(5));
             CheckRandomData(Random(), a, 100);
         }
 
@@ -119,7 +119,7 @@ namespace Lucene.Net.Analysis
             Analyzer a = new MockAnalyzer(Random(), single, false);
             AssertAnalyzesTo(a, "foobar", new string[] { "foo", "bar" }, new int[] { 0, 3 }, new int[] { 3, 6 });
             // make sure when last term is a "partial" match that End() is correct
-            AssertTokenStreamContents(a.TokenStream("bogus", new StringReader("fooba")), new string[] { "foo" }, new int[] { 0 }, new int[] { 3 }, new int[] { 1 }, new int?(5));
+            AssertTokenStreamContents(a.GetTokenStream("bogus", new StringReader("fooba")), new string[] { "foo" }, new int[] { 0 }, new int[] { 3 }, new int[] { 1 }, new int?(5));
             CheckRandomData(Random(), a, 100);
         }
 
@@ -170,8 +170,8 @@ namespace Lucene.Net.Analysis
         public virtual void TestTooLongToken()
         {
             Analyzer whitespace = new AnalyzerAnonymousInnerClassHelper(this);
-            AssertTokenStreamContents(whitespace.TokenStream("bogus", new StringReader("test 123 toolong ok ")), new string[] { "test", "123", "toolo", "ng", "ok" }, new int[] { 0, 5, 9, 14, 17 }, new int[] { 4, 8, 14, 16, 19 }, new int?(20));
-            AssertTokenStreamContents(whitespace.TokenStream("bogus", new StringReader("test 123 toolo")), new string[] { "test", "123", "toolo" }, new int[] { 0, 5, 9 }, new int[] { 4, 8, 14 }, new int?(14));
+            AssertTokenStreamContents(whitespace.GetTokenStream("bogus", new StringReader("test 123 toolong ok ")), new string[] { "test", "123", "toolo", "ng", "ok" }, new int[] { 0, 5, 9, 14, 17 }, new int[] { 4, 8, 14, 16, 19 }, new int?(20));
+            AssertTokenStreamContents(whitespace.GetTokenStream("bogus", new StringReader("test 123 toolo")), new string[] { "test", "123", "toolo" }, new int[] { 0, 5, 9 }, new int[] { 4, 8, 14 }, new int?(14));
         }
 
         private class AnalyzerAnonymousInnerClassHelper : Analyzer
@@ -197,7 +197,7 @@ namespace Lucene.Net.Analysis
 
             Analyzer analyzer = new MockAnalyzer(Random());
             Exception priorException = null;
-            TokenStream stream = analyzer.TokenStream("dummy", new StringReader(testString));
+            TokenStream stream = analyzer.GetTokenStream("dummy", new StringReader(testString));
             try
             {
                 stream.Reset();
@@ -282,7 +282,7 @@ namespace Lucene.Net.Analysis
                 MockCharFilter charfilter = new MockCharFilter(reader, 2);
                 MockAnalyzer analyzer = new MockAnalyzer(Random());
                 Exception priorException = null;
-                TokenStream ts = analyzer.TokenStream("bogus", charfilter.m_input);
+                TokenStream ts = analyzer.GetTokenStream("bogus", charfilter.m_input);
                 try
                 {
                     ts.Reset();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b5cae3f3/src/Lucene.Net.Tests/Document/TestDocument.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Document/TestDocument.cs b/src/Lucene.Net.Tests/Document/TestDocument.cs
index 2a73911..2fd16ed 100644
--- a/src/Lucene.Net.Tests/Document/TestDocument.cs
+++ b/src/Lucene.Net.Tests/Document/TestDocument.cs
@@ -370,7 +370,7 @@ namespace Lucene.Net.Documents
             doc.Add(new Field("indexed", "abc xyz", Field.Store.NO, Field.Index.NOT_ANALYZED));
             doc.Add(new Field("tokenized", "abc xyz", Field.Store.NO, Field.Index.ANALYZED));
             doc.Add(new Field("tokenized_reader", new StringReader("abc xyz")));
-            doc.Add(new Field("tokenized_tokenstream", w.w.Analyzer.TokenStream("tokenized_tokenstream", new StringReader("abc xyz"))));
+            doc.Add(new Field("tokenized_tokenstream", w.w.Analyzer.GetTokenStream("tokenized_tokenstream", new StringReader("abc xyz"))));
             doc.Add(new Field("binary", new byte[10]));
             doc.Add(new Field("tv", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
             doc.Add(new Field("tv_pos", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b5cae3f3/src/Lucene.Net.Tests/Index/TestIndexableField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexableField.cs b/src/Lucene.Net.Tests/Index/TestIndexableField.cs
index 88402dd..b05dd77 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexableField.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexableField.cs
@@ -218,7 +218,7 @@ namespace Lucene.Net.Index
 
             public TokenStream GetTokenStream(Analyzer analyzer)
             {
-                return GetReaderValue() != null ? analyzer.TokenStream(Name, GetReaderValue()) : analyzer.TokenStream(Name, new StringReader(GetStringValue()));
+                return GetReaderValue() != null ? analyzer.GetTokenStream(Name, GetReaderValue()) : analyzer.GetTokenStream(Name, new StringReader(GetStringValue()));
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b5cae3f3/src/Lucene.Net.Tests/Index/TestLongPostings.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestLongPostings.cs b/src/Lucene.Net.Tests/Index/TestLongPostings.cs
index 0f06912..840fee7 100644
--- a/src/Lucene.Net.Tests/Index/TestLongPostings.cs
+++ b/src/Lucene.Net.Tests/Index/TestLongPostings.cs
@@ -58,7 +58,7 @@ namespace Lucene.Net.Index
                     continue;
                 }
                 IOException priorException = null;
-                TokenStream ts = a.TokenStream("foo", new StringReader(s));
+                TokenStream ts = a.GetTokenStream("foo", new StringReader(s));
                 try
                 {
                     ITermToBytesRefAttribute termAtt = ts.GetAttribute<ITermToBytesRefAttribute>();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b5cae3f3/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs b/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs
index 355249e..0f87c14 100644
--- a/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs
+++ b/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs
@@ -190,7 +190,7 @@ namespace Lucene.Net.Index
             IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
             Document doc = new Document();
             IOException priorException = null;
-            TokenStream stream = analyzer.TokenStream("field", new StringReader("abcd   "));
+            TokenStream stream = analyzer.GetTokenStream("field", new StringReader("abcd   "));
             try
             {
                 stream.Reset(); // TODO: weird to reset before wrapping with CachingTokenFilter... correct?

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/b5cae3f3/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
index fb5fc39..7ac1dc2 100644
--- a/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
@@ -660,7 +660,7 @@ namespace Lucene.Net.Search
                             }
                         }
                         IOException priorException = null;
-                        TokenStream ts = analyzer.TokenStream("ignore", new StringReader(term));
+                        TokenStream ts = analyzer.GetTokenStream("ignore", new StringReader(term));
                         try
                         {
                             ICharTermAttribute termAttr = ts.AddAttribute<ICharTermAttribute>();

Reply via email to