From c96d025f4affac4f82c546dac288fe3123a3d414 Mon Sep 17 00:00:00 2001
From: Paul Irwin
Date: Thu, 5 Dec 2024 22:37:17 -0700
Subject: [PATCH] SWEEP: Make anonymous class members default to private instead of internal, #715

---
 .../Prefix/ContainsPrefixTreeFilter.cs | 10 +-
 .../Serialized/SerializedDVStrategy.cs | 8 +-
 .../Dummy/DummyCompressingCodec.cs | 5 +-
 .../Codecs/RAMOnly/RAMOnlyPostingsFormat.cs | 5 +-
 .../Index/RandomIndexWriter.cs | 8 +-
 .../Search/QueryUtils.cs | 16 +--
 .../Analysis/Core/TestFactories.cs | 8 +-
 .../Analysis/Core/TestRandomChains.cs | 12 --
 .../TestRemoveDuplicatesTokenFilter.cs | 6 +-
 .../Analysis/Util/TestCharTokenizers.cs | 7 +-
 .../Range/TestRangeFacetCounts.cs | 40 ++----
 .../Taxonomy/Directory/TestAddTaxonomy.cs | 17 +--
 .../TestConcurrentFacetedIndexing.cs | 18 +--
 .../Directory/TestDirectoryTaxonomyWriter.cs | 6 +-
 .../Taxonomy/TestCachedOrdinalsReader.cs | 9 +-
 .../Taxonomy/TestTaxonomyCombined.cs | 29 ++--
 .../Taxonomy/TestTaxonomyFacetCounts.cs | 14 +-
 .../TestTaxonomyFacetSumValueSource.cs | 21 +--
 .../WriterCache/TestCharBlockArray.cs | 2 +-
 .../TestDrillSideways.cs | 39 ++---
 .../TestFacetsConfig.cs | 13 +-
 .../Highlight/HighlighterPhraseTest.cs | 7 +-
 .../Highlight/HighlighterTest.cs | 17 +--
 .../Highlight/OffsetLimitTokenFilterTest.cs | 4 +-
 .../FastVectorHighlighterTest.cs | 134 +++++++++---------
 .../VectorHighlight/FieldQueryTest.cs | 7 +-
 .../Support/TestJoinUtil.cs | 16 +--
 src/Lucene.Net.Tests.Join/TestJoinUtil.cs | 47 +++---
 .../TestCustomScoreQuery.cs | 14 +-
 .../IndexAndTaxonomyReplicationClientTest.cs | 5 +-
 .../Analysis/TestMockAnalyzer.cs | 4 +-
 .../Index/TestBinaryDocValuesUpdates.cs | 25 +---
 .../Index/TestDirectoryReaderReopen.cs | 2 +-
 .../Index/TestDocumentWriter.cs | 14 +-
 .../Index/TestDocumentsWriterDeleteQueue.cs | 2 +-
 src/Lucene.Net.Tests/Index/TestIndexWriter.cs | 10 +-
 .../Index/TestIndexWriterExceptions.cs | 12 +-
 .../Index/TestIndexWriterReader.cs | 2 +-
 .../Index/TestIndexableField.cs | 2 +-
 src/Lucene.Net.Tests/Index/TestStressNRT.cs | 4 +-
 .../Search/TestBooleanScorer.cs | 2 +-
 .../Search/TestCachingCollector.cs | 2 +-
 .../TestControlledRealTimeReopenThread.cs | 12 +-
 src/Lucene.Net.Tests/Search/TestDocIdSet.cs | 2 +-
 .../Search/TestDocValuesScoring.cs | 2 +-
 .../Search/TestEarlyTermination.cs | 4 +-
 .../Search/TestElevationComparator.cs | 4 +-
 .../Search/TestFilteredQuery.cs | 4 +-
 .../Search/TestMultiTermQueryRewrites.cs | 2 +-
 .../Search/TestPositionIncrement.cs | 6 +-
 .../Search/TestQueryRescorer.cs | 2 +-
 .../Search/TestRegexpQuery.cs | 2 +-
 .../Search/TestSearchWithThreads.cs | 2 +-
 .../Search/TestSearcherManager.cs | 30 ++--
 .../Search/TestSloppyPhraseQuery.cs | 11 +-
 src/Lucene.Net.Tests/Search/TestTermScorer.cs | 7 +-
 .../Util/Packed/TestEliasFanoDocIdSet.cs | 2 +-
 .../CompressingStoredFieldsReader.cs | 4 +-
 .../Codecs/Lucene3x/Lucene3xCodec.cs | 7 +-
 .../Lucene45/Lucene45DocValuesProducer.cs | 6 +-
 src/Lucene.Net/Index/DocValues.cs | 18 +--
 .../Index/DocumentsWriterPerThread.cs | 4 -
 src/Lucene.Net/Search/DocIdSetIterator.cs | 12 +-
 src/Lucene.Net/Search/MultiTermQuery.cs | 38 ++---
 src/Lucene.Net/Search/NumericRangeQuery.cs | 4 +-
 src/Lucene.Net/Search/ScoringRewrite.cs | 16 +--
 src/Lucene.Net/Search/SortField.cs | 22 +--
 .../Search/Spans/SpanMultiTermQueryWrapper.cs | 10 +-
 src/Lucene.Net/Util/BytesRefArray.cs | 4 +-
 src/Lucene.Net/Util/Fst/NoOutputs.cs | 6 +-
 src/Lucene.Net/Util/RamUsageEstimator.cs | 6 +-
 71 files changed, 329 insertions(+), 545 deletions(-)

diff --git
a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs index f28376ab3c..f8d46a035c 100644 --- a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs @@ -30,7 +30,7 @@ namespace Lucene.Net.Spatial.Prefix /// /// Finds docs where its indexed shape /// the query shape. For use on . - /// + /// /// @lucene.experimental /// public class ContainsPrefixTreeFilter : AbstractPrefixTreeFilter @@ -114,7 +114,7 @@ public ContainsVisitor(ContainsPrefixTreeFilter outerInstance, AtomicReaderConte { combinedSubResults = GetDocs(subCell, acceptContains); } - else if (!outerInstance.m_multiOverlappingIndexedShapes && + else if (!outerInstance.m_multiOverlappingIndexedShapes && subCell.ShapeRel == SpatialRelation.Within) { combinedSubResults = GetLeafDocs(subCell, acceptContains); //recursion @@ -123,7 +123,7 @@ public ContainsVisitor(ContainsPrefixTreeFilter outerInstance, AtomicReaderConte { combinedSubResults = Visit(subCell, acceptContains); } - + if (combinedSubResults is null) { break; @@ -131,7 +131,7 @@ public ContainsVisitor(ContainsPrefixTreeFilter outerInstance, AtomicReaderConte acceptContains = combinedSubResults;//has the 'AND' effect on next iteration } - + // Result: OR the leaf docs with AND of all child results if (combinedSubResults != null) { @@ -331,7 +331,7 @@ public DocIdSetIteratorAnonymousClass(int size, int[] docs) this.docs = docs; } - internal int idx = -1; + private int idx = -1; public override int DocID { diff --git a/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs b/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs index 0d6d82108d..fa72144330 100644 --- a/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs +++ b/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs @@ -258,15 +258,15 @@ public override FunctionValues GetValues(IDictionary context, AtomicReaderContex BinaryDocValues docValues = readerContext.AtomicReader.GetBinaryDocValues(fieldName); - return new FuctionValuesAnonymousClass(this, docValues); + return new FunctionValuesAnonymousClass(this, docValues); } - private sealed class FuctionValuesAnonymousClass : FunctionValues + private sealed class FunctionValuesAnonymousClass : FunctionValues { private readonly ShapeDocValueSource outerInstance; private readonly BinaryDocValues docValues; - public FuctionValuesAnonymousClass(ShapeDocValueSource outerInstance, BinaryDocValues docValues) + public FunctionValuesAnonymousClass(ShapeDocValueSource outerInstance, BinaryDocValues docValues) { // LUCENENET specific - added guard clauses this.outerInstance = outerInstance ?? 
throw new ArgumentNullException(nameof(outerInstance)); @@ -276,7 +276,7 @@ public FuctionValuesAnonymousClass(ShapeDocValueSource outerInstance, BinaryDocV private int bytesRefDoc = -1; private readonly BytesRef bytesRef = new BytesRef();//scratch - internal bool FillBytes(int doc) + private bool FillBytes(int doc) { if (bytesRefDoc != doc) { diff --git a/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs index 8b5dc18493..ddccac190f 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs @@ -32,9 +32,6 @@ public class DummyCompressingCodec : CompressingCodec private sealed class CompressionModeAnonymousClass : CompressionMode { - public CompressionModeAnonymousClass() - { } - public override Compressor NewCompressor() { return DUMMY_COMPRESSOR; @@ -95,4 +92,4 @@ public DummyCompressingCodec() : this(1 << 14, false) { } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs index 6f198afc48..397edb726a 100644 --- a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs @@ -49,9 +49,6 @@ public sealed class RAMOnlyPostingsFormat : PostingsFormat private sealed class ComparerAnonymousClass : IComparer #pragma warning restore 659 { - public ComparerAnonymousClass() - { } - public int Compare(BytesRef t1, BytesRef t2) { var b1 = t1.Bytes; @@ -288,7 +285,7 @@ public override PostingsConsumer StartTerm(BytesRef text) return postingsWriter; } - public override IComparer Comparer + public override IComparer Comparer => BytesRef.UTF8SortedAsUnicodeComparer; public override void FinishTerm(BytesRef text, TermStats stats) diff --git a/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs b/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs index daa030d0ca..43a1175e47 100644 --- a/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs +++ b/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs @@ -176,7 +176,7 @@ public EnumeratorAnonymousClass(EnumerableAnonymousClass outerIn this.outerInstance = outerInstance; } - internal bool done; + private bool done; private IEnumerable current; public bool MoveNext() @@ -265,7 +265,7 @@ public IEnumerator> GetEnumerator() return new EnumeratorAnonymousClass2(this); } - IEnumerator IEnumerable.GetEnumerator() + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); private sealed class EnumeratorAnonymousClass2 : IEnumerator> @@ -277,7 +277,7 @@ public EnumeratorAnonymousClass2(EnumerableAnonymousClass2 outerInstance) this.outerInstance = outerInstance; } - internal bool done; + private bool done; private IEnumerable current; public bool MoveNext() @@ -519,4 +519,4 @@ public interface ITestPoint { void Apply(string message); } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.TestFramework/Search/QueryUtils.cs b/src/Lucene.Net.TestFramework/Search/QueryUtils.cs index 2a683a5467..1522acb475 100644 --- a/src/Lucene.Net.TestFramework/Search/QueryUtils.cs +++ b/src/Lucene.Net.TestFramework/Search/QueryUtils.cs @@ -84,10 +84,6 @@ public static void CheckHashEquals(Query q) private sealed class QueryAnonymousClass : Query { - public QueryAnonymousClass() - { - } - public override string 
ToString(string field) { return "My Whacky Query"; @@ -187,13 +183,13 @@ public static IndexSearcher WrapUnderlyingReader(Random random, IndexSearcher s, IndexReader[] readers = new IndexReader[] { edge < 0 ? r : emptyReaders[0], emptyReaders[0], - new FCInvisibleMultiReader(edge < 0 ? emptyReaders[4] : emptyReaders[0], - emptyReaders[0], + new FCInvisibleMultiReader(edge < 0 ? emptyReaders[4] : emptyReaders[0], + emptyReaders[0], 0 == edge ? r : emptyReaders[0]), 0 < edge ? emptyReaders[0] : emptyReaders[7], emptyReaders[0], - new FCInvisibleMultiReader(0 < edge ? emptyReaders[0] : emptyReaders[5], - emptyReaders[0], + new FCInvisibleMultiReader(0 < edge ? emptyReaders[0] : emptyReaders[5], + emptyReaders[0], 0 < edge ? r : emptyReaders[0]) }; @@ -300,7 +296,7 @@ private sealed class CollectorAnonymousClass : ICollector private readonly float maxDiff; private readonly AtomicReader[] lastReader; - public CollectorAnonymousClass(Query q, IndexSearcher s, IList readerContextArray, + public CollectorAnonymousClass(Query q, IndexSearcher s, IList readerContextArray, int skip_op, int[] order, int[] opidx, int[] lastDoc, float maxDiff, AtomicReader[] lastReader) { this.q = q; @@ -503,4 +499,4 @@ public void SetNextReader(AtomicReaderContext context) public bool AcceptsDocsOutOfOrder => false; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs index d8c521b4f4..7dd0157b72 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs @@ -204,9 +204,9 @@ public override Tokenizer Create(AttributeSource.AttributeFactory factory, TextR private sealed class FactoryAnalyzer : Analyzer { - internal readonly TokenizerFactory tokenizer; - internal readonly CharFilterFactory charFilter; - internal readonly TokenFilterFactory tokenfilter; + private readonly TokenizerFactory tokenizer; + private readonly CharFilterFactory charFilter; + private readonly TokenFilterFactory tokenfilter; internal FactoryAnalyzer(TokenizerFactory tokenizer, TokenFilterFactory tokenfilter, CharFilterFactory charFilter) { @@ -243,4 +243,4 @@ protected internal override TextReader InitReader(string fieldName, TextReader r } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs index 9840112734..60c2866988 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs @@ -75,10 +75,6 @@ private interface IPredicate private sealed class PredicateAnonymousClass : IPredicate { - public PredicateAnonymousClass() - { - } - public bool Apply(object[] args) { return true; @@ -171,10 +167,6 @@ static TestRandomChains() private sealed class PredicateAnonymousClass2 : IPredicate { - public PredicateAnonymousClass2() - { - } - public bool Apply(object[] args) { if (Debugging.AssertsEnabled) Debugging.Assert(args.Length == 3); @@ -184,10 +176,6 @@ public bool Apply(object[] args) private sealed class PredicateAnonymousClass3 : IPredicate { - public PredicateAnonymousClass3() - { - } - public bool Apply(object[] args) { if (Debugging.AssertsEnabled) Debugging.Assert(args.Length == 3); diff --git 
a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestRemoveDuplicatesTokenFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestRemoveDuplicatesTokenFilter.cs index 17886b38a7..8fa4b8a574 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestRemoveDuplicatesTokenFilter.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestRemoveDuplicatesTokenFilter.cs @@ -64,9 +64,9 @@ public TokenStreamAnonymousClass(IEnumerator toks) posIncAtt = AddAttribute(); } - internal ICharTermAttribute termAtt; - internal IOffsetAttribute offsetAtt; - internal IPositionIncrementAttribute posIncAtt; + private ICharTermAttribute termAtt; + private IOffsetAttribute offsetAtt; + private IPositionIncrementAttribute posIncAtt; public override sealed bool IncrementToken() { if (toks.MoveNext()) diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs index 7a8b011452..fe61d5a768 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs @@ -39,7 +39,7 @@ public class TestCharTokenizers : BaseTokenStreamTestCase { /* - * test to read surrogate pairs without loosing the pairing + * test to read surrogate pairs without loosing the pairing * if the surrogate pair is at the border of the internal IO buffer */ [Test] @@ -261,9 +261,6 @@ public virtual void TestSurrogates() private sealed class AnalyzerAnonymousClass3 : Analyzer { - public AnalyzerAnonymousClass3() - { } - protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader) { Tokenizer tokenizer = new NumberAndSurrogatePairTokenizer(TEST_VERSION_CURRENT, reader); @@ -290,4 +287,4 @@ protected override bool IsTokenChar(int c) } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs index 0576aae967..fba5e06250 100644 --- a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs +++ b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs @@ -226,7 +226,7 @@ public virtual void TestOverlappedEndStart() /// /// Tests single request that mixes Range and non-Range - /// faceting, with and taxonomy. + /// faceting, with and taxonomy. 
/// [Test] public virtual void TestMixedRangeAndNonRangeTaxonomy() @@ -1052,7 +1052,7 @@ public virtual void TestCustomDoublesValueSource() // Test wants 3 docs in one segment: writer.ForceMerge(1); - var vs = new ValueSourceAnonymousClass(this, doc); + var vs = new ValueSourceAnonymousClass(); FacetsConfig config = new FacetsConfig(); @@ -1076,7 +1076,7 @@ public virtual void TestCustomDoublesValueSource() if (Random.NextBoolean()) { // Sort of silly: - fastMatchFilter = new CachingWrapperFilterAnonymousClass(this, new QueryWrapperFilter(new MatchAllDocsQuery()), filterWasUsed); + fastMatchFilter = new CachingWrapperFilterAnonymousClass(new QueryWrapperFilter(new MatchAllDocsQuery()), filterWasUsed); } else { @@ -1101,7 +1101,7 @@ public virtual void TestCustomDoublesValueSource() Assert.AreEqual(1, s.Search(ddq, 10).TotalHits); // Test drill-sideways after drill-down - DrillSideways ds = new DrillSidewaysAnonymousClass2(this, s, config, (TaxonomyReader)null, vs, ranges, fastMatchFilter); + DrillSideways ds = new DrillSidewaysAnonymousClass2(s, config, (TaxonomyReader)null, vs, ranges, fastMatchFilter); DrillSidewaysResult dsr = ds.Search(ddq, 10); @@ -1114,29 +1114,16 @@ public virtual void TestCustomDoublesValueSource() private sealed class ValueSourceAnonymousClass : ValueSource { - private readonly TestRangeFacetCounts outerInstance; - - private readonly Document doc; - - public ValueSourceAnonymousClass(TestRangeFacetCounts outerInstance, Document doc) - { - this.outerInstance = outerInstance; - this.doc = doc; - } - public override FunctionValues GetValues(IDictionary ignored, AtomicReaderContext ignored2) { - return new DoubleDocValuesAnonymousClass(this); + return new DoubleDocValuesAnonymousClass(); } private sealed class DoubleDocValuesAnonymousClass : DoubleDocValues { - private readonly ValueSourceAnonymousClass outerInstance; - - public DoubleDocValuesAnonymousClass(ValueSourceAnonymousClass outerInstance) + public DoubleDocValuesAnonymousClass() : base(null) { - this.outerInstance = outerInstance; } public override double DoubleVal(int doc) @@ -1159,19 +1146,15 @@ public override string GetDescription() { throw UnsupportedOperationException.Create(); } - } private sealed class CachingWrapperFilterAnonymousClass : CachingWrapperFilter { - private readonly TestRangeFacetCounts outerInstance; - private readonly AtomicBoolean filterWasUsed; - public CachingWrapperFilterAnonymousClass(TestRangeFacetCounts outerInstance, QueryWrapperFilter org, AtomicBoolean filterWasUsed) + public CachingWrapperFilterAnonymousClass(QueryWrapperFilter org, AtomicBoolean filterWasUsed) : base(org) { - this.outerInstance = outerInstance; this.filterWasUsed = filterWasUsed; } @@ -1186,23 +1169,18 @@ protected override DocIdSet CacheImpl(DocIdSetIterator iterator, AtomicReader re private sealed class DrillSidewaysAnonymousClass2 : DrillSideways { - private readonly TestRangeFacetCounts outerInstance; - private readonly ValueSource vs; private readonly DoubleRange[] ranges; private readonly Filter fastMatchFilter; - - public DrillSidewaysAnonymousClass2(TestRangeFacetCounts outerInstance, IndexSearcher indexSearcher, FacetsConfig facetsConfig, TaxonomyReader org, ValueSource valueSource, DoubleRange[] doubleRanges, Filter filter) + public DrillSidewaysAnonymousClass2(IndexSearcher indexSearcher, FacetsConfig facetsConfig, TaxonomyReader org, ValueSource valueSource, DoubleRange[] doubleRanges, Filter filter) : base(indexSearcher, facetsConfig, org) { - this.outerInstance = outerInstance; this.vs = 
valueSource; this.ranges = doubleRanges; this.fastMatchFilter = filter; } - protected override Facets BuildFacetsResult(FacetsCollector drillDowns, FacetsCollector[] drillSideways, string[] drillSidewaysDims) { if (Debugging.AssertsEnabled) Debugging.Assert(drillSideways.Length == 1); @@ -1212,4 +1190,4 @@ protected override Facets BuildFacetsResult(FacetsCollector drillDowns, FacetsCo protected override bool ScoreSubDocsAtOnce => Random.NextBoolean(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs index 9b5d725f8a..bcc3c18c0c 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs @@ -48,7 +48,7 @@ private void Dotest(int ncats, int range) ThreadJob[] addThreads = new ThreadJob[4]; for (int j = 0; j < addThreads.Length; j++) { - addThreads[j] = new ThreadAnonymousClass(this, range, numCats, tw); + addThreads[j] = new ThreadAnonymousClass(range, numCats, tw); } foreach (ThreadJob t in addThreads) @@ -75,15 +75,12 @@ private void Dotest(int ncats, int range) private sealed class ThreadAnonymousClass : ThreadJob { - private readonly TestAddTaxonomy outerInstance; - private int range; private AtomicInt32 numCats; private DirectoryTaxonomyWriter tw; - public ThreadAnonymousClass(TestAddTaxonomy outerInstance, int range, AtomicInt32 numCats, DirectoryTaxonomyWriter tw) + public ThreadAnonymousClass(int range, AtomicInt32 numCats, DirectoryTaxonomyWriter tw) { - this.outerInstance = outerInstance; this.range = range; this.numCats = numCats; this.tw = tw; @@ -107,7 +104,6 @@ public override void Run() } } - private IOrdinalMap randomOrdinalMap() { if (Random.NextBoolean()) @@ -251,7 +247,7 @@ public virtual void TestConcurrency() // again, in parallel -- in the end, no duplicate categories should exist. 
Directory dest = NewDirectory(); var destTw = new DirectoryTaxonomyWriter(dest); - var t = new ThreadAnonymousClass2(this, numCategories, destTw); + var t = new ThreadAnonymousClass2(numCategories, destTw); t.Start(); IOrdinalMap map = new MemoryOrdinalMap(); @@ -277,14 +273,11 @@ public virtual void TestConcurrency() private sealed class ThreadAnonymousClass2 : ThreadJob { - private readonly TestAddTaxonomy outerInstance; - private readonly int numCategories; private readonly DirectoryTaxonomyWriter destTW; - public ThreadAnonymousClass2(TestAddTaxonomy outerInstance, int numCategories, DirectoryTaxonomyWriter destTW) + public ThreadAnonymousClass2(int numCategories, DirectoryTaxonomyWriter destTW) { - this.outerInstance = outerInstance; this.numCategories = numCategories; this.destTW = destTW; } @@ -306,4 +299,4 @@ public override void Run() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs index a2d0c74ca0..ce33e095e0 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs @@ -45,22 +45,20 @@ public class TestConcurrentFacetedIndexing : FacetTestCase private sealed class TaxonomyWriterCacheAnonymousClass : ITaxonomyWriterCache { - public TaxonomyWriterCacheAnonymousClass() - { - } - - public void Dispose() { } + public int Get(FacetLabel categoryPath) { return -1; } + public bool Put(FacetLabel categoryPath, int ordinal) { return true; } + public bool IsFull => true; public void Clear() @@ -119,7 +117,7 @@ public virtual void TestConcurrency() for (int i = 0; i < indexThreads.Length; i++) { - indexThreads[i] = new ThreadAnonymousClass(this, numDocs, values, iw, tw, config); + indexThreads[i] = new ThreadAnonymousClass(numDocs, values, iw, tw, config); } foreach (ThreadJob t in indexThreads) @@ -167,17 +165,14 @@ public virtual void TestConcurrency() private sealed class ThreadAnonymousClass : ThreadJob { - private readonly TestConcurrentFacetedIndexing outerInstance; - private readonly AtomicInt32 numDocs; private readonly ConcurrentDictionary values; private readonly IndexWriter iw; private readonly DirectoryTaxonomyWriter tw; private readonly FacetsConfig config; - public ThreadAnonymousClass(TestConcurrentFacetedIndexing outerInstance, AtomicInt32 numDocs, ConcurrentDictionary values, IndexWriter iw, DirectoryTaxonomyWriter tw, FacetsConfig config) + public ThreadAnonymousClass(AtomicInt32 numDocs, ConcurrentDictionary values, IndexWriter iw, DirectoryTaxonomyWriter tw, FacetsConfig config) { - this.outerInstance = outerInstance; this.numDocs = numDocs; this.values = values; this.iw = iw; @@ -185,7 +180,6 @@ public ThreadAnonymousClass(TestConcurrentFacetedIndexing outerInstance, AtomicI this.config = config; } - public override void Run() { Random random = Random; @@ -220,4 +214,4 @@ public override void Run() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs index 3b7607de78..dbc100138f 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs @@ -59,20 +59,22 @@ private sealed class TaxonomyWriterCacheAnonymousClass : 
ITaxonomyWriterCache public void Dispose() { } + public int Get(FacetLabel categoryPath) { return -1; } + public bool Put(FacetLabel categoryPath, int ordinal) { return true; } + public bool IsFull => true; public void Clear() { } - } [Test] @@ -576,4 +578,4 @@ public virtual void TestReplaceTaxoWithLargeTaxonomy() targetTaxoDir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestCachedOrdinalsReader.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestCachedOrdinalsReader.cs index b136d6946c..804dd3d543 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestCachedOrdinalsReader.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestCachedOrdinalsReader.cs @@ -60,7 +60,7 @@ public virtual void TestWithThreads() ThreadJob[] threads = new ThreadJob[3]; for (int i = 0; i < threads.Length; i++) { - threads[i] = new ThreadAnonymousClass(this, "CachedOrdsThread-" + i, reader, ordsReader); + threads[i] = new ThreadAnonymousClass("CachedOrdsThread-" + i, reader, ordsReader); } long ramBytesUsed = 0; @@ -83,15 +83,12 @@ public virtual void TestWithThreads() private sealed class ThreadAnonymousClass : ThreadJob { - private readonly TestCachedOrdinalsReader outerInstance; - private readonly DirectoryReader reader; private readonly CachedOrdinalsReader ordsReader; - public ThreadAnonymousClass(TestCachedOrdinalsReader outerInstance, string threadName, DirectoryReader reader, CachedOrdinalsReader ordsReader) + public ThreadAnonymousClass(string threadName, DirectoryReader reader, CachedOrdinalsReader ordsReader) : base(threadName) { - this.outerInstance = outerInstance; this.reader = reader; this.ordsReader = ordsReader; } @@ -112,4 +109,4 @@ public override void Run() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs index 9aec78e747..2d1ac3580e 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs @@ -59,7 +59,7 @@ public class TestTaxonomyCombined : FacetTestCase }; /// - /// When adding the above categories with ITaxonomyWriter.AddCategory(), + /// When adding the above categories with ITaxonomyWriter.AddCategory(), /// the following paths are expected to be returned: /// (note that currently the full path is not returned, and therefore /// not tested - rather, just the last component, the ordinal, is returned @@ -200,7 +200,7 @@ public virtual void TestWriterTwice() FillTaxonomy(tw); // run fillTaxonomy again - this will try to add the same categories // again, and check that we see the same ordinal paths again, not - // different ones. + // different ones. FillTaxonomy(tw); // Let's check the number of categories again, to see that no // extraneous categories were created: @@ -271,7 +271,7 @@ public virtual void TestWriterTwice3() /// Another set of tests for the writer, which don't use an array and /// try to distill the different cases, and therefore may be more helpful /// for debugging a problem than testWriter() which is hard to know why - /// or where it failed. + /// or where it failed. /// [Test] public virtual void TestWriterSimpler() @@ -424,7 +424,7 @@ public virtual void TestReaderBasic() /// looking at the category string paths (where the parentage is obvious). /// Note that after testReaderBasic(), we already know we can trust the /// ordinal <=> category conversions. 
- /// + /// /// Note: At the moment, the parent methods in the reader are deprecated, /// but this does not mean they should not be tested! Until they are /// removed (*if* they are removed), these tests should remain to see @@ -474,11 +474,11 @@ public virtual void TestReaderParent() /// string paths using a TaxonomyReader (where the parentage is obvious). /// Note that after testReaderBasic(), we already know we can trust the /// ordinal <=> category conversions from TaxonomyReader. - /// + /// /// The difference between testWriterParent1 and testWriterParent2 is that /// the former closes the taxonomy writer before reopening it, while the /// latter does not. - /// + /// /// This test code is virtually identical to that of testReaderParent(). /// [Test] @@ -826,7 +826,7 @@ public virtual void TestTaxonomyReaderRefreshRaces() indexDirBase.Dispose(); } - + private void AssertConsistentYoungestChild(FacetLabel abPath, int abOrd, int abYoungChildBase1, int abYoungChildBase2, int retry, int numCategories) { var indexDir = new SlowRAMDirectory(-1, null); // no slowness for intialization @@ -848,7 +848,7 @@ private void AssertConsistentYoungestChild(FacetLabel abPath, int abOrd, int abY Exception[] error = new Exception[] { null }; int[] retrieval = new int[] { 0 }; - var thread = new ThreadAnonymousClass(this, abPath, abOrd, abYoungChildBase1, abYoungChildBase2, retry, tr, stop, error, retrieval); + var thread = new ThreadAnonymousClass(abPath, abOrd, abYoungChildBase1, abYoungChildBase2, retry, tr, stop, error, retrieval); thread.Start(); indexDir.SetSleepMillis(1); // some delay for refresh @@ -867,8 +867,6 @@ private void AssertConsistentYoungestChild(FacetLabel abPath, int abOrd, int abY private sealed class ThreadAnonymousClass : ThreadJob { - private readonly TestTaxonomyCombined outerInstance; - private readonly FacetLabel abPath; private readonly int abOrd; private readonly int abYoungChildBase1; @@ -879,10 +877,9 @@ private sealed class ThreadAnonymousClass : ThreadJob private readonly Exception[] error; private readonly int[] retrieval; - public ThreadAnonymousClass(TestTaxonomyCombined outerInstance, FacetLabel abPath, int abOrd, int abYoungChildBase1, int abYoungChildBase2, int retry, DirectoryTaxonomyReader tr, AtomicBoolean stop, Exception[] error, int[] retrieval) + public ThreadAnonymousClass(FacetLabel abPath, int abOrd, int abYoungChildBase1, int abYoungChildBase2, int retry, DirectoryTaxonomyReader tr, AtomicBoolean stop, Exception[] error, int[] retrieval) : base("Child Arrays Verifier") { - this.outerInstance = outerInstance; this.abPath = abPath; this.abOrd = abOrd; this.abYoungChildBase1 = abYoungChildBase1; @@ -926,8 +923,8 @@ private void AssertChildrenArrays(ParallelTaxonomyArrays ca, int retry, int retr } /// - /// Grab the stack trace into a string since the exception was thrown in a thread and we want the assert - /// outside the thread to show the stack trace in case of failure. + /// Grab the stack trace into a string since the exception was thrown in a thread and we want the assert + /// outside the thread to show the stack trace in case of failure. /// private string stackTraceStr(Exception error) { @@ -947,7 +944,7 @@ private string stackTraceStr(Exception error) /// Note that this test obviously doesn't cover all the different /// concurrency scenarios, all different methods, and so on. We may /// want to write more tests of this sort. 
- /// + /// /// This test simulates what would happen when there are two separate /// processes, one doing indexing, and the other searching, and each opens /// its own object (with obviously no connection between the objects) using @@ -1216,4 +1213,4 @@ public virtual void TestNrt() // using the same object (simulating threads) or different objects // (simulating processes). } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs index 9056720841..206ef6f1b8 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs @@ -287,7 +287,7 @@ public virtual void TestReallyNoNormsForDrillDown() Store.Directory dir = NewDirectory(); Store.Directory taxoDir = NewDirectory(); IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - iwc.SetSimilarity(new PerFieldSimilarityWrapperAnonymousClass(this)); + iwc.SetSimilarity(new PerFieldSimilarityWrapperAnonymousClass()); ITaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE); RandomIndexWriter writer = new RandomIndexWriter(Random, dir, iwc); FacetsConfig config = new FacetsConfig(); @@ -301,15 +301,7 @@ public virtual void TestReallyNoNormsForDrillDown() private sealed class PerFieldSimilarityWrapperAnonymousClass : PerFieldSimilarityWrapper { - private readonly TestTaxonomyFacetCounts outerInstance; - - public PerFieldSimilarityWrapperAnonymousClass(TestTaxonomyFacetCounts outerInstance) - { - this.outerInstance = outerInstance; - sim = new DefaultSimilarity(); - } - - private readonly Similarity sim; + private readonly Similarity sim = new DefaultSimilarity(); public override Similarity Get(string name) { @@ -855,4 +847,4 @@ public virtual void TestRandom() IOUtils.Dispose(w, tw, searcher.IndexReader, tr, indexDir, taxoDir); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs index f0a25881ab..01628b91fb 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs @@ -350,7 +350,7 @@ public virtual void TestWithScore() DirectoryReader r = DirectoryReader.Open(iw, true); DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter); - ValueSource valueSource = new ValueSourceAnonymousClass(this); + ValueSource valueSource = new ValueSourceAnonymousClass(); FacetsCollector fc = new FacetsCollector(true); // score documents by their 'price' field - makes asserting the correct counts for the categories easier @@ -365,35 +365,23 @@ public virtual void TestWithScore() private sealed class ValueSourceAnonymousClass : ValueSource { - private readonly TestTaxonomyFacetSumValueSource outerInstance; - - public ValueSourceAnonymousClass(TestTaxonomyFacetSumValueSource outerInstance) - { - this.outerInstance = outerInstance; - } - public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext) { Scorer scorer = (Scorer)context["scorer"]; if (Debugging.AssertsEnabled) Debugging.Assert(scorer != null); - return new DoubleDocValuesAnonymousClass(this, scorer); + return new DoubleDocValuesAnonymousClass(scorer); } private sealed class DoubleDocValuesAnonymousClass : DoubleDocValues { - private readonly 
ValueSourceAnonymousClass outerInstance; - private readonly Scorer scorer; - public DoubleDocValuesAnonymousClass(ValueSourceAnonymousClass outerInstance, Scorer scorer) + public DoubleDocValuesAnonymousClass(Scorer scorer) : base(null) //todo: value source { - this.outerInstance = outerInstance; this.scorer = scorer; } - - public override double DoubleVal(int document) { try @@ -411,6 +399,7 @@ public override bool Equals(object o) { return o == this; } + public override int GetHashCode() { return RuntimeHelpers.GetHashCode(this); @@ -603,4 +592,4 @@ public virtual void TestRandom() IOUtils.Dispose(w, tw, searcher.IndexReader, tr, indexDir, taxoDir); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs index 295b01afce..ecf2d317bb 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs @@ -313,7 +313,7 @@ public virtual void TestSpanAppendableInterface() private sealed class CharSequenceAnonymousClass : ICharSequence { - private string longTestString; + private readonly string longTestString; // LUCENENET: made readonly public CharSequenceAnonymousClass(string longTestString) { diff --git a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs index 4ff35b84b9..79d3332f59 100644 --- a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs +++ b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs @@ -127,7 +127,7 @@ public virtual void TestBasic() // case: drill-down on a single field; in this // case the drill-sideways + drill-down counts == - // drill-down of just the query: + // drill-down of just the query: DrillDownQuery ddq = new DrillDownQuery(config); ddq.Add("Author", "Lisa"); DrillSidewaysResult r = ds.Search(null, ddq, 10); @@ -742,7 +742,7 @@ public virtual void TestRandom() { Console.WriteLine(" only-even filter"); } - filter = new FilterAnonymousClass(this); + filter = new FilterAnonymousClass(); } else { @@ -752,7 +752,7 @@ public virtual void TestRandom() // Verify docs are always collected in order. 
If we // had an AssertingScorer it could catch it when // Weight.scoresDocsOutOfOrder lies!: - new DrillSideways(s, config, tr).Search(ddq, new CollectorAnonymousClass(this)); + new DrillSideways(s, config, tr).Search(ddq, new CollectorAnonymousClass()); // Also separately verify that DS respects the // scoreSubDocsAtOnce method, to ensure that all @@ -763,7 +763,7 @@ public virtual void TestRandom() // drill-down values, because in that case it's // easily possible for one of the DD terms to be on // a future docID: - new DrillSidewaysAnonymousClass(this, s, config, tr) + new DrillSidewaysAnonymousClass(s, config, tr) .Search(ddq, new AssertingSubDocsAtOnceCollector()); } @@ -810,13 +810,6 @@ public virtual void TestRandom() private sealed class FilterAnonymousClass : Filter { - private readonly TestDrillSideways outerInstance; - - public FilterAnonymousClass(TestDrillSideways outerInstance) - { - this.outerInstance = outerInstance; - } - public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs) { int maxDoc = context.Reader.MaxDoc; @@ -835,14 +828,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo private sealed class CollectorAnonymousClass : ICollector { - private readonly TestDrillSideways outerInstance; - - public CollectorAnonymousClass(TestDrillSideways outerInstance) - { - this.outerInstance = outerInstance; - } - - internal int lastDocID; + private int lastDocID; public void SetScorer(Scorer scorer) { @@ -864,12 +850,9 @@ public void SetNextReader(AtomicReaderContext context) private sealed class DrillSidewaysAnonymousClass : DrillSideways { - private readonly TestDrillSideways outerInstance; - - public DrillSidewaysAnonymousClass(TestDrillSideways outerInstance, IndexSearcher s, FacetsConfig config, TaxonomyReader tr) + public DrillSidewaysAnonymousClass(IndexSearcher s, FacetsConfig config, TaxonomyReader tr) : base(s, config, tr) { - this.outerInstance = outerInstance; } protected override bool ScoreSubDocsAtOnce => true; @@ -967,7 +950,7 @@ private int[] GetTopNOrds(int[] counts, string[] values, int topN) // Naive (on purpose, to reduce bug in tester/gold): // sort all ids, then return top N slice: - new InPlaceMergeSorterAnonymousClass(this, counts, values, ids).Sort(0, ids.Length); + new InPlaceMergeSorterAnonymousClass(counts, values, ids).Sort(0, ids.Length); if (topN > ids.Length) { @@ -991,21 +974,17 @@ private int[] GetTopNOrds(int[] counts, string[] values, int topN) private sealed class InPlaceMergeSorterAnonymousClass : InPlaceMergeSorter { - private readonly TestDrillSideways outerInstance; - private readonly int[] counts; private readonly string[] values; private readonly int[] ids; - public InPlaceMergeSorterAnonymousClass(TestDrillSideways outerInstance, int[] counts, string[] values, int[] ids) + public InPlaceMergeSorterAnonymousClass(int[] counts, string[] values, int[] ids) { - this.outerInstance = outerInstance; this.counts = counts; this.values = values; this.ids = ids; } - protected override void Swap(int i, int j) { int id = ids[i]; @@ -1353,4 +1332,4 @@ public void TestScorer() IOUtils.Dispose(searcher.IndexReader, taxoReader, taxoWriter, dir, taxoDir); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs b/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs index 1ba8593c9a..69f72e24c3 100644 --- a/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs +++ b/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs @@ -98,25 +98,18 @@ public virtual void 
TestAddSameDocTwice() } /// - /// LUCENE-5479 + /// LUCENE-5479 /// [Test] public virtual void TestCustomDefault() { - FacetsConfig config = new FacetsConfigAnonymousClass(this); + FacetsConfig config = new FacetsConfigAnonymousClass(); Assert.IsTrue(config.GetDimConfig("foobar").IsHierarchical); } private sealed class FacetsConfigAnonymousClass : FacetsConfig { - private readonly TestFacetsConfig outerInstance; - - public FacetsConfigAnonymousClass(TestFacetsConfig outerInstance) - { - this.outerInstance = outerInstance; - } - protected override DimConfig DefaultDimConfig { get @@ -128,4 +121,4 @@ protected override DimConfig DefaultDimConfig } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterPhraseTest.cs b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterPhraseTest.cs index f94e4cfb1b..a81d5edd29 100644 --- a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterPhraseTest.cs +++ b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterPhraseTest.cs @@ -111,7 +111,7 @@ public void TestConcurrentSpan() new SpanTermQuery(new Term(FIELD, "fox")), new SpanTermQuery(new Term(FIELD, "jumped")) }, 0, true); FixedBitSet bitset = new FixedBitSet(indexReader.MaxDoc); - indexSearcher.Search(phraseQuery, new ConcurrentSpanCollectorAnonymousClass(this, bitset)); + indexSearcher.Search(phraseQuery, new ConcurrentSpanCollectorAnonymousClass(bitset)); assertEquals(1, bitset.Cardinality); int maxDoc = indexReader.MaxDoc; @@ -138,11 +138,10 @@ public void TestConcurrentSpan() private sealed class ConcurrentSpanCollectorAnonymousClass : ICollector { - private readonly HighlighterPhraseTest outerInstance; private readonly FixedBitSet bitset; - public ConcurrentSpanCollectorAnonymousClass(HighlighterPhraseTest outerInstance, FixedBitSet bitset) + + public ConcurrentSpanCollectorAnonymousClass(FixedBitSet bitset) { - this.outerInstance = outerInstance; this.bitset = bitset; } diff --git a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs index 23b608f53f..0644f95336 100644 --- a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs +++ b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs @@ -1511,7 +1511,7 @@ public void TestEncoding() // for // highlighting but scores a single fragment for selection - Highlighter highlighter = new Highlighter(this, new SimpleHTMLEncoder(), new TestEncodingScorerAnonymousClass(this)); + Highlighter highlighter = new Highlighter(this, new SimpleHTMLEncoder(), new TestEncodingScorerAnonymousClass()); highlighter.TextFragmenter = (new SimpleFragmenter(2000)); TokenStream tokenStream = analyzer.GetTokenStream(FIELD_NAME, rawDocContent); @@ -1546,13 +1546,6 @@ public void TestEncoding() private sealed class TestEncodingScorerAnonymousClass : IScorer { - private readonly HighlighterTest outerInstance; - - public TestEncodingScorerAnonymousClass(HighlighterTest outerInstance) - { - this.outerInstance = outerInstance; - } - public void StartFragment(TextFragment newFragment) { } @@ -1657,8 +1650,8 @@ public TS2TokenStreamAnonymousClass() iter = lst.GetEnumerator(); } - IEnumerator iter; - internal IList lst; + private IEnumerator iter; + private readonly IList lst; // LUCENENET: made readonly private readonly ICharTermAttribute termAtt; private readonly IPositionIncrementAttribute posIncrAtt; private readonly IOffsetAttribute offsetAtt; @@ -1719,8 +1712,8 @@ public TS2aTokenStreamAnonymousClass() iter = lst.GetEnumerator(); } - 
IEnumerator iter; - internal IList lst; + private IEnumerator iter; + private readonly IList lst; // LUCENENET: made readonly private readonly ICharTermAttribute termAtt; private readonly IPositionIncrementAttribute posIncrAtt; private readonly IOffsetAttribute offsetAtt; diff --git a/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs b/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs index a99560b94e..d2386d72a2 100644 --- a/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs +++ b/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs @@ -27,7 +27,7 @@ public class OffsetLimitTokenFilterTest : BaseTokenStreamTestCase [Test] public void TestFilter() { - // we disable MockTokenizer checks because we will forcefully limit the + // we disable MockTokenizer checks because we will forcefully limit the // tokenstream and call end() before incrementToken() returns false. MockTokenizer stream = new MockTokenizer(new StringReader( "short toolong evenmuchlongertext a ab toolong foo"), @@ -59,7 +59,7 @@ private sealed class AnalyzerAnonymousClass : Analyzer protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader) { MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false); - tokenizer.EnableChecks = (false); + tokenizer.EnableChecks = false; return new TokenStreamComponents(tokenizer, new OffsetLimitTokenFilter(tokenizer, 10)); } } diff --git a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FastVectorHighlighterTest.cs b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FastVectorHighlighterTest.cs index c465363e96..4be75a1bfd 100644 --- a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FastVectorHighlighterTest.cs +++ b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FastVectorHighlighterTest.cs @@ -41,9 +41,9 @@ public void TestSimpleHighlightTest() IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); Document doc = new Document(); FieldType type = new FieldType(TextField.TYPE_STORED); - type.StoreTermVectorOffsets = (true); - type.StoreTermVectorPositions = (true); - type.StoreTermVectors = (true); + type.StoreTermVectorOffsets = true; + type.StoreTermVectorPositions = true; + type.StoreTermVectors = true; type.Freeze(); Field field = new Field("field", "This is a test where foo is highlighed and should be highlighted", type); @@ -54,8 +54,8 @@ public void TestSimpleHighlightTest() IndexReader reader = DirectoryReader.Open(writer, true); int docId = 0; FieldQuery fieldQuery = highlighter.GetFieldQuery(new TermQuery(new Term("field", "foo")), reader); - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, "field", 54, 1); - // highlighted results are centered + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, "field", 54, 1); + // highlighted results are centered assertEquals("This is a test where foo is highlighed and should be highlighted", bestFragments[0]); bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, "field", 52, 1); assertEquals("This is a test where foo is highlighed and should be", bestFragments[0]); @@ -73,9 +73,9 @@ public void TestPhraseHighlightLongTextTest() IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); Document doc = new Document(); FieldType type = new FieldType(TextField.TYPE_STORED); - 
type.StoreTermVectorOffsets = (true); - type.StoreTermVectorPositions = (true); - type.StoreTermVectors = (true); + type.StoreTermVectorOffsets = true; + type.StoreTermVectorPositions = true; + type.StoreTermVectors = true; type.Freeze(); Field text = new Field("text", "Netscape was the general name for a series of web browsers originally produced by Netscape Communications Corporation, now a subsidiary of AOL The original browser was once the dominant browser in terms of usage share, but as a result of the first browser war it lost virtually all of its share to Internet Explorer Netscape was discontinued and support for all Netscape browsers and client products was terminated on March 1, 2008 Netscape Navigator was the name of Netscape\u0027s web browser from versions 1.0 through 4.8 The first beta release versions of the browser were released in 1994 and known as Mosaic and then Mosaic Netscape until a legal challenge from the National Center for Supercomputing Applications (makers of NCSA Mosaic, which many of Netscape\u0027s founders used to develop), led to the name change to Netscape Navigator The company\u0027s name also changed from Mosaic Communications Corporation to Netscape Communications Corporation The browser was easily the most advanced...", type); @@ -84,13 +84,13 @@ public void TestPhraseHighlightLongTextTest() FastVectorHighlighter highlighter = new FastVectorHighlighter(); IndexReader reader = DirectoryReader.Open(writer, true); int docId = 0; - String field = "text"; + string field = "text"; { BooleanQuery query = new BooleanQuery(); query.Add(new TermQuery(new Term(field, "internet")), Occur.MUST); query.Add(new TermQuery(new Term(field, "explorer")), Occur.MUST); FieldQuery fieldQuery = highlighter.GetFieldQuery(query, reader); - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, field, 128, 1); // highlighted results are centered assertEquals(1, bestFragments.Length); @@ -102,7 +102,7 @@ public void TestPhraseHighlightLongTextTest() query.Add(new Term(field, "internet")); query.Add(new Term(field, "explorer")); FieldQuery fieldQuery = highlighter.GetFieldQuery(query, reader); - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, field, 128, 1); // highlighted results are centered assertEquals(1, bestFragments.Length); @@ -121,9 +121,9 @@ public void TestPhraseHighlightTest() IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); Document doc = new Document(); FieldType type = new FieldType(TextField.TYPE_STORED); - type.StoreTermVectorOffsets = (true); - type.StoreTermVectorPositions = (true); - type.StoreTermVectors = (true); + type.StoreTermVectorOffsets = true; + type.StoreTermVectorPositions = true; + type.StoreTermVectors = true; type.Freeze(); Field longTermField = new Field("long_term", "This is a test thisisaverylongwordandmakessurethisfails where foo is highlighed and should be highlighted", type); Field noLongTermField = new Field("no_long_term", "This is a test where foo is highlighed and should be highlighted", type); @@ -134,14 +134,14 @@ public void TestPhraseHighlightTest() FastVectorHighlighter highlighter = new FastVectorHighlighter(); IndexReader reader = DirectoryReader.Open(writer, true); int docId = 0; - String field = "no_long_term"; + string field = "no_long_term"; { BooleanQuery 
query = new BooleanQuery(); query.Add(new TermQuery(new Term(field, "test")), Occur.MUST); query.Add(new TermQuery(new Term(field, "foo")), Occur.MUST); query.Add(new TermQuery(new Term(field, "highlighed")), Occur.MUST); FieldQuery fieldQuery = highlighter.GetFieldQuery(query, reader); - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, field, 18, 1); // highlighted results are centered assertEquals(1, bestFragments.Length); @@ -153,12 +153,12 @@ public void TestPhraseHighlightTest() pq.Add(new Term(field, "test")); pq.Add(new Term(field, "foo")); pq.Add(new Term(field, "highlighed")); - pq.Slop = (5); + pq.Slop = 5; query.Add(new TermQuery(new Term(field, "foo")), Occur.MUST); query.Add(pq, Occur.MUST); query.Add(new TermQuery(new Term(field, "highlighed")), Occur.MUST); FieldQuery fieldQuery = highlighter.GetFieldQuery(query, reader); - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, field, 18, 1); // highlighted results are centered assertEquals(0, bestFragments.Length); @@ -174,9 +174,9 @@ public void TestPhraseHighlightTest() query.Add(new Term(field, "test")); query.Add(new Term(field, "foo")); query.Add(new Term(field, "highlighed")); - query.Slop = (3); + query.Slop = 3; FieldQuery fieldQuery = highlighter.GetFieldQuery(query, reader); - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, field, 18, 1); // highlighted results are centered assertEquals(0, bestFragments.Length); @@ -192,9 +192,9 @@ public void TestPhraseHighlightTest() query.Add(new Term(field, "test")); query.Add(new Term(field, "foo")); query.Add(new Term(field, "highlighted")); - query.Slop = (30); + query.Slop = 30; FieldQuery fieldQuery = highlighter.GetFieldQuery(query, reader); - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, field, 18, 1); assertEquals(0, bestFragments.Length); } @@ -204,7 +204,7 @@ public void TestPhraseHighlightTest() pq.Add(new Term(field, "test")); pq.Add(new Term(field, "foo")); pq.Add(new Term(field, "highlighed")); - pq.Slop = (5); + pq.Slop = 5; BooleanQuery inner = new BooleanQuery(); inner.Add(pq, Occur.MUST); inner.Add(new TermQuery(new Term(field, "foo")), Occur.MUST); @@ -212,7 +212,7 @@ public void TestPhraseHighlightTest() query.Add(pq, Occur.MUST); query.Add(new TermQuery(new Term(field, "highlighed")), Occur.MUST); FieldQuery fieldQuery = highlighter.GetFieldQuery(query, reader); - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, field, 18, 1); assertEquals(0, bestFragments.Length); @@ -231,7 +231,7 @@ public void TestPhraseHighlightTest() query.Add(new TermQuery(new Term(field, "foo")), Occur.MUST); query.Add(new TermQuery(new Term(field, "highlighed")), Occur.MUST); FieldQuery fieldQuery = highlighter.GetFieldQuery(query, reader); - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, field, 18, 1); // highlighted results are centered assertEquals(1, bestFragments.Length); @@ -250,9 +250,9 @@ public void TestBoostedPhraseHighlightTest() 
IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); Document doc = new Document(); FieldType type = new FieldType(TextField.TYPE_STORED); - type.StoreTermVectorOffsets = (true); - type.StoreTermVectorPositions = (true); - type.StoreTermVectors = (true); + type.StoreTermVectorOffsets = true; + type.StoreTermVectorPositions = true; + type.StoreTermVectors = true; type.Freeze(); StringBuilder text = new StringBuilder(); text.append("words words junk junk junk junk junk junk junk junk highlight junk junk junk junk together junk "); @@ -278,13 +278,13 @@ public void TestBoostedPhraseHighlightTest() // This mimics what some query parsers do to <"highlight words together"> BooleanQuery phrase = new BooleanQuery(); phrase.Add(clause("text", "highlight", "words", "together"), Occur.MUST); - phrase.Boost = (100); - // Now combine those results in a boolean query which should pull the phrases to the front of the list of fragments + phrase.Boost = 100; + // Now combine those results in a boolean query which should pull the phrases to the front of the list of fragments BooleanQuery query = new BooleanQuery(); query.Add(phrase, Occur.MUST); query.Add(phrase, Occur.SHOULD); FieldQuery fieldQuery = new FieldQuery(query, reader, true, false); - String fragment = highlighter.GetBestFragment(fieldQuery, reader, 0, "text", 100); + string fragment = highlighter.GetBestFragment(fieldQuery, reader, 0, "text", 100); assertEquals("junk junk junk junk junk junk junk junk highlight words together junk junk junk junk junk junk junk junk", fragment); reader.Dispose(); @@ -298,11 +298,11 @@ public void TestCommonTermsQueryHighlight() Directory dir = NewDirectory(); IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET))); FieldType type = new FieldType(TextField.TYPE_STORED); - type.StoreTermVectorOffsets = (true); - type.StoreTermVectorPositions = (true); - type.StoreTermVectors = (true); + type.StoreTermVectorOffsets = true; + type.StoreTermVectorPositions = true; + type.StoreTermVectors = true; type.Freeze(); - String[] texts = { + string[] texts = { "Hello this is a piece of text that is very long and contains too much preamble and the meat is really here which says kennedy has been shot", "This piece of text refers to Kennedy at the beginning then has a longer piece of text that is very long in the middle and finally ends with another reference to Kennedy", "JFK has been shot", "John Kennedy has been shot", @@ -326,7 +326,7 @@ public void TestCommonTermsQueryHighlight() TopDocs hits = searcher.Search(query, 10); assertEquals(2, hits.TotalHits); FieldQuery fieldQuery = highlighter.GetFieldQuery(query, reader); - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, hits.ScoreDocs[0].Doc, "field", 1000, 1); + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, hits.ScoreDocs[0].Doc, "field", 1000, 1); assertEquals("This piece of text refers to Kennedy at the beginning then has a longer piece of text that is very long in the middle and finally ends with another reference to Kennedy", bestFragments[0]); fieldQuery = highlighter.GetFieldQuery(query, reader); @@ -457,9 +457,9 @@ public void TestMultiValuedSortByScore() IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); Document doc = new Document(); FieldType type = new 
FieldType(TextField.TYPE_STORED); - type.StoreTermVectorOffsets = (true); - type.StoreTermVectorPositions = (true); - type.StoreTermVectors = (true); + type.StoreTermVectorOffsets = true; + type.StoreTermVectorPositions = true; + type.StoreTermVectors = true; type.Freeze(); doc.Add(new Field("field", "zero if naught", type)); // The first two fields contain the best match doc.Add(new Field("field", "hero of legend", type)); // but total a lower score (3) than the bottom @@ -470,10 +470,10 @@ public void TestMultiValuedSortByScore() FastVectorHighlighter highlighter = new FastVectorHighlighter(); ScoreOrderFragmentsBuilder fragmentsBuilder = new ScoreOrderFragmentsBuilder(); - fragmentsBuilder.IsDiscreteMultiValueHighlighting = (true); + fragmentsBuilder.IsDiscreteMultiValueHighlighting = true; IndexReader reader = DirectoryReader.Open(writer, true); - String[] preTags = new String[] { "" }; - String[] postTags = new String[] { "" }; + string[] preTags = new string[] { "" }; + string[] postTags = new string[] { "" }; IEncoder encoder = new DefaultEncoder(); int docId = 0; BooleanQuery query = new BooleanQuery(); @@ -485,7 +485,7 @@ public void TestMultiValuedSortByScore() foreach (IFragListBuilder fragListBuilder in new IFragListBuilder[] { new SimpleFragListBuilder(), new WeightedFragListBuilder() }) { - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, "field", 20, 1, + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, "field", 20, 1, fragListBuilder, fragmentsBuilder, preTags, postTags, encoder); assertEquals("hero of legend", bestFragments[0]); bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, "field", 28, 1, @@ -508,12 +508,12 @@ public void TestBooleanPhraseWithSynonym() IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); Document doc = new Document(); FieldType type = new FieldType(TextField.TYPE_NOT_STORED); - type.StoreTermVectorOffsets = (true); - type.StoreTermVectorPositions = (true); - type.StoreTermVectors = (true); + type.StoreTermVectorOffsets = true; + type.StoreTermVectorPositions = true; + type.StoreTermVectors = true; type.Freeze(); Token syn = new Token("httpwwwfacebookcom", 6, 29); - syn.PositionIncrement = (0); + syn.PositionIncrement = 0; CannedTokenStream ts = new CannedTokenStream( new Token("test", 0, 4), new Token("http", 6, 10), @@ -539,7 +539,7 @@ public void TestBooleanPhraseWithSynonym() pq.Add(new Term("field", "facebook")); pq.Add(new Term("field", "com")); FieldQuery fieldQuery = highlighter.GetFieldQuery(pq, reader); - String[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, "field", 54, 1); + string[] bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, "field", 54, 1); assertEquals("Test: http://www.facebook.com", bestFragments[0]); // query2: match @@ -566,23 +566,23 @@ public void TestBooleanPhraseWithSynonym() dir.Dispose(); } - private void matchedFieldsTestCase(String fieldValue, String expected, params Query[] queryClauses) + private void matchedFieldsTestCase(string fieldValue, string expected, params Query[] queryClauses) { matchedFieldsTestCase(true, true, fieldValue, expected, queryClauses); } - private void matchedFieldsTestCase(bool useMatchedFields, bool fieldMatch, String fieldValue, String expected, params Query[] queryClauses) + private void matchedFieldsTestCase(bool useMatchedFields, bool fieldMatch, string fieldValue, string expected, params Query[] 
queryClauses) { Document doc = new Document(); FieldType stored = new FieldType(TextField.TYPE_STORED); - stored.StoreTermVectorOffsets = (true); - stored.StoreTermVectorPositions = (true); - stored.StoreTermVectors = (true); + stored.StoreTermVectorOffsets = true; + stored.StoreTermVectorPositions = true; + stored.StoreTermVectors = true; stored.Freeze(); FieldType matched = new FieldType(TextField.TYPE_NOT_STORED); - matched.StoreTermVectorOffsets = (true); - matched.StoreTermVectorPositions = (true); - matched.StoreTermVectors = (true); + matched.StoreTermVectorOffsets = true; + matched.StoreTermVectorPositions = true; + matched.StoreTermVectors = true; matched.Freeze(); doc.Add(new Field("field", fieldValue, stored)); // Whitespace tokenized with English stop words doc.Add(new Field("field_exact", fieldValue, matched)); // Whitespace tokenized without stop words @@ -606,8 +606,8 @@ private void matchedFieldsTestCase(bool useMatchedFields, bool fieldMatch, Strin IFragListBuilder fragListBuilder = new SimpleFragListBuilder(); IFragmentsBuilder fragmentsBuilder = new ScoreOrderFragmentsBuilder(); IndexReader reader = DirectoryReader.Open(writer, true); - String[] preTags = new String[] { "" }; - String[] postTags = new String[] { "" }; + string[] preTags = new string[] { "" }; + string[] postTags = new string[] { "" }; IEncoder encoder = new DefaultEncoder(); int docId = 0; BooleanQuery query = new BooleanQuery(); @@ -616,10 +616,10 @@ private void matchedFieldsTestCase(bool useMatchedFields, bool fieldMatch, Strin query.Add(clause, Occur.MUST); } FieldQuery fieldQuery = new FieldQuery(query, reader, true, fieldMatch); - String[] bestFragments; + string[] bestFragments; if (useMatchedFields) { - ISet matchedFields = new JCG.HashSet(); + ISet matchedFields = new JCG.HashSet(); matchedFields.Add("field"); matchedFields.Add("field_exact"); matchedFields.Add("field_super_exact"); @@ -644,7 +644,7 @@ private void matchedFieldsTestCase(bool useMatchedFields, bool fieldMatch, Strin private sealed class AnalyzerWrapperAnonymousClass : AnalyzerWrapper { - IDictionary fieldAnalyzers = new JCG.SortedDictionary(StringComparer.Ordinal); + private readonly IDictionary fieldAnalyzers = new JCG.SortedDictionary(StringComparer.Ordinal); #pragma warning disable 612, 618 // LUCENENET NOTE: Class calls obsolete (default) constructor public AnalyzerWrapperAnonymousClass() @@ -664,12 +664,12 @@ protected override Analyzer GetWrappedAnalyzer(string fieldName) } } - private Query clause(String field, params String[] terms) + private Query clause(string field, params string[] terms) { return clause(field, 1, terms); } - private Query clause(String field, float boost, params String[] terms) + private Query clause(string field, float boost, params string[] terms) { Query q; if (terms.Length == 1) @@ -679,20 +679,20 @@ private Query clause(String field, float boost, params String[] terms) else { PhraseQuery pq = new PhraseQuery(); - foreach (String term in terms) + foreach (string term in terms) { pq.Add(new Term(field, term)); } q = pq; } - q.Boost = (boost); + q.Boost = boost; return q; } - private static Token token(String term, int posInc, int startOffset, int endOffset) + private static Token token(string term, int posInc, int startOffset, int endOffset) { Token t = new Token(term, startOffset, endOffset); - t.PositionIncrement = (posInc); + t.PositionIncrement = posInc; return t; } } diff --git a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs 
b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs index 9b82455904..2019054d03 100644 --- a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs +++ b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs @@ -618,7 +618,7 @@ public void TestQueryPhraseMap2PhrasesFields() /* * ...terminal - * + * * a-b-c- * +-d- * b-c-d- @@ -683,7 +683,7 @@ public void TestQueryPhraseMapOverlapPhrases() /* * ...terminal - * + * * a-b- * +-c- */ @@ -726,7 +726,7 @@ public void TestQueryPhraseMapOverlapPhrases2() /* * ...terminal - * + * * a-a-a- * +-a- * +-a- @@ -990,6 +990,7 @@ private void defgMultiTermQueryTest(Query query) phraseCandidate.Add(new TermInfo("defg", 0, 12, 0, 1)); assertNotNull(fq.SearchPhrase(F, phraseCandidate)); } + private sealed class TestStopRewriteQueryAnonymousClass : Query { public override string ToString(string field) diff --git a/src/Lucene.Net.Tests.Join/Support/TestJoinUtil.cs b/src/Lucene.Net.Tests.Join/Support/TestJoinUtil.cs index 46e80a99c7..7b0ee0d718 100644 --- a/src/Lucene.Net.Tests.Join/Support/TestJoinUtil.cs +++ b/src/Lucene.Net.Tests.Join/Support/TestJoinUtil.cs @@ -184,7 +184,7 @@ protected virtual void Test300spartans(bool multipleValues, ScoreMode scoreMode) /// /// LUCENE-5487: verify a join query inside a SHOULD BQ - /// will still use the join query's optimized BulkScorers + /// will still use the join query's optimized BulkScorers /// [Test] public void TestInsideBooleanQuery() @@ -261,7 +261,7 @@ public void TestInsideBooleanQuery() private sealed class CollectorAnonymousClass : ICollector { - internal bool sawFive; + private bool sawFive; public void SetNextReader(AtomicReaderContext context) { @@ -522,9 +522,9 @@ private void ExecuteRandomJoin(bool multipleValuesPerDocument, int maxIndexIter, private sealed class CollectorAnonymousClass2 : ICollector { - private bool scoreDocsInOrder; - private FixedBitSet actualResult; - private TopScoreDocCollector topScoreDocCollector; + private readonly bool scoreDocsInOrder; + private readonly FixedBitSet actualResult; + private readonly TopScoreDocCollector topScoreDocCollector; public CollectorAnonymousClass2(bool scoreDocsInOrder, FixedBitSet actualResult, @@ -535,7 +535,6 @@ public CollectorAnonymousClass2(bool scoreDocsInOrder, this.topScoreDocCollector = topScoreDocCollector; } - private int _docBase; public void Collect(int doc) @@ -759,10 +758,9 @@ public CollectorAnonymousClass3(string fromField, joinValue = new BytesRef(); } - private Scorer scorer; private SortedSetDocValues docTermOrds; - internal readonly BytesRef joinValue; + private readonly BytesRef joinValue; public void Collect(int doc) { @@ -1082,4 +1080,4 @@ internal virtual float Score(ScoreMode mode) } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs index f02d7c1e72..69d2d27380 100644 --- a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs +++ b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs @@ -181,7 +181,7 @@ protected virtual void Test300spartans(bool multipleValues, ScoreMode scoreMode) /// /// LUCENE-5487: verify a join query inside a SHOULD BQ - /// will still use the join query's optimized BulkScorers + /// will still use the join query's optimized BulkScorers /// [Test] public void TestInsideBooleanQuery() @@ -258,7 +258,7 @@ public void TestInsideBooleanQuery() private sealed class CollectorAnonymousClass : ICollector { - internal bool sawFive; + private bool sawFive; public void SetNextReader(AtomicReaderContext 
context) { @@ -397,7 +397,7 @@ public void TestMultiValueRandomJoin() int maxSearchIter = TestUtil.NextInt32(Random, 6, 12); ExecuteRandomJoin(true, maxIndexIter, maxSearchIter, TestUtil.NextInt32(Random, 11, 57)); } - + private void ExecuteRandomJoin(bool multipleValuesPerDocument, int maxIndexIter, int maxSearchIter, int numberOfDocumentsToIndex) { @@ -436,7 +436,7 @@ private void ExecuteRandomJoin(bool multipleValuesPerDocument, int maxIndexIter, { Console.WriteLine("actualQuery=" + actualQuery); } - + var scoreModeLength = Enum.GetNames(typeof(ScoreMode)).Length; ScoreMode scoreMode = (ScoreMode) Random.Next(scoreModeLength); if (Verbose) @@ -519,9 +519,9 @@ private void ExecuteRandomJoin(bool multipleValuesPerDocument, int maxIndexIter, private sealed class CollectorAnonymousClass2 : ICollector { - private bool scoreDocsInOrder; - private FixedBitSet actualResult; - private TopScoreDocCollector topScoreDocCollector; + private readonly bool scoreDocsInOrder; + private readonly FixedBitSet actualResult; + private readonly TopScoreDocCollector topScoreDocCollector; public CollectorAnonymousClass2(bool scoreDocsInOrder, FixedBitSet actualResult, @@ -532,9 +532,8 @@ public CollectorAnonymousClass2(bool scoreDocsInOrder, this.topScoreDocCollector = topScoreDocCollector; } - private int _docBase; - + public void Collect(int doc) { actualResult.Set(doc + _docBase); @@ -546,7 +545,7 @@ public void SetNextReader(AtomicReaderContext context) _docBase = context.DocBase; topScoreDocCollector.SetNextReader(context); } - + public void SetScorer(Scorer scorer) { topScoreDocCollector.SetScorer(scorer); @@ -554,12 +553,12 @@ public void SetScorer(Scorer scorer) public bool AcceptsDocsOutOfOrder => scoreDocsInOrder; } - + private IndexIterationContext CreateContext(int nDocs, RandomIndexWriter writer, bool multipleValuesPerDocument, bool scoreDocsInOrder) { return CreateContext(nDocs, writer, writer, multipleValuesPerDocument, scoreDocsInOrder); } - + private IndexIterationContext CreateContext(int nDocs, RandomIndexWriter fromWriter, RandomIndexWriter toWriter, bool multipleValuesPerDocument, bool scoreDocsInOrder) { @@ -756,11 +755,10 @@ public CollectorAnonymousClass3(string fromField, joinValue = new BytesRef(); } - private Scorer scorer; private SortedSetDocValues docTermOrds; - internal readonly BytesRef joinValue; - + private readonly BytesRef joinValue; + public void Collect(int doc) { docTermOrds.SetDocument(doc); @@ -775,7 +773,7 @@ public void Collect(int doc) joinScore.AddScore(scorer.GetScore()); } } - + public void SetNextReader(AtomicReaderContext context) { docTermOrds = FieldCache.DEFAULT.GetDocTermOrds(context.AtomicReader, fromField); @@ -802,12 +800,11 @@ public CollectorAnonymousClass4(string fromField, spare = new BytesRef(); } - private Scorer scorer; private BinaryDocValues terms; private IBits docsWithField; private readonly BytesRef spare; - + public void Collect(int doc) { terms.Get(doc, spare); @@ -823,7 +820,7 @@ public void Collect(int doc) } joinScore.AddScore(scorer.GetScore()); } - + public void SetNextReader(AtomicReaderContext context) { terms = FieldCache.DEFAULT.GetTerms(context.AtomicReader, fromField, true); @@ -849,7 +846,7 @@ private sealed class CollectorAnonymousClass5 : ICollector private int docBase; public CollectorAnonymousClass5( - string toField, IDictionary joinValueToJoinScores, + string toField, IDictionary joinValueToJoinScores, IDictionary docToJoinScore) { this.toField = toField; @@ -877,7 +874,7 @@ public void Collect(int doc) } } } - + public void 
SetNextReader(AtomicReaderContext context) { docBase = context.DocBase; @@ -902,8 +899,8 @@ private sealed class CollectorAnonymousClass6 : ICollector private readonly BytesRef spare = new BytesRef(); public CollectorAnonymousClass6( - string toField, - IDictionary joinValueToJoinScores, + string toField, + IDictionary joinValueToJoinScores, IDictionary docToJoinScore) { this.toField = toField; @@ -920,7 +917,7 @@ public void Collect(int doc) } docToJoinScore[docBase + doc] = joinScore; } - + public void SetNextReader(AtomicReaderContext context) { terms = FieldCache.DEFAULT.GetTerms(context.AtomicReader, toField, false); @@ -1079,4 +1076,4 @@ internal virtual float Score(ScoreMode mode) } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs b/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs index ab012dec22..1ce9a2a394 100644 --- a/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs +++ b/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs @@ -193,10 +193,10 @@ protected override CustomScoreProvider GetCustomScoreProvider(AtomicReaderContex FieldCache.Int32s values = FieldCache.DEFAULT.GetInt32s(context.AtomicReader, INT_FIELD, false); return new CustomScoreProviderAnonymousClass(context, values); } - + private sealed class CustomScoreProviderAnonymousClass : CustomScoreProvider { - private FieldCache.Int32s values; + private readonly FieldCache.Int32s values; public CustomScoreProviderAnonymousClass(AtomicReaderContext context, FieldCache.Int32s values) : base(context) { @@ -214,7 +214,7 @@ public CustomExternalQuery(Query q) : base(q) { } } - + [Test] public virtual void TestCustomExternalQuery() { @@ -238,8 +238,8 @@ public virtual void TestCustomExternalQuery() } r.Dispose(); } - - [Test] + + [Test] public virtual void TestRewrite() { IndexReader r = DirectoryReader.Open(dir); @@ -306,7 +306,7 @@ private void DoTestCustomScore(ValueSource valueSource, double dboost) q5CustomMulAdd.Boost = boost; Log(q5CustomMulAdd); - // do al the searches + // do al the searches TopDocs td1 = s.Search(q1, null, 1000); TopDocs td2CustomNeutral = s.Search(q2CustomNeutral, null, 1000); TopDocs td3CustomMul = s.Search(q3CustomMul, null, 1000); @@ -374,7 +374,7 @@ private void VerifyResults(float boost, IndexSearcher s, assertEquals("new score for custom mul add", boost * fieldScore * (score1 + fieldScore), score5, CheckHits.ExplainToleranceDelta(boost * fieldScore * (score1 + fieldScore), score5)); } } - + private void LogResult(string msg, IndexSearcher s, Query q, int doc, float score1) { Log(msg + " " + score1); diff --git a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs index cf96e2b409..bfc219bea0 100644 --- a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs +++ b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs @@ -346,7 +346,7 @@ public void TestConsistencyOnExceptions() } // disable errors -- maybe randomness didn't exhaust all allowed failures, - // and we don't want e.g. CheckIndex to hit false errors. + // and we don't want e.g. CheckIndex to hit false errors. 
handlerIndexDir.MaxSizeInBytes = (0); handlerIndexDir.RandomIOExceptionRate = (0.0); handlerIndexDir.RandomIOExceptionRateOnOpen = (0.0); @@ -425,8 +425,6 @@ public Directory GetDirectory(string sessionId, string source) } } - - private sealed class ReplicationClientAnonymousClass : ReplicationClient { private readonly IndexAndTaxonomyReplicationClientTest test; @@ -497,6 +495,5 @@ protected override void HandleUpdateException(Exception exception) } } } - } } diff --git a/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs b/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs index bbf25b2de8..740e83be55 100644 --- a/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs +++ b/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs @@ -282,6 +282,7 @@ public void TestForwardOffsets() private sealed class AnalyzerWrapperAnonymousClass : AnalyzerWrapper { private readonly Analyzer @delegate; + public AnalyzerWrapperAnonymousClass(Analyzer @delegate) : base(@delegate.Strategy) { @@ -292,6 +293,7 @@ protected override TextReader WrapReader(string fieldName, TextReader reader) { return new MockCharFilter(reader, 7); } + protected override Analyzer GetWrappedAnalyzer(string fieldName) { return @delegate; @@ -314,7 +316,7 @@ public void TestWrapReader() // LUCENENET NOTE: This has some compatibility issues with Lucene 4.8.1, but need this test when // DelegatingAnalyzerWrapper is ported //[Test] - //public void TestChangeGaps() + //public void TestChangeGaps() //{ // // LUCENE-5324: check that it is possible to change the wrapper's gaps // int positionGap = Random.nextInt(1000); diff --git a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs index a90626bc14..649943eccc 100644 --- a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs +++ b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs @@ -1210,7 +1210,7 @@ public virtual void TestStressMultiThreading() { string f = "f" + i; string cf = "cf" + i; - threads[i] = new ThreadAnonymousClass(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf); + threads[i] = new ThreadAnonymousClass("UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf); } foreach (ThreadJob t in threads) @@ -1252,8 +1252,6 @@ public virtual void TestStressMultiThreading() private sealed class ThreadAnonymousClass : ThreadJob { - private readonly TestBinaryDocValuesUpdates outerInstance; - private readonly IndexWriter writer; private readonly int numDocs; private readonly CountdownEvent done; @@ -1261,10 +1259,9 @@ private sealed class ThreadAnonymousClass : ThreadJob private readonly string f; private readonly string cf; - public ThreadAnonymousClass(TestBinaryDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf) + public ThreadAnonymousClass(string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf) : base(str) { - this.outerInstance = outerInstance; this.writer = writer; this.numDocs = numDocs; this.done = done; @@ -1425,7 +1422,7 @@ public virtual void TestChangeCodec() Directory dir = NewDirectory(); IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // disable merges to simplify test assertions. 
- conf.SetCodec(new Lucene46CodecAnonymousClass2(this)); + conf.SetCodec(new Lucene46CodecAnonymousClass2()); IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone()); Document doc = new Document(); doc.Add(new StringField("id", "d0", Store.NO)); @@ -1435,7 +1432,7 @@ public virtual void TestChangeCodec() writer.Dispose(); // change format - conf.SetCodec(new Lucene46CodecAnonymousClass3(this)); + conf.SetCodec(new Lucene46CodecAnonymousClass3()); writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone()); doc = new Document(); doc.Add(new StringField("id", "d1", Store.NO)); @@ -1460,13 +1457,6 @@ public virtual void TestChangeCodec() private sealed class Lucene46CodecAnonymousClass2 : Lucene46Codec { - private readonly TestBinaryDocValuesUpdates outerInstance; - - public Lucene46CodecAnonymousClass2(TestBinaryDocValuesUpdates outerInstance) - { - this.outerInstance = outerInstance; - } - public override DocValuesFormat GetDocValuesFormatForField(string field) { return new Lucene45DocValuesFormat(); @@ -1475,13 +1465,6 @@ public override DocValuesFormat GetDocValuesFormatForField(string field) private sealed class Lucene46CodecAnonymousClass3 : Lucene46Codec { - private readonly TestBinaryDocValuesUpdates outerInstance; - - public Lucene46CodecAnonymousClass3(TestBinaryDocValuesUpdates outerInstance) - { - this.outerInstance = outerInstance; - } - public override DocValuesFormat GetDocValuesFormatForField(string field) { return new AssertingDocValuesFormat(); diff --git a/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs b/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs index cecfbd61a6..2584964f63 100644 --- a/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs +++ b/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs @@ -63,7 +63,7 @@ public virtual void TestReopen_Mem() private sealed class TestReopenAnonymousClass : TestReopen { - private Directory dir1; + private readonly Directory dir1; public TestReopenAnonymousClass(Directory dir1) { diff --git a/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs b/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs index 60056d5b42..6466dd9563 100644 --- a/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs +++ b/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs @@ -202,8 +202,8 @@ public TokenFilterAnonymousClass(Tokenizer tokenizer) posIncrAtt = AddAttribute(); } - internal bool first = true; - internal AttributeSource.State state; + private bool first = true; + private AttributeSource.State state; public sealed override bool IncrementToken() { @@ -245,9 +245,9 @@ public sealed override void Reset() state = null; } - internal readonly ICharTermAttribute termAtt; - internal readonly IPayloadAttribute payloadAtt; - internal readonly IPositionIncrementAttribute posIncrAtt; + private readonly ICharTermAttribute termAtt; + private readonly IPayloadAttribute payloadAtt; + private readonly IPositionIncrementAttribute posIncrAtt; } } @@ -291,10 +291,10 @@ public TokenStreamAnonymousClass() termAtt = AddAttribute(); } - private string[] tokens = new string[] { "term1", "term2", "term3", "term2" }; + private readonly string[] tokens = new string[] { "term1", "term2", "term3", "term2" }; private int index /*= 0*/; - private ICharTermAttribute termAtt; + private readonly ICharTermAttribute termAtt; public sealed override bool IncrementToken() { diff --git a/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs b/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs index 382cf17831..d75c012981 100644 --- 
a/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs +++ b/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs @@ -232,7 +232,7 @@ public virtual void TestPartiallyAppliedGlobalSlice() private sealed class ThreadAnonymousClass : ThreadJob { - private DocumentsWriterDeleteQueue queue; + private readonly DocumentsWriterDeleteQueue queue; public ThreadAnonymousClass(DocumentsWriterDeleteQueue queue) { diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs index 905e5256ee..7e8db747d5 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs @@ -1052,11 +1052,11 @@ public TokenStreamAnonymousClass() first = true; } - internal readonly ICharTermAttribute termAtt; - internal readonly IPositionIncrementAttribute posIncrAtt; + private readonly ICharTermAttribute termAtt; + private readonly IPositionIncrementAttribute posIncrAtt; - internal readonly IEnumerator terms; - internal bool first; + private readonly IEnumerator terms; + private bool first; public sealed override bool IncrementToken() { @@ -2894,7 +2894,7 @@ public virtual void TestMergeAllDeleted() private sealed class TestPointAnonymousClass : ITestPoint { - private SetOnce iwRef; + private readonly SetOnce iwRef; public TestPointAnonymousClass(SetOnce iwRef) { diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs index c9fb51f418..5afa9e51df 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs @@ -132,7 +132,7 @@ public EnumeratorAnonymousClass(DocCopyIterator outerInstance) this.outerInstance = outerInstance; } - internal int upto; + private int upto; private Document current; public bool MoveNext() @@ -166,9 +166,9 @@ private class IndexerThread : ThreadJob { private readonly TestIndexWriterExceptions outerInstance; - internal IndexWriter writer; + private readonly IndexWriter writer; - internal readonly Random r = new J2N.Randomizer(Random.NextInt64()); + private readonly Random r = new J2N.Randomizer(Random.NextInt64()); internal volatile Exception failure = null; public IndexerThread(TestIndexWriterExceptions outerInstance, int i, IndexWriter writer) @@ -295,7 +295,7 @@ public TestPoint1(TestIndexWriterExceptions outerInstance) this.outerInstance = outerInstance; } - internal Random r = new J2N.Randomizer(Random.NextInt64()); + private Random r = new J2N.Randomizer(Random.NextInt64()); public void Apply(string name) { @@ -677,8 +677,8 @@ public override void Reset() private class FailOnlyOnFlush : Failure { - internal bool doFail = false; - internal int count; + private bool doFail = false; + private int count; public override void SetDoFail() { diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs index 617a12bc5e..3a761b4578 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs @@ -1056,7 +1056,7 @@ public ThreadAnonymousClass2(IndexWriter writer, long endTime, ConcurrentQueue hits) this.hits = hits; } - internal int docBase; + private int docBase; public void SetScorer(Scorer scorer) { diff --git a/src/Lucene.Net.Tests/Search/TestCachingCollector.cs b/src/Lucene.Net.Tests/Search/TestCachingCollector.cs index f1c4f1d30c..a2fb75cd5b 100644 --- a/src/Lucene.Net.Tests/Search/TestCachingCollector.cs +++ 
b/src/Lucene.Net.Tests/Search/TestCachingCollector.cs @@ -106,7 +106,7 @@ public virtual void TestBasic() private sealed class CollectorAnonymousClass : ICollector { - internal int prevDocID = -1; + private int prevDocID = -1; public void SetScorer(Scorer scorer) { diff --git a/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs b/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs index 4df3b8426d..e94b0da9f2 100644 --- a/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs +++ b/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs @@ -303,7 +303,7 @@ private sealed class SearcherFactoryAnonymousClass : SearcherFactory { private readonly TestControlledRealTimeReopenThread outerInstance; - private TaskScheduler es; + private readonly TaskScheduler es; public SearcherFactoryAnonymousClass(TestControlledRealTimeReopenThread outerInstance, TaskScheduler es) { @@ -604,7 +604,7 @@ public virtual void TestListenerCalled() private sealed class RefreshListenerAnonymousClass : ReferenceManager.IRefreshListener { - private AtomicBoolean afterRefreshCalled; + private readonly AtomicBoolean afterRefreshCalled; public RefreshListenerAnonymousClass(AtomicBoolean afterRefreshCalled) { @@ -614,6 +614,7 @@ public RefreshListenerAnonymousClass(AtomicBoolean afterRefreshCalled) public void BeforeRefresh() { } + public void AfterRefresh(bool didRefresh) { if (didRefresh) @@ -697,9 +698,9 @@ public virtual void TestCRTReopen() private sealed class RunnableAnonymousClass : ThreadJob { - private SnapshotDeletionPolicy sdp; - private Directory dir; - private IndexWriter iw; + private readonly SnapshotDeletionPolicy sdp; + private readonly Directory dir; + private readonly IndexWriter iw; public RunnableAnonymousClass(SnapshotDeletionPolicy sdp, Directory dir, IndexWriter iw) { @@ -728,7 +729,6 @@ public override void Run() } } - /// /// This test was purposely written in a way that demonstrates how to use the /// ControlledRealTimeReopenThread. 
It contains seperate Asserts for each of diff --git a/src/Lucene.Net.Tests/Search/TestDocIdSet.cs b/src/Lucene.Net.Tests/Search/TestDocIdSet.cs index d4d38affba..57b8f1262e 100644 --- a/src/Lucene.Net.Tests/Search/TestDocIdSet.cs +++ b/src/Lucene.Net.Tests/Search/TestDocIdSet.cs @@ -99,7 +99,7 @@ public DocIdSetIteratorAnonymousClass(DocIdSetAnonymousClass outerInstance) docid = -1; } - internal int docid; + private int docid; public override int DocID => docid; diff --git a/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs b/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs index 2aaa6eeea4..d6649a650f 100644 --- a/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs +++ b/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs @@ -123,7 +123,7 @@ public virtual void TestSimple() private sealed class PerFieldSimilarityWrapperAnonymousClass : PerFieldSimilarityWrapper { - private Similarity @base; + private readonly Similarity @base; public PerFieldSimilarityWrapperAnonymousClass(Similarity @base) { diff --git a/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs b/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs index 5f711568a4..cf5ab937ae 100644 --- a/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs +++ b/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs @@ -77,8 +77,8 @@ public virtual void TestEarlyTermination_Mem() private sealed class CollectorAnonymousClass : ICollector { - internal readonly bool outOfOrder = Random.NextBoolean(); - internal bool collectionTerminated = true; + private readonly bool outOfOrder = Random.NextBoolean(); + private bool collectionTerminated = true; public void SetScorer(Scorer scorer) { diff --git a/src/Lucene.Net.Tests/Search/TestElevationComparator.cs b/src/Lucene.Net.Tests/Search/TestElevationComparator.cs index e7dedc6895..c35ebe9fcd 100644 --- a/src/Lucene.Net.Tests/Search/TestElevationComparator.cs +++ b/src/Lucene.Net.Tests/Search/TestElevationComparator.cs @@ -167,10 +167,10 @@ public FieldComparerAnonymousClass(ElevationComparerSource outerInstance, string tempBR = new BytesRef(); } - internal SortedDocValues idIndex; + private SortedDocValues idIndex; private readonly int[] values; private readonly BytesRef tempBR; - internal int bottomVal; + private int bottomVal; public override int CompareValues(J2N.Numerics.Int32 first, J2N.Numerics.Int32 second) { diff --git a/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs b/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs index 6b45085e01..7904884439 100644 --- a/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs +++ b/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs @@ -646,8 +646,8 @@ public DocIdSetIteratorAnonymousClass(DocIdSetAnonymousClass2 outerInstance, Doc this.termDocsEnum = termDocsEnum; } - internal bool nextCalled; - internal bool advanceCalled; + private bool nextCalled; + private bool advanceCalled; public override int NextDoc() { diff --git a/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs b/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs index 333c4e488c..57702fc98a 100644 --- a/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs +++ b/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs @@ -217,7 +217,7 @@ public TermRangeTermsEnumAnonymousClass(TermsEnum iterator, BytesRef bref1, Byte boostAtt = Attributes.AddAttribute(); } - internal readonly IBoostAttribute boostAtt; + private readonly IBoostAttribute boostAtt; protected override AcceptStatus Accept(BytesRef term) { diff --git a/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs 
b/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs index 446d4364c3..f1c3207e24 100644 --- a/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs +++ b/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs @@ -190,9 +190,9 @@ public TokenizerAnonymousClass(TextReader reader) private readonly int[] INCREMENTS; private int i; - internal IPositionIncrementAttribute posIncrAtt; - internal ICharTermAttribute termAtt; - internal IOffsetAttribute offsetAtt; + private readonly IPositionIncrementAttribute posIncrAtt; + private readonly ICharTermAttribute termAtt; + private readonly IOffsetAttribute offsetAtt; public override sealed bool IncrementToken() { diff --git a/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs b/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs index cf069d9e9b..95bc768a39 100644 --- a/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs +++ b/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs @@ -483,7 +483,7 @@ public ScorerAnonymousClass(WeightAnonymousClass outerInstance, AtomicReaderCont docID = -1; } - internal int docID; + private int docID; public override int DocID => docID; diff --git a/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs b/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs index cc54e3cf12..609f1d5ce6 100644 --- a/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs +++ b/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs @@ -123,7 +123,7 @@ public virtual void TestCustomProvider() private sealed class AutomatonProviderAnonymousClass : IAutomatonProvider { // automaton that matches quick or brown - private Automaton quickBrownAutomaton = BasicOperations.Union(new Automaton[] + private readonly Automaton quickBrownAutomaton = BasicOperations.Union(new Automaton[] { BasicAutomata.MakeString("quick"), BasicAutomata.MakeString("brown"), diff --git a/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs b/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs index 1060663104..e2b924cc07 100644 --- a/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs +++ b/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs @@ -134,7 +134,7 @@ public ThreadAnonymousClass(TestSearchWithThreads outerInstance, IndexSearcher s col = new TotalHitCountCollector(); } - internal TotalHitCountCollector col; + private readonly TotalHitCountCollector col; public override void Run() { diff --git a/src/Lucene.Net.Tests/Search/TestSearcherManager.cs b/src/Lucene.Net.Tests/Search/TestSearcherManager.cs index 2f1449d242..5e2fe78eb3 100644 --- a/src/Lucene.Net.Tests/Search/TestSearcherManager.cs +++ b/src/Lucene.Net.Tests/Search/TestSearcherManager.cs @@ -102,7 +102,7 @@ private sealed class SearcherFactoryAnonymousClass : SearcherFactory { private readonly TestSearcherManager outerInstance; - private TaskScheduler es; + private readonly TaskScheduler es; public SearcherFactoryAnonymousClass(TestSearcherManager outerInstance, TaskScheduler es) { @@ -134,7 +134,7 @@ private sealed class ThreadAnonymousClass : ThreadJob { private readonly TestSearcherManager outerInstance; - private long stopTime; + private readonly long stopTime; public ThreadAnonymousClass(TestSearcherManager outerInstance, long stopTime) { @@ -333,10 +333,10 @@ public virtual void TestIntermediateClose() private sealed class SearcherFactoryAnonymousClass2 : SearcherFactory { - private CountdownEvent awaitEnterWarm; - private CountdownEvent awaitClose; - private AtomicBoolean triedReopen; - private TaskScheduler es; + private readonly CountdownEvent awaitEnterWarm; + private readonly CountdownEvent awaitClose; + private readonly AtomicBoolean 
triedReopen; + private readonly TaskScheduler es; public SearcherFactoryAnonymousClass2(CountdownEvent awaitEnterWarm, CountdownEvent awaitClose, AtomicBoolean triedReopen, TaskScheduler es) { @@ -366,10 +366,10 @@ public override IndexSearcher NewSearcher(IndexReader r) private sealed class RunnableAnonymousClass //: IThreadRunnable { - private AtomicBoolean triedReopen; - private SearcherManager searcherManager; - private AtomicBoolean success; - private Exception[] exc; + private readonly AtomicBoolean triedReopen; + private readonly SearcherManager searcherManager; + private readonly AtomicBoolean success; + private readonly Exception[] exc; public RunnableAnonymousClass(AtomicBoolean triedReopen, SearcherManager searcherManager, AtomicBoolean success, Exception[] exc) { @@ -511,7 +511,7 @@ public virtual void TestListenerCalled() private sealed class RefreshListenerAnonymousClass : ReferenceManager.IRefreshListener { - private AtomicBoolean afterRefreshCalled; + private readonly AtomicBoolean afterRefreshCalled; public RefreshListenerAnonymousClass(AtomicBoolean afterRefreshCalled) { @@ -545,7 +545,7 @@ public virtual void TestEvilSearcherFactory() try { - new SearcherManager(dir, theEvilOne); + _ = new SearcherManager(dir, theEvilOne); } catch (Exception ise) when (ise.IsIllegalStateException()) { @@ -553,7 +553,7 @@ public virtual void TestEvilSearcherFactory() } try { - new SearcherManager(w.IndexWriter, random.NextBoolean(), theEvilOne); + _ = new SearcherManager(w.IndexWriter, random.NextBoolean(), theEvilOne); } catch (Exception ise) when (ise.IsIllegalStateException()) { @@ -566,7 +566,7 @@ public virtual void TestEvilSearcherFactory() private sealed class SearcherFactoryAnonymousClass3 : SearcherFactory { - private IndexReader other; + private readonly IndexReader other; public SearcherFactoryAnonymousClass3(IndexReader other) { @@ -603,7 +603,7 @@ public virtual void TestMaybeRefreshBlockingLock() private sealed class ThreadAnonymousClass2 : ThreadJob { - private SearcherManager sm; + private readonly SearcherManager sm; public ThreadAnonymousClass2(SearcherManager sm) { diff --git a/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs index 17ef5195fa..9f1c0fe084 100644 --- a/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs +++ b/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs @@ -239,20 +239,13 @@ public virtual void SetNextReader(AtomicReaderContext context) /// checks that no scores or freqs are infinite private void AssertSaneScoring(PhraseQuery pq, IndexSearcher searcher) { - searcher.Search(pq, new CollectorAnonymousClass(this)); + searcher.Search(pq, new CollectorAnonymousClass()); QueryUtils.Check(Random, pq, searcher); } private sealed class CollectorAnonymousClass : ICollector { - private readonly TestSloppyPhraseQuery outerInstance; - - public CollectorAnonymousClass(TestSloppyPhraseQuery outerInstance) - { - this.outerInstance = outerInstance; - } - - internal Scorer scorer; + private Scorer scorer; public void SetScorer(Scorer scorer) { diff --git a/src/Lucene.Net.Tests/Search/TestTermScorer.cs b/src/Lucene.Net.Tests/Search/TestTermScorer.cs index 34491b4642..5cf5b3c774 100644 --- a/src/Lucene.Net.Tests/Search/TestTermScorer.cs +++ b/src/Lucene.Net.Tests/Search/TestTermScorer.cs @@ -91,7 +91,7 @@ public virtual void Test() IList docs = new JCG.List(); // must call next first - ts.Score(new CollectorAnonymousClass(this, docs)); + ts.Score(new CollectorAnonymousClass(docs)); Assert.IsTrue(docs.Count 
== 2, "docs Size: " + docs.Count + " is not: " + 2); TestHit doc0 = docs[0]; TestHit doc5 = docs[1]; @@ -111,13 +111,10 @@ public virtual void Test() private sealed class CollectorAnonymousClass : ICollector { - private readonly TestTermScorer outerInstance; - private readonly IList docs; - public CollectorAnonymousClass(TestTermScorer outerInstance, IList docs) + public CollectorAnonymousClass(IList docs) { - this.outerInstance = outerInstance; this.docs = docs; @base = 0; } diff --git a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs index bff040174a..6beae22b68 100644 --- a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs +++ b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs @@ -42,7 +42,7 @@ public DocIdSetIteratorAnonymousClass(BitSet bs, int numBits) this.numBits = numBits; } - int doc = -1; + private int doc = -1; public override int NextDoc() { diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs index 9bc3be19c1..14fd869e2c 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs @@ -391,9 +391,9 @@ public DataInputAnonymousClass(CompressingStoredFieldsReader outerInstance, int decompressed = outerInstance.bytes.Length; } - internal int decompressed; + private int decompressed; - internal void FillBuffer() + private void FillBuffer() { if (Debugging.AssertsEnabled) Debugging.Assert(decompressed <= length); if (decompressed == length) diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xCodec.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xCodec.cs index 053ded643d..1acf1a257e 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xCodec.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xCodec.cs @@ -62,11 +62,6 @@ public Lucene3xCodec() private sealed class DocValuesFormatAnonymousClass : DocValuesFormat { - public DocValuesFormatAnonymousClass() - : base() - { - } - public override DocValuesConsumer FieldsConsumer(SegmentWriteState state) { throw UnsupportedOperationException.Create("this codec cannot write docvalues"); @@ -124,4 +119,4 @@ public static ISet GetDocStoreFiles(SegmentInfo info) } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs index 179cf0d6ac..7317d1db22 100644 --- a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs +++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs @@ -749,9 +749,9 @@ public RandomAccessOrdsAnonymousClass(long valueCount, Lucene45DocValuesProducer this.ordIndex = ordIndex; } - internal long startOffset; - internal long offset; - internal long endOffset; + private long startOffset; + private long offset; + private long endOffset; [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long NextOrd() diff --git a/src/Lucene.Net/Index/DocValues.cs b/src/Lucene.Net/Index/DocValues.cs index 4ef2be87a8..77039091c0 100644 --- a/src/Lucene.Net/Index/DocValues.cs +++ b/src/Lucene.Net/Index/DocValues.cs @@ -40,10 +40,6 @@ private DocValues() private sealed class BinaryDocValuesAnonymousClass : BinaryDocValues { - public BinaryDocValuesAnonymousClass() - { - } - public override void Get(int docID, BytesRef result) { result.Bytes = BytesRef.EMPTY_BYTES; @@ -59,10 +55,6 @@ public override void Get(int docID, BytesRef result) private 
sealed class NumericDocValuesAnonymousClass : NumericDocValues { - public NumericDocValuesAnonymousClass() - { - } - public override long Get(int docID) { return 0; @@ -76,10 +68,6 @@ public override long Get(int docID) private sealed class SortedDocValuesAnonymousClass : SortedDocValues { - public SortedDocValuesAnonymousClass() - { - } - public override int GetOrd(int docID) { return -1; @@ -102,10 +90,6 @@ public override void LookupOrd(int ord, BytesRef result) private sealed class RandomAccessOrdsAnonymousClass : RandomAccessOrds { - public RandomAccessOrdsAnonymousClass() - { - } - public override long NextOrd() { return NO_MORE_ORDS; @@ -209,4 +193,4 @@ public bool Get(int index) public int Length => maxDoc; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs index 2bec9dd03b..e71af22fe9 100644 --- a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs +++ b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs @@ -62,10 +62,6 @@ internal abstract class IndexingChain private sealed class IndexingChainAnonymousClass : IndexingChain { - public IndexingChainAnonymousClass() - { - } - internal override DocConsumer GetChain(DocumentsWriterPerThread documentsWriterPerThread) { /* diff --git a/src/Lucene.Net/Search/DocIdSetIterator.cs b/src/Lucene.Net/Search/DocIdSetIterator.cs index f52ae1648d..42c045e66b 100644 --- a/src/Lucene.Net/Search/DocIdSetIterator.cs +++ b/src/Lucene.Net/Search/DocIdSetIterator.cs @@ -37,11 +37,7 @@ public static DocIdSetIterator GetEmpty() private sealed class DocIdSetIteratorAnonymousClass : DocIdSetIterator { - public DocIdSetIteratorAnonymousClass() - { - } - - internal bool exhausted = false; + private bool exhausted = false; public override int Advance(int target) { @@ -114,10 +110,10 @@ public override long GetCost() /// When target > current it behaves as if written: /// /// - /// int Advance(int target) + /// int Advance(int target) /// { /// int doc; - /// while ((doc = NextDoc()) < target) + /// while ((doc = NextDoc()) < target) /// { /// } /// return doc; @@ -160,4 +156,4 @@ protected internal int SlowAdvance(int target) /// public abstract long GetCost(); } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Search/MultiTermQuery.cs b/src/Lucene.Net/Search/MultiTermQuery.cs index 622bdc4f30..f0878d47cc 100644 --- a/src/Lucene.Net/Search/MultiTermQuery.cs +++ b/src/Lucene.Net/Search/MultiTermQuery.cs @@ -28,11 +28,11 @@ namespace Lucene.Net.Search /// /// An abstract that matches documents - /// containing a subset of terms provided by a + /// containing a subset of terms provided by a /// enumeration. /// /// This query cannot be used directly; you must subclass - /// it and define to provide a + /// it and define to provide a /// that iterates through the terms to be /// matched. /// @@ -41,12 +41,12 @@ namespace Lucene.Net.Search /// , you may encounter a /// exception during /// searching, which happens when the number of terms to be - /// searched exceeds - /// . Setting + /// searched exceeds + /// . Setting /// to /// prevents this. /// - /// The recommended rewrite method is + /// The recommended rewrite method is /// : it doesn't spend CPU /// computing unhelpful scores, and it tries to pick the most /// performant rewrite method given the query. If you @@ -56,7 +56,7 @@ namespace Lucene.Net.Search /// and not hit this limitation. /// /// Note that QueryParsers.Classic.QueryParser produces - /// s using + /// s using /// by default. 
/// public abstract class MultiTermQuery : Query @@ -96,10 +96,6 @@ protected virtual TermsEnum GetTermsEnum(MultiTermQuery query, Terms terms, Attr private sealed class RewriteMethodAnonymousClass : RewriteMethod { - public RewriteMethodAnonymousClass() - { - } - public override Query Rewrite(IndexReader reader, MultiTermQuery query) { Query result = new ConstantScoreQuery(new MultiTermQueryWrapperFilter(query)); @@ -114,10 +110,10 @@ public override Query Rewrite(IndexReader reader, MultiTermQuery query) /// , and keeps the scores as computed by the /// query. Note that typically such scores are /// meaningless to the user, and require non-trivial CPU - /// to compute, so it's almost always better to use + /// to compute, so it's almost always better to use /// instead. /// - /// NOTE: this rewrite method will hit + /// NOTE: this rewrite method will hit /// if the number of terms /// exceeds . /// @@ -130,7 +126,7 @@ public override Query Rewrite(IndexReader reader, MultiTermQuery query) /// document receives a constant score equal to the /// query's boost. /// - /// NOTE: this rewrite method will hit + /// NOTE: this rewrite method will hit /// if the number of terms /// exceeds . /// @@ -215,13 +211,13 @@ protected override void AddClause(BooleanQuery topLevel, Term term, int docFreq, } } - // LUCENENET specific - just use the non-nested class directly. This is + // LUCENENET specific - just use the non-nested class directly. This is // confusing in .NET. // /// // /// A rewrite method that tries to pick the best // /// constant-score rewrite method based on term and // /// document counts from the query. If both the number of -// /// terms and documents is small enough, then +// /// terms and documents is small enough, then // /// is used. // /// Otherwise, is // /// used. @@ -232,10 +228,10 @@ protected override void AddClause(BooleanQuery topLevel, Term term, int docFreq, /// /// Read-only default instance of - /// , with + /// , with /// set to /// - /// and + /// and /// set to /// . /// Note that you cannot alter the configuration of this @@ -246,10 +242,6 @@ protected override void AddClause(BooleanQuery topLevel, Term term, int docFreq, private sealed class ConstantScoreAutoRewriteAnonymousClass : ConstantScoreAutoRewrite { - public ConstantScoreAutoRewriteAnonymousClass() - { - } - public override int TermCountCutoff { get => base.TermCountCutoff; // LUCENENET specific - adding getter for API consistency check @@ -319,7 +311,7 @@ public override sealed Query Rewrite(IndexReader reader) /// query. You can use one of the four core methods, or /// implement your own subclass of . /// - public virtual RewriteMethod MultiTermRewriteMethod + public virtual RewriteMethod MultiTermRewriteMethod { get => m_rewriteMethod; set => m_rewriteMethod = value; @@ -364,4 +356,4 @@ public override bool Equals(object obj) return (other.m_field is null ? 
m_field is null : other.m_field.Equals(m_field, StringComparison.Ordinal)); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Search/NumericRangeQuery.cs b/src/Lucene.Net/Search/NumericRangeQuery.cs index 28697939ce..043fa071dc 100644 --- a/src/Lucene.Net/Search/NumericRangeQuery.cs +++ b/src/Lucene.Net/Search/NumericRangeQuery.cs @@ -430,7 +430,7 @@ public Int64RangeBuilderAnonymousClass(NumericRangeTermsEnum outerInstance) this.outerInstance = outerInstance; } - public override sealed void AddRange(BytesRef minPrefixCoded, BytesRef maxPrefixCoded) + public override void AddRange(BytesRef minPrefixCoded, BytesRef maxPrefixCoded) { outerInstance.rangeBounds.Enqueue(minPrefixCoded); outerInstance.rangeBounds.Enqueue(maxPrefixCoded); @@ -446,7 +446,7 @@ public Int32RangeBuilderAnonymousClass(NumericRangeTermsEnum outerInstance) this.outerInstance = outerInstance; } - public override sealed void AddRange(BytesRef minPrefixCoded, BytesRef maxPrefixCoded) + public override void AddRange(BytesRef minPrefixCoded, BytesRef maxPrefixCoded) { outerInstance.rangeBounds.Enqueue(minPrefixCoded); outerInstance.rangeBounds.Enqueue(maxPrefixCoded); diff --git a/src/Lucene.Net/Search/ScoringRewrite.cs b/src/Lucene.Net/Search/ScoringRewrite.cs index 102f7b7be8..31e4996fd7 100644 --- a/src/Lucene.Net/Search/ScoringRewrite.cs +++ b/src/Lucene.Net/Search/ScoringRewrite.cs @@ -48,10 +48,10 @@ public abstract class ScoringRewrite : TermCollectingRewrite where Q : Que /// , and keeps the scores as computed by the /// query. Note that typically such scores are /// meaningless to the user, and require non-trivial CPU - /// to compute, so it's almost always better to use + /// to compute, so it's almost always better to use /// instead. /// - /// NOTE: this rewrite method will hit + /// NOTE: this rewrite method will hit /// if the number of terms /// exceeds . /// @@ -60,10 +60,6 @@ public abstract class ScoringRewrite : TermCollectingRewrite where Q : Que private sealed class ScoringRewriteAnonymousClass : ScoringRewrite { - public ScoringRewriteAnonymousClass() - { - } - protected override BooleanQuery GetTopLevelQuery() { return new BooleanQuery(true); @@ -91,7 +87,7 @@ protected override void CheckMaxClauseCount(int count) /// document receives a constant score equal to the /// query's boost. /// - /// NOTE: this rewrite method will hit + /// NOTE: this rewrite method will hit /// if the number of terms /// exceeds . 
/// @@ -100,10 +96,6 @@ protected override void CheckMaxClauseCount(int count) private sealed class RewriteMethodAnonymousClass : RewriteMethod { - public RewriteMethodAnonymousClass() - { - } - public override Query Rewrite(IndexReader reader, MultiTermQuery query) { BooleanQuery bq = (BooleanQuery)SCORING_BOOLEAN_QUERY_REWRITE.Rewrite(reader, query); @@ -233,4 +225,4 @@ public override int[] Clear() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Search/SortField.cs b/src/Lucene.Net/Search/SortField.cs index fe632045e5..af8951da42 100644 --- a/src/Lucene.Net/Search/SortField.cs +++ b/src/Lucene.Net/Search/SortField.cs @@ -154,10 +154,6 @@ public SortField(string field, FieldCache.IParser parser, bool reverse) private sealed class ObjectAnonymousClass : object { - public ObjectAnonymousClass() - { - } - public override string ToString() { return "SortField.STRING_FIRST"; @@ -172,10 +168,6 @@ public override string ToString() private sealed class ObjectAnonymousClass2 : object { - public ObjectAnonymousClass2() - { - } - public override string ToString() { return "SortField.STRING_LAST"; @@ -288,7 +280,7 @@ private void InitFieldType(string field, SortFieldType type) /// /// Returns the type of contents in the field. - /// One of , , + /// One of , , /// , or . public virtual SortFieldType Type => type; @@ -384,7 +376,7 @@ public override string ToString() /// /// Returns true if is equal to this. If a - /// or + /// or /// was provided, it must properly /// implement equals (unless a singleton is always used). /// @@ -399,9 +391,9 @@ public override bool Equals(object o) return false; } SortField other = (SortField)o; - return (StringHelper.Equals(other.field, this.field) - && other.type == this.type - && other.reverse == this.reverse + return (StringHelper.Equals(other.field, this.field) + && other.type == this.type + && other.reverse == this.reverse && (other.comparerSource is null ? this.comparerSource is null : other.comparerSource.Equals(this.comparerSource))); } @@ -441,7 +433,7 @@ public virtual IComparer BytesComparer /// @lucene.experimental /// /// Number of top hits the queue will store - /// Position of this within + /// Position of this within /// . The comparer is primary if sortPos==0, /// secondary if sortPos==1, etc. Some comparers can /// optimize themselves when they are the primary sort. 
@@ -608,4 +600,4 @@ public enum SortFieldType // LUCENENET NOTE: de-nested and renamed from Type to /// REWRITEABLE } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs b/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs index 40fd9ebc7f..d0fb363ee6 100644 --- a/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs +++ b/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs @@ -165,18 +165,10 @@ public override bool Equals(object obj) private sealed class SpanRewriteMethodAnonymousClass : SpanRewriteMethod { - public SpanRewriteMethodAnonymousClass() - { - } - private readonly ScoringRewrite @delegate = new ScoringRewriteAnonymousClass(); private sealed class ScoringRewriteAnonymousClass : ScoringRewrite { - public ScoringRewriteAnonymousClass() - { - } - protected override SpanOrQuery GetTopLevelQuery() { return new SpanOrQuery(); @@ -313,4 +305,4 @@ public interface ISpanMultiTermQueryWrapper Query WrappedQuery { get; } Query Rewrite(IndexReader reader); } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Util/BytesRefArray.cs b/src/Lucene.Net/Util/BytesRefArray.cs index 0fc3fcce79..717c4dcb0a 100644 --- a/src/Lucene.Net/Util/BytesRefArray.cs +++ b/src/Lucene.Net/Util/BytesRefArray.cs @@ -222,7 +222,7 @@ public BytesRefIteratorAnonymousClass(BytesRefArray outerInstance, IComparer Comparer => comparer; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Util/Fst/NoOutputs.cs b/src/Lucene.Net/Util/Fst/NoOutputs.cs index 300d581801..2bd8134aeb 100644 --- a/src/Lucene.Net/Util/Fst/NoOutputs.cs +++ b/src/Lucene.Net/Util/Fst/NoOutputs.cs @@ -35,10 +35,6 @@ public sealed class NoOutputs : Outputs private sealed class ObjectAnonymousClass : object { - public ObjectAnonymousClass() - { - } - /// /// NodeHash calls hashCode for this output; we fix this /// so we get deterministic hashing. @@ -130,4 +126,4 @@ public override string OutputToString(object output) return ""; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Util/RamUsageEstimator.cs b/src/Lucene.Net/Util/RamUsageEstimator.cs index 6e1f7f818b..e7add4fd80 100644 --- a/src/Lucene.Net/Util/RamUsageEstimator.cs +++ b/src/Lucene.Net/Util/RamUsageEstimator.cs @@ -1003,9 +1003,9 @@ public EnumeratorAnonymousClass(IdentityHashSet outerInstance) nextElement = FetchNext(); } - internal int pos; - internal object nextElement; - internal KType current; + private int pos; + private object nextElement; + private KType current; public bool MoveNext() {