Skip to content

Commit

Permalink
Converted anonymous IComparer<T> classes to use Comparer.Create() delegate method

Browse files Browse the repository at this point in the history

Related Work Items: #1
  • Loading branch information
bongohrtech authored and NightOwl888 committed Mar 31, 2020
1 parent 1274197 commit d90db02
Show file tree
Hide file tree
Showing 39 changed files with 427 additions and 973 deletions.
98 changes: 42 additions & 56 deletions src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
Original file line number Diff line number Diff line change
Expand Up @@ -790,7 +790,48 @@ private void ReadDictionaryFiles(IList<Stream> dictionaries, Encoding decoder, B

FileInfo sorted = FileSupport.CreateTempFile("sorted", "dat", tempDir);

OfflineSorter sorter = new OfflineSorter(new ComparerAnonymousInnerClassHelper(this));
OfflineSorter sorter = new OfflineSorter(Comparer<BytesRef>.Create((o1, o2) =>
{
BytesRef scratch1 = new BytesRef();
BytesRef scratch2 = new BytesRef();
scratch1.Bytes = o1.Bytes;
scratch1.Offset = o1.Offset;
scratch1.Length = o1.Length;
for (int i = scratch1.Length - 1; i >= 0; i--)
{
if (scratch1.Bytes[scratch1.Offset + i] == this.FLAG_SEPARATOR)
{
scratch1.Length = i;
break;
}
}
scratch2.Bytes = o2.Bytes;
scratch2.Offset = o2.Offset;
scratch2.Length = o2.Length;
for (int i = scratch2.Length - 1; i >= 0; i--)
{
if (scratch2.Bytes[scratch2.Offset + i] == this.FLAG_SEPARATOR)
{
scratch2.Length = i;
break;
}
}
int cmp = scratch1.CompareTo(scratch2);
if (cmp == 0)
{
// tie break on whole row
return o1.CompareTo(o2);
}
else
{
return cmp;
}
}));
sorter.Sort(unsorted, sorted);
try
{
Expand Down Expand Up @@ -897,61 +938,6 @@ private void ReadDictionaryFiles(IList<Stream> dictionaries, Encoding decoder, B
}
}

/// <summary>
/// Comparer used when offline-sorting dictionary rows: entries are compared by
/// the portion of each row that precedes the FLAG_SEPARATOR byte, with the full
/// row used as a tie-breaker so the sort stays total.
/// </summary>
private class ComparerAnonymousInnerClassHelper : IComparer<BytesRef>
{
    private readonly Dictionary outerInstance;

    public ComparerAnonymousInnerClassHelper(Dictionary outerInstance)
    {
        this.outerInstance = outerInstance;
        scratch1 = new BytesRef();
        scratch2 = new BytesRef();
    }

    // Reusable views over the incoming rows; repopulated on every Compare call.
    internal BytesRef scratch1;
    internal BytesRef scratch2;

    /// <summary>
    /// Points <paramref name="dest"/> at <paramref name="src"/>'s bytes and
    /// truncates its length at the last FLAG_SEPARATOR, if one is present.
    /// </summary>
    private void PointAtRowPrefix(BytesRef dest, BytesRef src)
    {
        dest.Bytes = src.Bytes;
        dest.Offset = src.Offset;
        dest.Length = src.Length;

        // Scan backwards so the separator closest to the end wins.
        for (int i = dest.Length - 1; i >= 0; i--)
        {
            if (dest.Bytes[dest.Offset + i] == outerInstance.FLAG_SEPARATOR)
            {
                dest.Length = i;
                break;
            }
        }
    }

    public virtual int Compare(BytesRef o1, BytesRef o2)
    {
        PointAtRowPrefix(scratch1, o1);
        PointAtRowPrefix(scratch2, o2);

        int cmp = scratch1.CompareTo(scratch2);
        // tie break on whole row
        return cmp != 0 ? cmp : o1.CompareTo(o2);
    }
}

internal static char[] DecodeFlags(BytesRef b)
{
if (b.Length == 0)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ public HunspellStemFilter(TokenStream input, Dictionary dictionary, bool dedup)
/// <param name="dictionary"> Hunspell <see cref="Dictionary"/> containing the affix rules and words that will be used to stem the tokens </param>
/// <param name="dedup"> remove duplicates </param>
/// <param name="longestOnly"> true if only the longest term should be output. </param>
public HunspellStemFilter(TokenStream input, Dictionary dictionary, bool dedup, bool longestOnly)
public HunspellStemFilter(TokenStream input, Dictionary dictionary, bool dedup, bool longestOnly)
: base(input)
{
this.dedup = dedup && longestOnly == false; // don't waste time deduping if longestOnly is set
Expand Down Expand Up @@ -145,26 +145,17 @@ public override void Reset()
buffer = null;
}

internal static readonly IComparer<CharsRef> lengthComparer = new ComparerAnonymousInnerClassHelper();

private class ComparerAnonymousInnerClassHelper : IComparer<CharsRef>
internal static readonly IComparer<CharsRef> lengthComparer = Comparer<CharsRef>.Create((o1, o2) =>
{
public ComparerAnonymousInnerClassHelper()
if (o2.Length == o1.Length)
{
// tie break on text
return o2.CompareTo(o1);
}

public virtual int Compare(CharsRef o1, CharsRef o2)
else
{
if (o2.Length == o1.Length)
{
// tie break on text
return o2.CompareTo(o1);
}
else
{
return o2.Length < o1.Length ? -1 : 1;
}
return o2.Length < o1.Length ? -1 : 1;
}
}
});
}
}
14 changes: 2 additions & 12 deletions src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ public UserDictionary(TextReader reader)

// TODO: should we allow multiple segmentations per input 'phrase'?
// the old treemap didn't support this either, and i'm not sure if its needed/useful?
featureEntries.Sort(new ComparerAnonymousHelper());
featureEntries.Sort(Comparer<string[]>.Create((left, right) => left[0].CompareToOrdinal(right[0])));

List<string> data = new List<string>(featureEntries.Count);
List<int[]> segmentations = new List<int[]>(featureEntries.Count);
Expand Down Expand Up @@ -124,17 +124,7 @@ public UserDictionary(TextReader reader)
this.data = data.ToArray(/*new string[data.Count]*/);
this.segmentations = segmentations.ToArray(/*new int[segmentations.Count][]*/);
}

// LUCENENET TODO: Make an AnonymousComparer class in Support and
// replace all of these classes.
// LUCENENET TODO: Make an AnonymousComparer class in Support and
// replace all of these classes.
/// <summary>
/// Orders user-dictionary rows by their first CSV field using an ordinal
/// (culture-insensitive) string comparison.
/// </summary>
private class ComparerAnonymousHelper : IComparer<string[]>
{
    public int Compare(string[] x, string[] y) => x[0].CompareToOrdinal(y[0]);
}


/// <summary>
/// Lookup words in text.
/// </summary>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ public virtual TokenInfoDictionaryWriter BuildDictionary(IList<string> csvFiles)
Console.WriteLine(" sort...");

// sort by term: we sorted the files already and use a stable sort.
lines.Sort(new ComparerAnonymousHelper());
lines.Sort(Comparer<string[]>.Create((left, right) => left[0].CompareToOrdinal(right[0])));

Console.WriteLine(" encode...");

Expand Down Expand Up @@ -159,15 +159,7 @@ public virtual TokenInfoDictionaryWriter BuildDictionary(IList<string> csvFiles)

return dictionary;
}

/// <summary>
/// Sorts CSV dictionary lines by term (field 0) with an ordinal comparison,
/// matching the byte-order sort used for the input files.
/// </summary>
private class ComparerAnonymousHelper : IComparer<string[]>
{
    public int Compare(string[] a, string[] b) => a[0].CompareToOrdinal(b[0]);
}


/// <summary>
/// IPADIC features
///
Expand Down
16 changes: 6 additions & 10 deletions src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,12 @@ public virtual UnknownDictionaryWriter ReadDictionaryFile(string filename, strin
}
}

lines.Sort(new ComparerAnonymousHelper());
lines.Sort(Comparer<string[]>.Create((left, right) =>
{
int leftId = CharacterDefinition.LookupCharacterClass(left[0]);
int rightId = CharacterDefinition.LookupCharacterClass(right[0]);
return leftId - rightId;
}));

foreach (string[] entry in lines)
{
Expand All @@ -81,15 +86,6 @@ public virtual UnknownDictionaryWriter ReadDictionaryFile(string filename, strin

return dictionary;
}
/// <summary>
/// Orders unknown-dictionary entries by the character class id resolved from
/// their first field, ascending.
/// </summary>
private class ComparerAnonymousHelper : IComparer<string[]>
{
    public int Compare(string[] left, string[] right)
    {
        int leftId = CharacterDefinition.LookupCharacterClass(left[0]);
        int rightId = CharacterDefinition.LookupCharacterClass(right[0]);
        // CompareTo instead of "leftId - rightId": the subtraction idiom can
        // overflow for extreme values and is a known comparator anti-pattern.
        return leftId.CompareTo(rightId);
    }
}

public virtual void ReadCharacterDefinition(string filename, UnknownDictionaryWriter dictionary)
{
Expand Down
18 changes: 3 additions & 15 deletions src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
Original file line number Diff line number Diff line change
Expand Up @@ -303,20 +303,7 @@ public override IList<FacetResult> GetAllDims(int topN)
}

// Sort by highest count:
results.Sort(new ComparerAnonymousInnerClassHelper(this));
return results;
}

private class ComparerAnonymousInnerClassHelper : IComparer<FacetResult>
{
private readonly SortedSetDocValuesFacetCounts outerInstance;

public ComparerAnonymousInnerClassHelper(SortedSetDocValuesFacetCounts outerInstance)
{
this.outerInstance = outerInstance;
}

public virtual int Compare(FacetResult a, FacetResult b)
results.Sort(Comparer<FacetResult>.Create((a, b) =>
{
if ((int)a.Value > (int)b.Value)
{
Expand All @@ -330,7 +317,8 @@ public virtual int Compare(FacetResult a, FacetResult b)
{
return a.Dim.CompareToOrdinal(b.Dim);
}
}
}));
return results;
}
}
}
31 changes: 11 additions & 20 deletions src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
Original file line number Diff line number Diff line change
Expand Up @@ -28,31 +28,22 @@ namespace Lucene.Net.Facet.Taxonomy
/// </summary>
public abstract class TaxonomyFacets : Facets
{
private static readonly IComparer<FacetResult> BY_VALUE_THEN_DIM = new ComparerAnonymousInnerClassHelper();

private class ComparerAnonymousInnerClassHelper : IComparer<FacetResult>
private static readonly IComparer<FacetResult> BY_VALUE_THEN_DIM = Comparer<FacetResult>.Create((a, b) =>
{
public ComparerAnonymousInnerClassHelper()
if (a.Value > b.Value)
{
return -1;
}

public virtual int Compare(FacetResult a, FacetResult b)
else if (b.Value > a.Value)
{
if (a.Value > b.Value)
{
return -1;
}
else if (b.Value > a.Value)
{
return 1;
}
else
{
return a.Dim.CompareToOrdinal(b.Dim);
}
return 1;
}
}

else
{
return a.Dim.CompareToOrdinal(b.Dim);
}
});

/// <summary>
/// Index field name provided to the constructor.
/// </summary>
Expand Down
47 changes: 17 additions & 30 deletions src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
Original file line number Diff line number Diff line change
Expand Up @@ -584,34 +584,7 @@ private IDictionary<int, object> HighlightField(string field, string[] contents,

return highlights;
}

/// <summary>
/// Priority-queue ordering for passages: ascending by score, with the passage
/// start offset breaking ties (this also handles the case where neither score
/// compares less or greater, e.g. equal values).
/// </summary>
internal class HighlightDocComparerAnonymousHelper1 : IComparer<Passage>
{
    public int Compare(Passage left, Passage right)
    {
        // Nested conditional mirrors: less -> -1, greater -> 1, otherwise
        // fall back to the offset difference.
        return left.score < right.score
            ? -1
            : left.score > right.score
                ? 1
                : left.startOffset - right.startOffset;
    }
}

/// <summary>
/// Orders passages by start offset only (ascending); used for the final
/// in-document sort of the selected passages.
/// </summary>
internal class HighlightDocComparerAnonymousHelper2 : IComparer<Passage>
{
    public int Compare(Passage left, Passage right) => left.startOffset - right.startOffset;
}


// algorithm: treat sentence snippets as miniature documents
// we can intersect these with the postings lists via BreakIterator.preceding(offset),s
// score each sentence as norm(sentenceStartOffset) * sum(weight * tf(freq))
Expand Down Expand Up @@ -668,7 +641,21 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength

pq.Add(new OffsetsEnum(EMPTY, int.MaxValue)); // a sentinel for termination

JCG.PriorityQueue<Passage> passageQueue = new JCG.PriorityQueue<Passage>(n, new HighlightDocComparerAnonymousHelper1());
JCG.PriorityQueue<Passage> passageQueue = new JCG.PriorityQueue<Passage>(n, Comparer<Passage>.Create((left, right) =>
{
if (left.score < right.score)
{
return -1;
}
else if (left.score > right.score)
{
return 1;
}
else
{
return left.startOffset - right.startOffset;
}
}));
Passage current = new Passage();

while (pq.TryDequeue(out OffsetsEnum off))
Expand Down Expand Up @@ -722,7 +709,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength
p.Sort();
}
// sort in ascending order
ArrayUtil.TimSort(passages, new HighlightDocComparerAnonymousHelper2());
ArrayUtil.TimSort(passages, Comparer<Passage>.Create((left, right) => left.startOffset - right.startOffset));
return passages;
}
// advance breakiterator
Expand Down
Loading

0 comments on commit d90db02

Please sign in to comment.