In the internal highlighter APIs, use the field type as opposed to the mapper. (#31039)
jtibshirani authored Jun 4, 2018
1 parent 53357e7 commit 609de08
Showing 12 changed files with 109 additions and 107 deletions.
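
The core of the change is an API swap in the highlighting subsystem: Highlighter.canHighlight, HighlighterContext, HighlightUtils.loadFieldValues and the fast vector highlighter internals now work with a MappedFieldType (resolved through MapperService.fullName) rather than a FieldMapper. Six of the twelve changed files appear below. As a rough sketch of what a highlighter implementation looks like against the post-commit API — the class name PassthroughHighlighter and its trivial behaviour are invented for illustration and are not part of the commit:

package org.elasticsearch.search.fetch.subphase.highlight;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.List;

import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.mapper.MappedFieldType;

// Hypothetical example, not part of this commit: a bare-bones Highlighter written against
// the post-commit API, placed in the same package as the classes changed below.
public class PassthroughHighlighter implements Highlighter {

    @Override
    public boolean canHighlight(MappedFieldType fieldType) {
        // The decision is now made from the field type alone; before this commit the
        // method received a FieldMapper and had to go through fieldMapper.fieldType().
        return true;
    }

    @Override
    public HighlightField highlight(HighlighterContext highlighterContext) {
        // HighlighterContext now exposes the field type directly.
        MappedFieldType fieldType = highlighterContext.fieldType;
        try {
            // loadFieldValues also takes the MappedFieldType now (see HighlightUtils below).
            List<Object> values = HighlightUtils.loadFieldValues(highlighterContext.field, fieldType,
                highlighterContext.context, highlighterContext.hitContext);
            if (values.isEmpty()) {
                return null;
            }
            // Return the stored/source values unmodified as "fragments", without any markup.
            String[] fragments = values.stream().map(Object::toString).toArray(String[]::new);
            return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}

The same shift is visible throughout the real diffs: every call site that previously took a FieldMapper only to go through mapper.fieldType() now receives the MappedFieldType directly, and the fast vector highlighter's per-field cache is keyed by MappedFieldType instead of FieldMapper.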
========== FastVectorHighlighter.java ==========
@@ -36,7 +36,7 @@
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.text.Text;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field;
@@ -71,9 +71,9 @@ public HighlightField highlight(HighlighterContext highlighterContext) {
         SearchContextHighlight.Field field = highlighterContext.field;
         SearchContext context = highlighterContext.context;
         FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;
-        FieldMapper mapper = highlighterContext.mapper;
+        MappedFieldType fieldType = highlighterContext.fieldType;

-        if (canHighlight(mapper) == false) {
+        if (canHighlight(fieldType) == false) {
             throw new IllegalArgumentException("the field [" + highlighterContext.fieldName +
                 "] should be indexed with term vector with position offsets to be used with fast vector highlighter");
         }
@@ -87,7 +87,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) {
         HighlighterEntry cache = (HighlighterEntry) hitContext.cache().get(CACHE_KEY);

         try {
-            MapperHighlightEntry entry = cache.mappers.get(mapper);
+            FieldHighlightEntry entry = cache.fields.get(fieldType);
             if (entry == null) {
                 FragListBuilder fragListBuilder;
                 BaseFragmentsBuilder fragmentsBuilder;
@@ -97,37 +97,37 @@ public HighlightField highlight(HighlighterContext highlighterContext) {
                 if (field.fieldOptions().numberOfFragments() == 0) {
                     fragListBuilder = new SingleFragListBuilder();

-                    if (!forceSource && mapper.fieldType().stored()) {
-                        fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(),
+                    if (!forceSource && fieldType.stored()) {
+                        fragmentsBuilder = new SimpleFragmentsBuilder(fieldType, field.fieldOptions().preTags(),
                             field.fieldOptions().postTags(), boundaryScanner);
                     } else {
-                        fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context,
+                        fragmentsBuilder = new SourceSimpleFragmentsBuilder(fieldType, context,
                             field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
                     }
                 } else {
                     fragListBuilder = field.fieldOptions().fragmentOffset() == -1 ?
                         new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset());
                     if (field.fieldOptions().scoreOrdered()) {
-                        if (!forceSource && mapper.fieldType().stored()) {
+                        if (!forceSource && fieldType.stored()) {
                             fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(),
                                 field.fieldOptions().postTags(), boundaryScanner);
                         } else {
-                            fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(mapper, context,
+                            fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(fieldType, context,
                                 field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
                         }
                     } else {
-                        if (!forceSource && mapper.fieldType().stored()) {
-                            fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(),
+                        if (!forceSource && fieldType.stored()) {
+                            fragmentsBuilder = new SimpleFragmentsBuilder(fieldType, field.fieldOptions().preTags(),
                                 field.fieldOptions().postTags(), boundaryScanner);
                         } else {
                             fragmentsBuilder =
-                                new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(),
+                                new SourceSimpleFragmentsBuilder(fieldType, context, field.fieldOptions().preTags(),
                                     field.fieldOptions().postTags(), boundaryScanner);
                         }
                     }
                 }
                 fragmentsBuilder.setDiscreteMultiValueHighlighting(termVectorMultiValue);
-                entry = new MapperHighlightEntry();
+                entry = new FieldHighlightEntry();
                 if (field.fieldOptions().requireFieldMatch()) {
                     /**
                      * we use top level reader to rewrite the query against all readers,
@@ -152,7 +152,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) {
                     cache.fvh = new org.apache.lucene.search.vectorhighlight.FastVectorHighlighter();
                 }
                 CustomFieldQuery.highlightFilters.set(field.fieldOptions().highlightFilter());
-                cache.mappers.put(mapper, entry);
+                cache.fields.put(fieldType, entry);
             }
             final FieldQuery fieldQuery;
             if (field.fieldOptions().requireFieldMatch()) {
@@ -173,12 +173,12 @@ public HighlightField highlight(HighlighterContext highlighterContext) {
                 // Only send matched fields if they were requested to save time.
                 if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) {
                     fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(),
-                        mapper.fieldType().name(), field.fieldOptions().matchedFields(), fragmentCharSize,
+                        fieldType.name(), field.fieldOptions().matchedFields(), fragmentCharSize,
                         numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(),
                         field.fieldOptions().postTags(), encoder);
                 } else {
                     fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(),
-                        mapper.fieldType().name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder,
+                        fieldType.name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder,
                         entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder);
                 }

@@ -193,7 +193,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) {
                 FieldFragList fieldFragList = new SimpleFieldFragList(-1 /*ignored*/);
                 fieldFragList.add(0, noMatchSize, Collections.<WeightedPhraseInfo>emptyList());
                 fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(),
-                    mapper.fieldType().name(), fieldFragList, 1, field.fieldOptions().preTags(),
+                    fieldType.name(), fieldFragList, 1, field.fieldOptions().preTags(),
                     field.fieldOptions().postTags(), encoder);
                 if (fragments != null && fragments.length > 0) {
                     return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
@@ -209,9 +209,10 @@ public HighlightField highlight(HighlighterContext highlighterContext) {
     }

     @Override
-    public boolean canHighlight(FieldMapper fieldMapper) {
-        return fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets()
-            && fieldMapper.fieldType().storeTermVectorPositions();
+    public boolean canHighlight(MappedFieldType fieldType) {
+        return fieldType.storeTermVectors()
+            && fieldType.storeTermVectorOffsets()
+            && fieldType.storeTermVectorPositions();
     }

     private static BoundaryScanner getBoundaryScanner(Field field) {
@@ -244,7 +245,7 @@ private static BoundaryScanner getBoundaryScanner(Field field) {
         }
     }

-    private class MapperHighlightEntry {
+    private class FieldHighlightEntry {
         public FragListBuilder fragListBuilder;
         public FragmentsBuilder fragmentsBuilder;
         public FieldQuery noFieldMatchFieldQuery;
@@ -253,6 +254,6 @@ private class MapperHighlightEntry {

     private class HighlighterEntry {
         public org.apache.lucene.search.vectorhighlight.FastVectorHighlighter fvh;
-        public Map<FieldMapper, MapperHighlightEntry> mappers = new HashMap<>();
+        public Map<MappedFieldType, FieldHighlightEntry> fields = new HashMap<>();
     }
 }
========== FragmentBuilderHelper.java ==========
@@ -29,7 +29,7 @@
 import org.elasticsearch.index.analysis.CustomAnalyzer;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;

 import java.util.Comparator;
 import java.util.List;
@@ -47,10 +47,10 @@ private FragmentBuilderHelper() {
      * Fixes problems with broken analysis chains if positions and offsets are messed up that can lead to
      * {@link StringIndexOutOfBoundsException} in the {@link FastVectorHighlighter}
      */
-    public static WeightedFragInfo fixWeightedFragInfo(FieldMapper mapper, Field[] values, WeightedFragInfo fragInfo) {
+    public static WeightedFragInfo fixWeightedFragInfo(MappedFieldType fieldType, Field[] values, WeightedFragInfo fragInfo) {
         assert fragInfo != null : "FragInfo must not be null";
-        assert mapper.fieldType().name().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name();
-        if (!fragInfo.getSubInfos().isEmpty() && containsBrokenAnalysis(mapper.fieldType().indexAnalyzer())) {
+        assert fieldType.name().equals(values[0].name()) : "Expected MappedFieldType for field " + values[0].name();
+        if (!fragInfo.getSubInfos().isEmpty() && containsBrokenAnalysis(fieldType.indexAnalyzer())) {
             /* This is a special case where broken analysis like WDF is used for term-vector creation at index-time
              * which can potentially mess up the offsets. To prevent a SAIIOBException we need to resort
              * the fragments based on their offsets rather than using soley the positions as it is done in
========== HighlightPhase.java ==========
@@ -24,18 +24,16 @@
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.SourceFieldMapper;
 import org.elasticsearch.index.mapper.TextFieldMapper;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.internal.SearchContext;

 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

 public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
@@ -71,8 +69,8 @@ public void hitExecute(SearchContext context, HitContext hitContext) {

             boolean fieldNameContainsWildcards = field.field().contains("*");
             for (String fieldName : fieldNamesToHighlight) {
-                FieldMapper fieldMapper = getMapperForField(fieldName, context, hitContext);
-                if (fieldMapper == null) {
+                MappedFieldType fieldType = context.mapperService().fullName(fieldName);
+                if (fieldType == null) {
                     continue;
                 }

@@ -85,8 +83,8 @@ public void hitExecute(SearchContext context, HitContext hitContext) {
                 // If the field was explicitly given we assume that whoever issued the query knew
                 // what they were doing and try to highlight anyway.
                 if (fieldNameContainsWildcards) {
-                    if (fieldMapper.fieldType().typeName().equals(TextFieldMapper.CONTENT_TYPE) == false &&
-                        fieldMapper.fieldType().typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false) {
+                    if (fieldType.typeName().equals(TextFieldMapper.CONTENT_TYPE) == false &&
+                        fieldType.typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false) {
                         continue;
                     }
                 }
@@ -104,10 +102,10 @@ public void hitExecute(SearchContext context, HitContext hitContext) {
                 if (highlightQuery == null) {
                     highlightQuery = context.parsedQuery().query();
                 }
-                HighlighterContext highlighterContext = new HighlighterContext(fieldName, field, fieldMapper, context,
-                    hitContext, highlightQuery);
+                HighlighterContext highlighterContext = new HighlighterContext(fieldName,
+                    field, fieldType, context, hitContext, highlightQuery);

-                if ((highlighter.canHighlight(fieldMapper) == false) && fieldNameContainsWildcards) {
+                if ((highlighter.canHighlight(fieldType) == false) && fieldNameContainsWildcards) {
                     // if several fieldnames matched the wildcard then we want to skip those that we cannot highlight
                     continue;
                 }
@@ -119,10 +117,4 @@ public void hitExecute(SearchContext context, HitContext hitContext) {
         }
         hitContext.hit().highlightFields(highlightFields);
     }
-
-    private FieldMapper getMapperForField(String fieldName, SearchContext searchContext, HitContext hitContext) {
-        DocumentMapper documentMapper = searchContext.mapperService().documentMapper(hitContext.hit().getType());
-        // TODO: no need to lookup the doc mapper with unambiguous field names? just look at the mapper service
-        return documentMapper.mappers().smartNameFieldMapper(fieldName);
-    }
 }
========== HighlightUtils.java ==========
@@ -22,7 +22,7 @@
 import org.apache.lucene.search.highlight.Encoder;
 import org.apache.lucene.search.highlight.SimpleHTMLEncoder;
 import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.lookup.SourceLookup;
@@ -46,23 +46,25 @@ private HighlightUtils() {
     /**
      * Load field values for highlighting.
      */
-    public static List<Object> loadFieldValues(SearchContextHighlight.Field field, FieldMapper mapper, SearchContext searchContext,
-                                               FetchSubPhase.HitContext hitContext) throws IOException {
+    public static List<Object> loadFieldValues(SearchContextHighlight.Field field,
+                                               MappedFieldType fieldType,
+                                               SearchContext searchContext,
+                                               FetchSubPhase.HitContext hitContext) throws IOException {
         //percolator needs to always load from source, thus it sets the global force source to true
         boolean forceSource = searchContext.highlight().forceSource(field);
         List<Object> textsToHighlight;
-        if (!forceSource && mapper.fieldType().stored()) {
-            CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(mapper.fieldType().name()), false);
+        if (!forceSource && fieldType.stored()) {
+            CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(fieldType.name()), false);
             hitContext.reader().document(hitContext.docId(), fieldVisitor);
-            textsToHighlight = fieldVisitor.fields().get(mapper.fieldType().name());
+            textsToHighlight = fieldVisitor.fields().get(fieldType.name());
             if (textsToHighlight == null) {
                 // Can happen if the document doesn't have the field to highlight
                 textsToHighlight = Collections.emptyList();
             }
         } else {
             SourceLookup sourceLookup = searchContext.lookup().source();
             sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId());
-            textsToHighlight = sourceLookup.extractRawValues(mapper.fieldType().name());
+            textsToHighlight = sourceLookup.extractRawValues(fieldType.name());
         }
         assert textsToHighlight != null;
         return textsToHighlight;
========== Highlighter.java ==========
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.search.fetch.subphase.highlight;

-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;

 /**
  * Highlights a search result.
@@ -27,5 +27,5 @@ public interface Highlighter {

     HighlightField highlight(HighlighterContext highlighterContext);

-    boolean canHighlight(FieldMapper fieldMapper);
+    boolean canHighlight(MappedFieldType fieldType);
 }
========== HighlighterContext.java ==========
@@ -19,24 +19,28 @@
 package org.elasticsearch.search.fetch.subphase.highlight;

 import org.apache.lucene.search.Query;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.internal.SearchContext;

 public class HighlighterContext {

     public final String fieldName;
     public final SearchContextHighlight.Field field;
-    public final FieldMapper mapper;
+    public final MappedFieldType fieldType;
     public final SearchContext context;
     public final FetchSubPhase.HitContext hitContext;
     public final Query query;

-    public HighlighterContext(String fieldName, SearchContextHighlight.Field field, FieldMapper mapper, SearchContext context,
-                              FetchSubPhase.HitContext hitContext, Query query) {
+    public HighlighterContext(String fieldName,
+                              SearchContextHighlight.Field field,
+                              MappedFieldType fieldType,
+                              SearchContext context,
+                              FetchSubPhase.HitContext hitContext,
+                              Query query) {
         this.fieldName = fieldName;
         this.field = field;
-        this.mapper = mapper;
+        this.fieldType = fieldType;
         this.context = context;
         this.hitContext = hitContext;
         this.query = query;