Skip to content

Commit

Permalink
Refactor DocumentDimensions to RoutingFields
Browse files Browse the repository at this point in the history
  • Loading branch information
kkrik-es committed Nov 11, 2024
1 parent 64c362b commit b82d027
Show file tree
Hide file tree
Showing 23 changed files with 556 additions and 473 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
import org.elasticsearch.index.mapper.RoutingPathFields;
import org.elasticsearch.search.aggregations.AggregationReduceContext;
import org.elasticsearch.search.aggregations.AggregatorReducer;
import org.elasticsearch.search.aggregations.InternalAggregation;
Expand Down Expand Up @@ -68,7 +68,7 @@ public void writeTo(StreamOutput out) throws IOException {

@Override
public Map<String, Object> getKey() {
return TimeSeriesIdFieldMapper.decodeTsidAsMap(key);
return RoutingPathFields.decodeAsMap(key);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
import org.elasticsearch.index.mapper.RoutingPathFields;
import org.elasticsearch.search.aggregations.AggregationExecutionContext;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
Expand Down Expand Up @@ -161,11 +161,11 @@ public void collect(int doc, long bucket) throws IOException {
if (currentTsidOrd == aggCtx.getTsidHashOrd()) {
tsid = currentTsid;
} else {
TimeSeriesIdFieldMapper.TimeSeriesIdBuilder tsidBuilder = new TimeSeriesIdFieldMapper.TimeSeriesIdBuilder(null);
RoutingPathFields routingPathFields = new RoutingPathFields(null);
for (TsidConsumer consumer : dimensionConsumers.values()) {
consumer.accept(doc, tsidBuilder);
consumer.accept(doc, routingPathFields);
}
currentTsid = tsid = tsidBuilder.buildLegacyTsid().toBytesRef();
currentTsid = tsid = routingPathFields.buildHash().toBytesRef();
}
long bucketOrdinal = bucketOrds.add(bucket, tsid);
if (bucketOrdinal < 0) { // already seen
Expand All @@ -189,6 +189,6 @@ InternalTimeSeries buildResult(InternalTimeSeries.InternalBucket[] topBuckets) {

@FunctionalInterface
interface TsidConsumer {
void accept(int docId, TimeSeriesIdFieldMapper.TimeSeriesIdBuilder tsidBuilder) throws IOException;
void accept(int docId, RoutingPathFields routingPathFields) throws IOException;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.common.util.MockPageCacheRecycler;
import org.elasticsearch.index.mapper.RoutingPathFields;
import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.search.aggregations.AggregationReduceContext;
Expand Down Expand Up @@ -42,12 +43,12 @@ private List<InternalBucket> randomBuckets(boolean keyed, InternalAggregations a
List<Map<String, Object>> keys = randomKeys(bucketKeys(randomIntBetween(1, 4)), numberOfBuckets);
for (int j = 0; j < numberOfBuckets; j++) {
long docCount = randomLongBetween(0, Long.MAX_VALUE / (20L * numberOfBuckets));
var builder = new TimeSeriesIdFieldMapper.TimeSeriesIdBuilder(null);
var routingPathFields = new RoutingPathFields(null);
for (var entry : keys.get(j).entrySet()) {
builder.addString(entry.getKey(), (String) entry.getValue());
routingPathFields.addString(entry.getKey(), (String) entry.getValue());
}
try {
var key = builder.buildLegacyTsid().toBytesRef();
var key = TimeSeriesIdFieldMapper.buildLegacyTsid(routingPathFields).toBytesRef();
bucketList.add(new InternalBucket(key, docCount, aggregations, keyed));
} catch (IOException e) {
throw new UncheckedIOException(e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperBuilderContext;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.RoutingPathFields;
import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper.TimeSeriesIdBuilder;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
Expand Down Expand Up @@ -93,10 +93,10 @@ public static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimens
final List<IndexableField> fields = new ArrayList<>();
fields.add(new SortedNumericDocValuesField(DataStreamTimestampFieldMapper.DEFAULT_PATH, timestamp));
fields.add(new LongPoint(DataStreamTimestampFieldMapper.DEFAULT_PATH, timestamp));
final TimeSeriesIdBuilder builder = new TimeSeriesIdBuilder(null);
RoutingPathFields routingPathFields = new RoutingPathFields(null);
for (int i = 0; i < dimensions.length; i += 2) {
if (dimensions[i + 1] instanceof Number n) {
builder.addLong(dimensions[i].toString(), n.longValue());
routingPathFields.addLong(dimensions[i].toString(), n.longValue());
if (dimensions[i + 1] instanceof Integer || dimensions[i + 1] instanceof Long) {
fields.add(new NumericDocValuesField(dimensions[i].toString(), ((Number) dimensions[i + 1]).longValue()));
} else if (dimensions[i + 1] instanceof Float) {
Expand All @@ -105,7 +105,7 @@ public static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimens
fields.add(new DoubleDocValuesField(dimensions[i].toString(), (double) dimensions[i + 1]));
}
} else {
builder.addString(dimensions[i].toString(), dimensions[i + 1].toString());
routingPathFields.addString(dimensions[i].toString(), dimensions[i + 1].toString());
fields.add(new SortedSetDocValuesField(dimensions[i].toString(), new BytesRef(dimensions[i + 1].toString())));
}
}
Expand All @@ -118,7 +118,9 @@ public static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimens
fields.add(new DoubleDocValuesField(metrics[i].toString(), (double) metrics[i + 1]));
}
}
fields.add(new SortedDocValuesField(TimeSeriesIdFieldMapper.NAME, builder.buildLegacyTsid().toBytesRef()));
fields.add(
new SortedDocValuesField(TimeSeriesIdFieldMapper.NAME, TimeSeriesIdFieldMapper.buildLegacyTsid(routingPathFields).toBytesRef())
);
iw.addDocument(fields);
}

Expand Down
23 changes: 12 additions & 11 deletions server/src/main/java/org/elasticsearch/index/IndexMode.java
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DocumentDimensions;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
Expand All @@ -33,6 +32,8 @@
import org.elasticsearch.index.mapper.NestedLookup;
import org.elasticsearch.index.mapper.ProvidedIdFieldMapper;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.RoutingFields;
import org.elasticsearch.index.mapper.RoutingPathFields;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
import org.elasticsearch.index.mapper.TimeSeriesRoutingHashFieldMapper;
Expand Down Expand Up @@ -111,8 +112,8 @@ public IdFieldMapper buildIdFieldMapper(BooleanSupplier fieldDataEnabled) {
}

@Override
public DocumentDimensions buildDocumentDimensions(IndexSettings settings) {
return DocumentDimensions.Noop.INSTANCE;
public RoutingFields buildRoutingFields(IndexSettings settings) {
return RoutingFields.Noop.INSTANCE;
}

@Override
Expand Down Expand Up @@ -209,9 +210,9 @@ public IdFieldMapper buildIdFieldMapper(BooleanSupplier fieldDataEnabled) {
}

@Override
public DocumentDimensions buildDocumentDimensions(IndexSettings settings) {
public RoutingFields buildRoutingFields(IndexSettings settings) {
IndexRouting.ExtractFromSource routing = (IndexRouting.ExtractFromSource) settings.getIndexRouting();
return new TimeSeriesIdFieldMapper.TimeSeriesIdBuilder(routing.builder());
return new RoutingPathFields(routing.builder());
}

@Override
Expand Down Expand Up @@ -287,8 +288,8 @@ public MetadataFieldMapper timeSeriesRoutingHashFieldMapper() {
}

@Override
public DocumentDimensions buildDocumentDimensions(IndexSettings settings) {
return DocumentDimensions.Noop.INSTANCE;
public RoutingFields buildRoutingFields(IndexSettings settings) {
return RoutingFields.Noop.INSTANCE;
}

@Override
Expand Down Expand Up @@ -368,8 +369,8 @@ public IdFieldMapper buildIdFieldMapper(BooleanSupplier fieldDataEnabled) {
}

@Override
public DocumentDimensions buildDocumentDimensions(IndexSettings settings) {
return DocumentDimensions.Noop.INSTANCE;
public RoutingFields buildRoutingFields(IndexSettings settings) {
return RoutingFields.Noop.INSTANCE;
}

@Override
Expand Down Expand Up @@ -522,9 +523,9 @@ public String getName() {
public abstract MetadataFieldMapper timeSeriesRoutingHashFieldMapper();

/**
* How {@code time_series_dimension} fields are handled by indices in this mode.
* How routing fields are handled by indices in this mode.
*/
public abstract DocumentDimensions buildDocumentDimensions(IndexSettings settings);
public abstract RoutingFields buildRoutingFields(IndexSettings settings);

/**
* @return Whether timestamps should be validated for being within the time range of an index.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -499,7 +499,7 @@ private void indexValue(DocumentParserContext context, Boolean value) {
}

if (fieldType().isDimension()) {
context.getDimensions().addBoolean(fieldType().name(), value).validate(context.indexSettings());
context.getRoutingFields().addBoolean(fieldType().name(), value);
}
if (indexed) {
context.doc().add(new StringField(fieldType().name(), value ? Values.TRUE : Values.FALSE, Field.Store.NO));
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ private enum Scope {
private final DynamicMapperSize dynamicMappersSize;
private final Map<String, ObjectMapper> dynamicObjectMappers;
private final Map<String, List<RuntimeField>> dynamicRuntimeFields;
private final DocumentDimensions dimensions;
private final RoutingFields dimensions;
private final ObjectMapper parent;
private final ObjectMapper.Dynamic dynamic;
private String id;
Expand Down Expand Up @@ -158,7 +158,7 @@ private DocumentParserContext(
String id,
Field version,
SeqNoFieldMapper.SequenceIDFields seqID,
DocumentDimensions dimensions,
RoutingFields dimensions,
ObjectMapper parent,
ObjectMapper.Dynamic dynamic,
Set<String> fieldsAppliedFromTemplates,
Expand Down Expand Up @@ -231,7 +231,7 @@ protected DocumentParserContext(
null,
null,
SeqNoFieldMapper.SequenceIDFields.emptySeqID(),
DocumentDimensions.fromIndexSettings(mappingParserContext.getIndexSettings()),
RoutingFields.fromIndexSettings(mappingParserContext.getIndexSettings()),
parent,
dynamic,
new HashSet<>(),
Expand Down Expand Up @@ -760,9 +760,9 @@ public XContentParser parser() {
}

/**
* The collection of dimensions for this document.
* The collection of routing fields for this document.
*/
public DocumentDimensions getDimensions() {
public RoutingFields getRoutingFields() {
return dimensions;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -549,7 +549,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio

private void indexValue(DocumentParserContext context, InetAddress address) {
if (dimension) {
context.getDimensions().addIp(fieldType().name(), address).validate(context.indexSettings());
context.getRoutingFields().addIp(fieldType().name(), address);
}
if (indexed) {
Field field = new InetAddressPoint(fieldType().name(), address);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -944,7 +944,7 @@ private void indexValue(DocumentParserContext context, String value) {
final BytesRef binaryValue = new BytesRef(value);

if (fieldType().isDimension()) {
context.getDimensions().addString(fieldType().name(), binaryValue).validate(context.indexSettings());
context.getRoutingFields().addString(fieldType().name(), binaryValue);
}

// If the UTF8 encoding of the field value is bigger than the max length 32766, Lucene fill fail the indexing request and, to
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1991,7 +1991,7 @@ public Number value(XContentParser parser) throws IllegalArgumentException, IOEx
*/
public void indexValue(DocumentParserContext context, Number numericValue) {
if (dimension && numericValue != null) {
context.getDimensions().addLong(fieldType().name(), numericValue.longValue()).validate(context.indexSettings());
context.getRoutingFields().addLong(fieldType().name(), numericValue.longValue());
}
fieldType().type.addFields(context.doc(), fieldType().name(), numericValue, indexed, hasDocValues, stored);

Expand Down
Loading

0 comments on commit b82d027

Please sign in to comment.