files) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java
index 2c12c0abb202b..c649c4ab13e7e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java
@@ -74,11 +74,6 @@ public Settings restoreIndexDocRepSettings() {
return Settings.builder().put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.DOCUMENT).build();
}
- @Override
- protected boolean addMockInternalEngine() {
- return false;
- }
-
public void ingestData(int docCount, String indexName) throws Exception {
for (int i = 0; i < docCount; i++) {
client().prepareIndex(indexName).setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet();
diff --git a/server/src/main/java/org/apache/lucene/util/LongHashSet.java b/server/src/main/java/org/apache/lucene/util/LongHashSet.java
new file mode 100644
index 0000000000000..a463e8a189585
--- /dev/null
+++ b/server/src/main/java/org/apache/lucene/util/LongHashSet.java
@@ -0,0 +1,136 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.apache.lucene.util;
+
+import org.apache.lucene.util.packed.PackedInts;
+
+import java.util.Arrays;
+import java.util.Objects;
+import java.util.stream.Collectors;
+import java.util.stream.LongStream;
+
+/** Set of longs, optimized for docvalues usage */
+public final class LongHashSet implements Accountable {
+ private static final long BASE_RAM_BYTES = RamUsageEstimator.shallowSizeOfInstance(LongHashSet.class);
+
+ private static final long MISSING = Long.MIN_VALUE;
+
+ final long[] table;
+ final int mask;
+ final boolean hasMissingValue;
+ final int size;
+ /** minimum value in the set, or Long.MAX_VALUE for an empty set */
+ public final long minValue;
+ /** maximum value in the set, or Long.MIN_VALUE for an empty set */
+ public final long maxValue;
+
+ /** Construct a set. Values must be in sorted order. */
+ public LongHashSet(long[] values) {
+ int tableSize = Math.toIntExact(values.length * 3L / 2);
+ tableSize = 1 << PackedInts.bitsRequired(tableSize); // make it a power of 2
+ assert tableSize >= values.length * 3L / 2;
+ table = new long[tableSize];
+ Arrays.fill(table, MISSING);
+ mask = tableSize - 1;
+ boolean hasMissingValue = false;
+ int size = 0;
+ long previousValue = Long.MIN_VALUE; // for assert
+ for (long value : values) {
+ if (value == MISSING) {
+ size += hasMissingValue ? 0 : 1;
+ hasMissingValue = true;
+ } else if (add(value)) {
+ ++size;
+ }
+ assert value >= previousValue : "values must be provided in sorted order";
+ previousValue = value;
+ }
+ this.hasMissingValue = hasMissingValue;
+ this.size = size;
+ this.minValue = values.length == 0 ? Long.MAX_VALUE : values[0];
+ this.maxValue = values.length == 0 ? Long.MIN_VALUE : values[values.length - 1];
+ }
+
+ private boolean add(long l) {
+ assert l != MISSING;
+ final int slot = Long.hashCode(l) & mask;
+ for (int i = slot;; i = (i + 1) & mask) {
+ if (table[i] == MISSING) {
+ table[i] = l;
+ return true;
+ } else if (table[i] == l) {
+ // already added
+ return false;
+ }
+ }
+ }
+
+ /**
+ * check for membership in the set.
+ *
+ * You should use {@link #minValue} and {@link #maxValue} to guide/terminate iteration before
+ * calling this.
+ */
+ public boolean contains(long l) {
+ if (l == MISSING) {
+ return hasMissingValue;
+ }
+ final int slot = Long.hashCode(l) & mask;
+ for (int i = slot;; i = (i + 1) & mask) {
+ if (table[i] == MISSING) {
+ return false;
+ } else if (table[i] == l) {
+ return true;
+ }
+ }
+ }
+
+ /** returns a stream of all values contained in this set */
+ LongStream stream() {
+ LongStream stream = Arrays.stream(table).filter(v -> v != MISSING);
+ if (hasMissingValue) {
+ stream = LongStream.concat(LongStream.of(MISSING), stream);
+ }
+ return stream;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(size, minValue, maxValue, mask, hasMissingValue, Arrays.hashCode(table));
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj != null && obj instanceof LongHashSet) {
+ LongHashSet that = (LongHashSet) obj;
+ return size == that.size
+ && minValue == that.minValue
+ && maxValue == that.maxValue
+ && mask == that.mask
+ && hasMissingValue == that.hasMissingValue
+ && Arrays.equals(table, that.table);
+ }
+ return false;
+ }
+
+ @Override
+ public String toString() {
+ return stream().mapToObj(String::valueOf).collect(Collectors.joining(", ", "[", "]"));
+ }
+
+ /** number of elements in the set */
+ public int size() {
+ return size;
+ }
+
+ @Override
+ public long ramBytesUsed() {
+ return BASE_RAM_BYTES + RamUsageEstimator.sizeOfObject(table);
+ }
+}
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java
index bc27c70282368..6ce62dda32d0a 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java
@@ -220,9 +220,11 @@ private static ObjectParser<AliasActions, Void> parser(String name, Supplier<AliasActions> supplier) {
private static final ObjectParser<AliasActions, Void> REMOVE_PARSER = parser(REMOVE.getPreferredName(), AliasActions::remove);
+ static {
+ REMOVE_PARSER.declareField(AliasActions::mustExist, XContentParser::booleanValue, MUST_EXIST, ValueType.BOOLEAN);
+ }
private static final ObjectParser<AliasActions, Void> REMOVE_INDEX_PARSER = parser(
REMOVE_INDEX.getPreferredName(),
AliasActions::removeIndex
@@ -554,6 +556,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
if (null != isHidden) {
builder.field(IS_HIDDEN.getPreferredName(), isHidden);
}
+ if (null != mustExist) {
+ builder.field(MUST_EXIST.getPreferredName(), mustExist);
+ }
builder.endObject();
builder.endObject();
return builder;
@@ -582,6 +587,8 @@ public String toString() {
+ searchRouting
+ ",writeIndex="
+ writeIndex
+ + ",mustExist="
+ + mustExist
+ "]";
}
@@ -600,12 +607,13 @@ public boolean equals(Object obj) {
&& Objects.equals(indexRouting, other.indexRouting)
&& Objects.equals(searchRouting, other.searchRouting)
&& Objects.equals(writeIndex, other.writeIndex)
- && Objects.equals(isHidden, other.isHidden);
+ && Objects.equals(isHidden, other.isHidden)
+ && Objects.equals(mustExist, other.mustExist);
}
@Override
public int hashCode() {
- return Objects.hash(type, indices, aliases, filter, routing, indexRouting, searchRouting, writeIndex, isHidden);
+ return Objects.hash(type, indices, aliases, filter, routing, indexRouting, searchRouting, writeIndex, isHidden, mustExist);
}
}
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java
index 769044f3dafb7..81cb3102cfcb9 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java
@@ -50,6 +50,7 @@
import org.opensearch.cluster.metadata.MetadataIndexAliasesService;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.inject.Inject;
+import org.opensearch.common.regex.Regex;
import org.opensearch.core.action.ActionListener;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.index.Index;
@@ -59,6 +60,7 @@
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
@@ -220,12 +222,33 @@ private static String[] concreteAliases(IndicesAliasesRequest.AliasActions actio
// for DELETE we expand the aliases
String[] indexAsArray = { concreteIndex };
final Map<String, List<AliasMetadata>> aliasMetadata = metadata.findAliases(action, indexAsArray);
- List<String> finalAliases = new ArrayList<>();
+ Set<String> finalAliases = new HashSet<>();
for (final List<AliasMetadata> curAliases : aliasMetadata.values()) {
for (AliasMetadata aliasMeta : curAliases) {
finalAliases.add(aliasMeta.alias());
}
}
+
+ // must_exist can only be set in the Remove Action in Update aliases API,
+ // we check the value here to make the behavior consistent with Delete aliases API
+ if (action.mustExist() != null) {
+ // if must_exist is false, we should make the remove action execute silently,
+ // so we return the original specified aliases to avoid AliasesNotFoundException
+ if (!action.mustExist()) {
+ return action.aliases();
+ }
+
+ // if there is any non-existing aliases specified in the request and must_exist is true, throw exception in advance
+ if (finalAliases.isEmpty()) {
+ throw new AliasesNotFoundException(action.aliases());
+ }
+ String[] nonExistingAliases = Arrays.stream(action.aliases())
+ .filter(originalAlias -> finalAliases.stream().noneMatch(finalAlias -> Regex.simpleMatch(originalAlias, finalAlias)))
+ .toArray(String[]::new);
+ if (nonExistingAliases.length != 0) {
+ throw new AliasesNotFoundException(nonExistingAliases);
+ }
+ }
return finalAliases.toArray(new String[0]);
} else {
// for ADD and REMOVE_INDEX we just return the current aliases
diff --git a/server/src/main/java/org/opensearch/action/search/PitService.java b/server/src/main/java/org/opensearch/action/search/PitService.java
index ed12938883e48..b6480ce63f827 100644
--- a/server/src/main/java/org/opensearch/action/search/PitService.java
+++ b/server/src/main/java/org/opensearch/action/search/PitService.java
@@ -71,6 +71,7 @@ public void deletePitContexts(
) {
if (nodeToContextsMap.size() == 0) {
listener.onResponse(new DeletePitResponse(Collections.emptyList()));
+ return;
}
final Set<String> clusters = nodeToContextsMap.values()
.stream()
diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryAggregationCategorizer.java b/server/src/main/java/org/opensearch/action/search/SearchQueryAggregationCategorizer.java
new file mode 100644
index 0000000000000..607ccf182851b
--- /dev/null
+++ b/server/src/main/java/org/opensearch/action/search/SearchQueryAggregationCategorizer.java
@@ -0,0 +1,55 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.action.search;
+
+import org.opensearch.search.aggregations.AggregationBuilder;
+import org.opensearch.search.aggregations.PipelineAggregationBuilder;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+import java.util.Collection;
+
+/**
+ * Increments the counters related to Aggregation Search Queries.
+ */
+public class SearchQueryAggregationCategorizer {
+
+ private static final String TYPE_TAG = "type";
+ private final SearchQueryCounters searchQueryCounters;
+
+ public SearchQueryAggregationCategorizer(SearchQueryCounters searchQueryCounters) {
+ this.searchQueryCounters = searchQueryCounters;
+ }
+
+ public void incrementSearchQueryAggregationCounters(Collection<AggregationBuilder> aggregatorFactories) {
+ for (AggregationBuilder aggregationBuilder : aggregatorFactories) {
+ incrementCountersRecursively(aggregationBuilder);
+ }
+ }
+
+ private void incrementCountersRecursively(AggregationBuilder aggregationBuilder) {
+ // Increment counters for the current aggregation
+ String aggregationType = aggregationBuilder.getType();
+ searchQueryCounters.aggCounter.add(1, Tags.create().addTag(TYPE_TAG, aggregationType));
+
+ // Recursively process sub-aggregations if any
+ Collection<AggregationBuilder> subAggregations = aggregationBuilder.getSubAggregations();
+ if (subAggregations != null && !subAggregations.isEmpty()) {
+ for (AggregationBuilder subAggregation : subAggregations) {
+ incrementCountersRecursively(subAggregation);
+ }
+ }
+
+ // Process pipeline aggregations
+ Collection<PipelineAggregationBuilder> pipelineAggregations = aggregationBuilder.getPipelineAggregations();
+ for (PipelineAggregationBuilder pipelineAggregation : pipelineAggregations) {
+ String pipelineAggregationType = pipelineAggregation.getType();
+ searchQueryCounters.aggCounter.add(1, Tags.create().addTag(TYPE_TAG, pipelineAggregationType));
+ }
+ }
+}
diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java
index 8fe1be610f9af..ffaae5b08772f 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java
@@ -32,13 +32,15 @@ final class SearchQueryCategorizer {
final SearchQueryCounters searchQueryCounters;
+ final SearchQueryAggregationCategorizer searchQueryAggregationCategorizer;
+
public SearchQueryCategorizer(MetricsRegistry metricsRegistry) {
searchQueryCounters = new SearchQueryCounters(metricsRegistry);
+ searchQueryAggregationCategorizer = new SearchQueryAggregationCategorizer(searchQueryCounters);
}
public void categorize(SearchSourceBuilder source) {
QueryBuilder topLevelQueryBuilder = source.query();
-
logQueryShape(topLevelQueryBuilder);
incrementQueryTypeCounters(topLevelQueryBuilder);
incrementQueryAggregationCounters(source.aggregations());
@@ -56,9 +58,11 @@ private void incrementQuerySortCounters(List<SortBuilder<?>> sorts) {
}
private void incrementQueryAggregationCounters(AggregatorFactories.Builder aggregations) {
- if (aggregations != null) {
- searchQueryCounters.aggCounter.add(1);
+ if (aggregations == null) {
+ return;
}
+
+ searchQueryAggregationCategorizer.incrementSearchQueryAggregationCounters(aggregations.getAggregatorFactories());
}
private void incrementQueryTypeCounters(QueryBuilder topLevelQueryBuilder) {
diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java
index 98f0169e69a5c..31f83dbef9dc9 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java
@@ -9,26 +9,14 @@
package org.opensearch.action.search;
import org.apache.lucene.search.BooleanClause;
-import org.opensearch.index.query.BoolQueryBuilder;
-import org.opensearch.index.query.MatchPhraseQueryBuilder;
-import org.opensearch.index.query.MatchQueryBuilder;
-import org.opensearch.index.query.MultiMatchQueryBuilder;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryBuilderVisitor;
-import org.opensearch.index.query.QueryStringQueryBuilder;
-import org.opensearch.index.query.RangeQueryBuilder;
-import org.opensearch.index.query.RegexpQueryBuilder;
-import org.opensearch.index.query.TermQueryBuilder;
-import org.opensearch.index.query.WildcardQueryBuilder;
-import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder;
-import org.opensearch.telemetry.metrics.tags.Tags;
/**
- * Class to visit the querybuilder tree and also track the level information.
+ * Class to visit the query builder tree and also track the level information.
* Increments the counters related to Search Query type.
*/
final class SearchQueryCategorizingVisitor implements QueryBuilderVisitor {
- private static final String LEVEL_TAG = "level";
private final int level;
private final SearchQueryCounters searchQueryCounters;
@@ -42,29 +30,7 @@ private SearchQueryCategorizingVisitor(SearchQueryCounters counters, int level)
}
public void accept(QueryBuilder qb) {
- if (qb instanceof BoolQueryBuilder) {
- searchQueryCounters.boolCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
- } else if (qb instanceof FunctionScoreQueryBuilder) {
- searchQueryCounters.functionScoreCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
- } else if (qb instanceof MatchQueryBuilder) {
- searchQueryCounters.matchCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
- } else if (qb instanceof MatchPhraseQueryBuilder) {
- searchQueryCounters.matchPhrasePrefixCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
- } else if (qb instanceof MultiMatchQueryBuilder) {
- searchQueryCounters.multiMatchCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
- } else if (qb instanceof QueryStringQueryBuilder) {
- searchQueryCounters.queryStringQueryCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
- } else if (qb instanceof RangeQueryBuilder) {
- searchQueryCounters.rangeCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
- } else if (qb instanceof RegexpQueryBuilder) {
- searchQueryCounters.regexCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
- } else if (qb instanceof TermQueryBuilder) {
- searchQueryCounters.termCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
- } else if (qb instanceof WildcardQueryBuilder) {
- searchQueryCounters.wildcardCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
- } else {
- searchQueryCounters.otherQueryCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
- }
+ searchQueryCounters.incrementCounter(qb, level);
}
public QueryBuilderVisitor getChildVisitor(BooleanClause.Occur occur) {
diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java b/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java
index 7e0259af07701..bbb883809b41b 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java
@@ -8,33 +8,82 @@
package org.opensearch.action.search;
+import org.opensearch.index.query.BoolQueryBuilder;
+import org.opensearch.index.query.BoostingQueryBuilder;
+import org.opensearch.index.query.ConstantScoreQueryBuilder;
+import org.opensearch.index.query.DisMaxQueryBuilder;
+import org.opensearch.index.query.DistanceFeatureQueryBuilder;
+import org.opensearch.index.query.ExistsQueryBuilder;
+import org.opensearch.index.query.FieldMaskingSpanQueryBuilder;
+import org.opensearch.index.query.FuzzyQueryBuilder;
+import org.opensearch.index.query.GeoBoundingBoxQueryBuilder;
+import org.opensearch.index.query.GeoDistanceQueryBuilder;
+import org.opensearch.index.query.GeoPolygonQueryBuilder;
+import org.opensearch.index.query.GeoShapeQueryBuilder;
+import org.opensearch.index.query.IntervalQueryBuilder;
+import org.opensearch.index.query.MatchAllQueryBuilder;
+import org.opensearch.index.query.MatchPhraseQueryBuilder;
+import org.opensearch.index.query.MatchQueryBuilder;
+import org.opensearch.index.query.MultiMatchQueryBuilder;
+import org.opensearch.index.query.PrefixQueryBuilder;
+import org.opensearch.index.query.QueryBuilder;
+import org.opensearch.index.query.QueryStringQueryBuilder;
+import org.opensearch.index.query.RangeQueryBuilder;
+import org.opensearch.index.query.RegexpQueryBuilder;
+import org.opensearch.index.query.ScriptQueryBuilder;
+import org.opensearch.index.query.SimpleQueryStringBuilder;
+import org.opensearch.index.query.TermQueryBuilder;
+import org.opensearch.index.query.WildcardQueryBuilder;
+import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.opensearch.telemetry.metrics.Counter;
import org.opensearch.telemetry.metrics.MetricsRegistry;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+import java.util.HashMap;
+import java.util.Map;
/**
* Class contains all the Counters related to search query types.
*/
final class SearchQueryCounters {
+ private static final String LEVEL_TAG = "level";
private static final String UNIT = "1";
private final MetricsRegistry metricsRegistry;
// Counters related to Query types
public final Counter aggCounter;
public final Counter boolCounter;
+ public final Counter boostingCounter;
+ public final Counter constantScoreCounter;
+ public final Counter disMaxCounter;
+ public final Counter distanceFeatureCounter;
+ public final Counter existsCounter;
+ public final Counter fieldMaskingSpanCounter;
public final Counter functionScoreCounter;
+ public final Counter fuzzyCounter;
+ public final Counter geoBoundingBoxCounter;
+ public final Counter geoDistanceCounter;
+ public final Counter geoPolygonCounter;
+ public final Counter geoShapeCounter;
+ public final Counter intervalCounter;
public final Counter matchCounter;
+ public final Counter matchallCounter;
public final Counter matchPhrasePrefixCounter;
public final Counter multiMatchCounter;
public final Counter otherQueryCounter;
- public final Counter queryStringQueryCounter;
+ public final Counter prefixCounter;
+ public final Counter queryStringCounter;
public final Counter rangeCounter;
- public final Counter regexCounter;
-
+ public final Counter regexpCounter;
+ public final Counter scriptCounter;
+ public final Counter simpleQueryStringCounter;
public final Counter sortCounter;
public final Counter skippedCounter;
public final Counter termCounter;
public final Counter totalCounter;
public final Counter wildcardCounter;
+ public final Counter numberOfInputFieldsCounter;
+ private final Map<Class<? extends QueryBuilder>, Counter> queryHandlers;
public SearchQueryCounters(MetricsRegistry metricsRegistry) {
this.metricsRegistry = metricsRegistry;
@@ -48,16 +97,81 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) {
"Counter for the number of top level and nested bool search queries",
UNIT
);
+ this.boostingCounter = metricsRegistry.createCounter(
+ "search.query.type.boost.count",
+ "Counter for the number of top level and nested boost search queries",
+ UNIT
+ );
+ this.constantScoreCounter = metricsRegistry.createCounter(
+ "search.query.type.constantscore.count",
+ "Counter for the number of top level and nested constant score search queries",
+ UNIT
+ );
+ this.disMaxCounter = metricsRegistry.createCounter(
+ "search.query.type.dismax.count",
+ "Counter for the number of top level and nested disjunction max search queries",
+ UNIT
+ );
+ this.distanceFeatureCounter = metricsRegistry.createCounter(
+ "search.query.type.distancefeature.count",
+ "Counter for the number of top level and nested distance feature search queries",
+ UNIT
+ );
+ this.existsCounter = metricsRegistry.createCounter(
+ "search.query.type.exists.count",
+ "Counter for the number of top level and nested exists search queries",
+ UNIT
+ );
+ this.fieldMaskingSpanCounter = metricsRegistry.createCounter(
+ "search.query.type.fieldmaskingspan.count",
+ "Counter for the number of top level and nested field masking span search queries",
+ UNIT
+ );
this.functionScoreCounter = metricsRegistry.createCounter(
"search.query.type.functionscore.count",
"Counter for the number of top level and nested function score search queries",
UNIT
);
+ this.fuzzyCounter = metricsRegistry.createCounter(
+ "search.query.type.fuzzy.count",
+ "Counter for the number of top level and nested fuzzy search queries",
+ UNIT
+ );
+ this.geoBoundingBoxCounter = metricsRegistry.createCounter(
+ "search.query.type.geoboundingbox.count",
+ "Counter for the number of top level and nested geo bounding box queries",
+ UNIT
+ );
+ this.geoDistanceCounter = metricsRegistry.createCounter(
+ "search.query.type.geodistance.count",
+ "Counter for the number of top level and nested geo distance queries",
+ UNIT
+ );
+ this.geoPolygonCounter = metricsRegistry.createCounter(
+ "search.query.type.geopolygon.count",
+ "Counter for the number of top level and nested geo polygon queries",
+ UNIT
+ );
+ this.geoShapeCounter = metricsRegistry.createCounter(
+ "search.query.type.geoshape.count",
+ "Counter for the number of top level and nested geo shape queries",
+ UNIT
+ );
+ this.intervalCounter = metricsRegistry.createCounter(
+ "search.query.type.interval.count",
+ "Counter for the number of top level and nested interval queries",
+ UNIT
+ );
this.matchCounter = metricsRegistry.createCounter(
"search.query.type.match.count",
"Counter for the number of top level and nested match search queries",
UNIT
);
+ this.matchallCounter = metricsRegistry.createCounter(
+ "search.query.type.matchall.count",
+ "Counter for the number of top level and nested match all search queries",
+ UNIT
+ );
this.matchPhrasePrefixCounter = metricsRegistry.createCounter(
"search.query.type.matchphrase.count",
"Counter for the number of top level and nested match phrase prefix search queries",
@@ -73,7 +187,12 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) {
"Counter for the number of top level and nested search queries that do not match any other categories",
UNIT
);
- this.queryStringQueryCounter = metricsRegistry.createCounter(
+ this.prefixCounter = metricsRegistry.createCounter(
+ "search.query.type.prefix.count",
+ "Counter for the number of top level and nested search queries that match prefix queries",
+ UNIT
+ );
+ this.queryStringCounter = metricsRegistry.createCounter(
"search.query.type.querystringquery.count",
"Counter for the number of top level and nested queryStringQuery search queries",
UNIT
@@ -83,11 +202,21 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) {
"Counter for the number of top level and nested range search queries",
UNIT
);
- this.regexCounter = metricsRegistry.createCounter(
+ this.regexpCounter = metricsRegistry.createCounter(
"search.query.type.regex.count",
"Counter for the number of top level and nested regex search queries",
UNIT
);
+ this.scriptCounter = metricsRegistry.createCounter(
+ "search.query.type.script.count",
+ "Counter for the number of top level and nested script search queries",
+ UNIT
+ );
+ this.simpleQueryStringCounter = metricsRegistry.createCounter(
+ "search.query.type.simplequerystring.count",
+ "Counter for the number of top level and nested simple query string search queries",
+ UNIT
+ );
this.skippedCounter = metricsRegistry.createCounter(
"search.query.type.skipped.count",
"Counter for the number queries skipped due to error",
@@ -113,5 +242,51 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) {
"Counter for the number of top level and nested wildcard search queries",
UNIT
);
+ this.numberOfInputFieldsCounter = metricsRegistry.createCounter(
+ "search.query.type.numberofinputfields.count",
+ "Counter for the number of input fields in the search queries",
+ UNIT
+ );
+ this.queryHandlers = new HashMap<>();
+ initializeQueryHandlers();
+ }
+
+ public void incrementCounter(QueryBuilder queryBuilder, int level) {
+ Counter counter = queryHandlers.get(queryBuilder.getClass());
+ if (counter != null) {
+ counter.add(1, Tags.create().addTag(LEVEL_TAG, level));
+ } else {
+ otherQueryCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
+ }
+ }
+
+ private void initializeQueryHandlers() {
+
+ queryHandlers.put(BoolQueryBuilder.class, boolCounter);
+ queryHandlers.put(FunctionScoreQueryBuilder.class, functionScoreCounter);
+ queryHandlers.put(MatchQueryBuilder.class, matchCounter);
+ queryHandlers.put(MatchPhraseQueryBuilder.class, matchPhrasePrefixCounter);
+ queryHandlers.put(MultiMatchQueryBuilder.class, multiMatchCounter);
+ queryHandlers.put(QueryStringQueryBuilder.class, queryStringCounter);
+ queryHandlers.put(RangeQueryBuilder.class, rangeCounter);
+ queryHandlers.put(RegexpQueryBuilder.class, regexpCounter);
+ queryHandlers.put(TermQueryBuilder.class, termCounter);
+ queryHandlers.put(WildcardQueryBuilder.class, wildcardCounter);
+ queryHandlers.put(BoostingQueryBuilder.class, boostingCounter);
+ queryHandlers.put(ConstantScoreQueryBuilder.class, constantScoreCounter);
+ queryHandlers.put(DisMaxQueryBuilder.class, disMaxCounter);
+ queryHandlers.put(DistanceFeatureQueryBuilder.class, distanceFeatureCounter);
+ queryHandlers.put(ExistsQueryBuilder.class, existsCounter);
+ queryHandlers.put(FieldMaskingSpanQueryBuilder.class, fieldMaskingSpanCounter);
+ queryHandlers.put(FuzzyQueryBuilder.class, fuzzyCounter);
+ queryHandlers.put(GeoBoundingBoxQueryBuilder.class, geoBoundingBoxCounter);
+ queryHandlers.put(GeoDistanceQueryBuilder.class, geoDistanceCounter);
+ queryHandlers.put(GeoPolygonQueryBuilder.class, geoPolygonCounter);
+ queryHandlers.put(GeoShapeQueryBuilder.class, geoShapeCounter);
+ queryHandlers.put(IntervalQueryBuilder.class, intervalCounter);
+ queryHandlers.put(MatchAllQueryBuilder.class, matchallCounter);
+ queryHandlers.put(PrefixQueryBuilder.class, prefixCounter);
+ queryHandlers.put(ScriptQueryBuilder.class, scriptCounter);
+ queryHandlers.put(SimpleQueryStringBuilder.class, simpleQueryStringCounter);
}
}
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java b/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java
index 47c6cad04343e..dac6ecaa13781 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java
@@ -35,6 +35,7 @@
import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.opensearch.common.Nullable;
import org.opensearch.core.common.Strings;
+import org.opensearch.rest.action.admin.indices.AliasesNotFoundException;
/**
* Individual operation to perform on the cluster state as part of an {@link IndicesAliasesRequest}.
@@ -225,8 +226,8 @@ boolean removeIndex() {
@Override
boolean apply(NewAliasValidator aliasValidator, Metadata.Builder metadata, IndexMetadata index) {
if (false == index.getAliases().containsKey(alias)) {
- if (mustExist != null && mustExist.booleanValue()) {
- throw new IllegalArgumentException("required alias [" + alias + "] does not exist");
+ if (mustExist != null && mustExist) {
+ throw new AliasesNotFoundException(alias);
}
return false;
}
diff --git a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java
index 387b0c9753574..d3285c379bcc4 100644
--- a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java
+++ b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java
@@ -40,7 +40,8 @@ protected FeatureFlagSettings(
FeatureFlags.IDENTITY_SETTING,
FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING,
FeatureFlags.TELEMETRY_SETTING,
- FeatureFlags.DATETIME_FORMATTER_CACHING_SETTING
+ FeatureFlags.DATETIME_FORMATTER_CACHING_SETTING,
+ FeatureFlags.WRITEABLE_REMOTE_INDEX_SETTING
)
)
);
diff --git a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java
index 4e9b417e3433b..c54772caa574b 100644
--- a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java
+++ b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java
@@ -60,6 +60,12 @@ public class FeatureFlags {
*/
public static final String DATETIME_FORMATTER_CACHING = "opensearch.experimental.optimization.datetime_formatter_caching.enabled";
+ /**
+ * Gates the functionality of writeable remote index
+ * Once the feature is ready for release, this feature flag can be removed.
+ */
+ public static final String WRITEABLE_REMOTE_INDEX = "opensearch.experimental.feature.writeable_remote_index.enabled";
+
/**
* Should store the settings from opensearch.yml.
*/
@@ -122,4 +128,10 @@ public static boolean isEnabled(Setting<Boolean> featureFlag) {
true,
Property.NodeScope
);
+
+ public static final Setting<Boolean> WRITEABLE_REMOTE_INDEX_SETTING = Setting.boolSetting(
+ WRITEABLE_REMOTE_INDEX,
+ false,
+ Property.NodeScope
+ );
}
diff --git a/server/src/main/java/org/opensearch/common/xcontent/JsonToStringXContentParser.java b/server/src/main/java/org/opensearch/common/xcontent/JsonToStringXContentParser.java
index 9e81d8a7af078..9b2bd06a88e2e 100644
--- a/server/src/main/java/org/opensearch/common/xcontent/JsonToStringXContentParser.java
+++ b/server/src/main/java/org/opensearch/common/xcontent/JsonToStringXContentParser.java
@@ -18,7 +18,6 @@
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.core.xcontent.XContentLocation;
import org.opensearch.core.xcontent.XContentParser;
-import org.opensearch.index.mapper.ParseContext;
import java.io.IOException;
import java.math.BigInteger;
@@ -40,7 +39,6 @@ public class JsonToStringXContentParser extends AbstractXContentParser {
private ArrayList keyList = new ArrayList<>();
private XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent);
- private ParseContext parseContext;
private NamedXContentRegistry xContentRegistry;
@@ -54,14 +52,13 @@ public class JsonToStringXContentParser extends AbstractXContentParser {
public JsonToStringXContentParser(
NamedXContentRegistry xContentRegistry,
DeprecationHandler deprecationHandler,
- ParseContext parseContext,
+ XContentParser parser,
String fieldTypeName
) throws IOException {
super(xContentRegistry, deprecationHandler);
- this.parseContext = parseContext;
this.deprecationHandler = deprecationHandler;
this.xContentRegistry = xContentRegistry;
- this.parser = parseContext.parser();
+ this.parser = parser;
this.fieldTypeName = fieldTypeName;
}
@@ -86,8 +83,22 @@ private void parseToken(StringBuilder path, String currentFieldName) throws IOEx
StringBuilder parsedFields = new StringBuilder();
if (this.parser.currentToken() == Token.FIELD_NAME) {
- path.append(DOT_SYMBOL + currentFieldName);
- this.keyList.add(currentFieldName);
+ path.append(DOT_SYMBOL).append(currentFieldName);
+ int dotIndex = currentFieldName.indexOf(DOT_SYMBOL);
+ String fieldNameSuffix = currentFieldName;
+ // The field name may be of the form foo.bar.baz
+ // If that's the case, each "part" is a key.
+ while (dotIndex >= 0) {
+ String fieldNamePrefix = fieldNameSuffix.substring(0, dotIndex);
+ if (!fieldNamePrefix.isEmpty()) {
+ this.keyList.add(fieldNamePrefix);
+ }
+ fieldNameSuffix = fieldNameSuffix.substring(dotIndex + 1);
+ dotIndex = fieldNameSuffix.indexOf(DOT_SYMBOL);
+ }
+ if (!fieldNameSuffix.isEmpty()) {
+ this.keyList.add(fieldNameSuffix);
+ }
} else if (this.parser.currentToken() == Token.START_ARRAY) {
parseToken(path, currentFieldName);
break;
@@ -97,18 +108,18 @@ private void parseToken(StringBuilder path, String currentFieldName) throws IOEx
parseToken(path, currentFieldName);
int dotIndex = path.lastIndexOf(DOT_SYMBOL);
if (dotIndex != -1) {
- path.delete(dotIndex, path.length());
+ path.setLength(path.length() - currentFieldName.length() - 1);
}
} else {
if (!path.toString().contains(currentFieldName)) {
- path.append(DOT_SYMBOL + currentFieldName);
+ path.append(DOT_SYMBOL).append(currentFieldName);
}
parseValue(parsedFields);
this.valueList.add(parsedFields.toString());
this.valueAndPathList.add(path + EQUAL_SYMBOL + parsedFields);
int dotIndex = path.lastIndexOf(DOT_SYMBOL);
if (dotIndex != -1) {
- path.delete(dotIndex, path.length());
+ path.setLength(path.length() - currentFieldName.length() - 1);
}
}
diff --git a/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesSetQuery.java b/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesSetQuery.java
new file mode 100644
index 0000000000000..669dbb1e1bfc7
--- /dev/null
+++ b/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesSetQuery.java
@@ -0,0 +1,176 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.index.document;
+
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.search.ConstantScoreScorer;
+import org.apache.lucene.search.ConstantScoreWeight;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchNoDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryVisitor;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.TwoPhaseIterator;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.util.LongHashSet;
+
+import java.io.IOException;
+import java.math.BigInteger;
+import java.util.Arrays;
+import java.util.Objects;
+
+/**
+ * The {@link org.apache.lucene.document.SortedNumericDocValuesSetQuery} implementation for unsigned long numeric data type.
+ *
+ * @opensearch.internal
+ */
+public abstract class SortedUnsignedLongDocValuesSetQuery extends Query {
+
+ private final String field;
+ private final LongHashSet numbers;
+
+ SortedUnsignedLongDocValuesSetQuery(String field, BigInteger[] numbers) {
+ this.field = Objects.requireNonNull(field);
+ Arrays.sort(numbers);
+ this.numbers = new LongHashSet(Arrays.stream(numbers).mapToLong(n -> n.longValue()).toArray());
+ }
+
+ @Override
+ public String toString(String field) {
+ return new StringBuilder().append(field).append(": ").append(numbers.toString()).toString();
+ }
+
+ @Override
+ public void visit(QueryVisitor visitor) {
+ if (visitor.acceptField(field)) {
+ visitor.visitLeaf(this);
+ }
+ }
+
+ @Override
+ public Query rewrite(IndexSearcher indexSearcher) throws IOException {
+ if (numbers.size() == 0) {
+ return new MatchNoDocsQuery();
+ }
+ return super.rewrite(indexSearcher);
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (sameClassAs(other) == false) {
+ return false;
+ }
+ SortedUnsignedLongDocValuesSetQuery that = (SortedUnsignedLongDocValuesSetQuery) other;
+ return field.equals(that.field) && numbers.equals(that.numbers);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(classHash(), field, numbers);
+ }
+
+ abstract SortedNumericDocValues getValues(LeafReader reader, String field) throws IOException;
+
+ @Override
+ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
+ return new ConstantScoreWeight(this, boost) {
+
+ @Override
+ public boolean isCacheable(LeafReaderContext ctx) {
+ return DocValues.isCacheable(ctx, field);
+ }
+
+ @Override
+ public Scorer scorer(LeafReaderContext context) throws IOException {
+ SortedNumericDocValues values = getValues(context.reader(), field);
+ if (values == null) {
+ return null;
+ }
+ final NumericDocValues singleton = DocValues.unwrapSingleton(values);
+ final TwoPhaseIterator iterator;
+ if (singleton != null) {
+ iterator = new TwoPhaseIterator(singleton) {
+ @Override
+ public boolean matches() throws IOException {
+ long value = singleton.longValue();
+ return Long.compareUnsigned(value, numbers.minValue) >= 0
+ && Long.compareUnsigned(value, numbers.maxValue) <= 0
+ && numbers.contains(value);
+ }
+
+ @Override
+ public float matchCost() {
+ return 5; // 2 comparisons, possible lookup in the set
+ }
+ };
+ } else {
+ iterator = new TwoPhaseIterator(values) {
+ @Override
+ public boolean matches() throws IOException {
+ int count = values.docValueCount();
+ for (int i = 0; i < count; i++) {
+ final long value = values.nextValue();
+ if (Long.compareUnsigned(value, numbers.minValue) < 0) {
+ continue;
+ } else if (Long.compareUnsigned(value, numbers.maxValue) > 0) {
+ return false; // values are sorted, terminate
+ } else if (numbers.contains(value)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ public float matchCost() {
+ return 5; // 2 comparisons, possible lookup in the set
+ }
+ };
+ }
+ return new ConstantScoreScorer(this, score(), scoreMode, iterator);
+ }
+ };
+ }
+
+ public static Query newSlowSetQuery(String field, BigInteger... values) {
+ return new SortedUnsignedLongDocValuesSetQuery(field, values) {
+ @Override
+ SortedNumericDocValues getValues(LeafReader reader, String field) throws IOException {
+ FieldInfo info = reader.getFieldInfos().fieldInfo(field);
+ if (info == null) {
+ // Queries have some optimizations when one sub scorer returns null rather
+ // than a scorer that does not match any documents
+ return null;
+ }
+ return DocValues.getSortedNumeric(reader, field);
+ }
+ };
+ }
+
+ public static Query newSlowExactQuery(String field, BigInteger value) {
+ return new SortedUnsignedLongDocValuesRangeQuery(field, value, value) {
+ @Override
+ SortedNumericDocValues getValues(LeafReader reader, String field) throws IOException {
+ FieldInfo info = reader.getFieldInfos().fieldInfo(field);
+ if (info == null) {
+ // Queries have some optimizations when one sub scorer returns null rather
+ // than a scorer that does not match any documents
+ return null;
+ }
+ return DocValues.getSortedNumeric(reader, field);
+ }
+ };
+ }
+}
diff --git a/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java
index 00b623dddac23..9a3f2595a7c9e 100644
--- a/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java
+++ b/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java
@@ -572,7 +572,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
JsonToStringXContentParser JsonToStringParser = new JsonToStringXContentParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.IGNORE_DEPRECATIONS,
- context,
+ context.parser(),
fieldType().name()
);
/*
diff --git a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java
index da62ddfd7017d..66d4654e543a2 100644
--- a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java
+++ b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java
@@ -359,18 +359,31 @@ public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionInc
);
}
+ public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, QueryShardContext context) throws IOException {
+ return phraseQuery(stream, slop, enablePositionIncrements);
+ }
+
public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException {
throw new IllegalArgumentException(
"Can only use phrase queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]"
);
}
+ public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, QueryShardContext context)
+ throws IOException {
+ return multiPhraseQuery(stream, slop, enablePositionIncrements);
+ }
+
public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions) throws IOException {
throw new IllegalArgumentException(
"Can only use phrase prefix queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]"
);
}
+ public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, QueryShardContext context) throws IOException {
+ return phrasePrefixQuery(stream, slop, maxExpansions);
+ }
+
public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, QueryShardContext context) {
throw new IllegalArgumentException(
"Can only use span prefix queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]"
diff --git a/server/src/main/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapper.java
new file mode 100644
index 0000000000000..fb97f8c309a70
--- /dev/null
+++ b/server/src/main/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapper.java
@@ -0,0 +1,312 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.index.mapper;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
+import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.MultiPhraseQuery;
+import org.apache.lucene.search.PhraseQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.opensearch.Version;
+import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery;
+import org.opensearch.index.analysis.IndexAnalyzers;
+import org.opensearch.index.analysis.NamedAnalyzer;
+import org.opensearch.index.query.QueryShardContext;
+import org.opensearch.index.query.SourceFieldMatchQuery;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+
+/**
+ * A specialized type of TextFieldMapper which disables the positions and norms to save on storage and executes phrase queries, which require
+ * positional data, in a slightly less efficient manner using the {@link org.opensearch.index.query.SourceFieldMatchQuery}.
+ */
+public class MatchOnlyTextFieldMapper extends TextFieldMapper {
+
+ public static final FieldType FIELD_TYPE = new FieldType();
+ public static final String CONTENT_TYPE = "match_only_text";
+ private final String indexOptions = FieldMapper.indexOptionToString(FIELD_TYPE.indexOptions());
+ private final boolean norms = FIELD_TYPE.omitNorms() == false;
+
+ @Override
+ protected String contentType() {
+ return CONTENT_TYPE;
+ }
+
+ static {
+ FIELD_TYPE.setTokenized(true);
+ FIELD_TYPE.setStored(false);
+ FIELD_TYPE.setStoreTermVectors(false);
+ FIELD_TYPE.setOmitNorms(true);
+ FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
+ FIELD_TYPE.freeze();
+ }
+
+ public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers()));
+
+ protected MatchOnlyTextFieldMapper(
+ String simpleName,
+ FieldType fieldType,
+ MatchOnlyTextFieldType mappedFieldType,
+ TextFieldMapper.PrefixFieldMapper prefixFieldMapper,
+ TextFieldMapper.PhraseFieldMapper phraseFieldMapper,
+ MultiFields multiFields,
+ CopyTo copyTo,
+ Builder builder
+ ) {
+
+ super(simpleName, fieldType, mappedFieldType, prefixFieldMapper, phraseFieldMapper, multiFields, copyTo, builder);
+ }
+
+ @Override
+ public ParametrizedFieldMapper.Builder getMergeBuilder() {
+ return new Builder(simpleName(), this.indexCreatedVersion, this.indexAnalyzers).init(this);
+ }
+
+ /**
+ * Builder class for constructing the MatchOnlyTextFieldMapper.
+ */
+ public static class Builder extends TextFieldMapper.Builder {
+ final Parameter indexOptions = indexOptions(m -> ((MatchOnlyTextFieldMapper) m).indexOptions);
+
+ private static Parameter indexOptions(Function initializer) {
+ return Parameter.restrictedStringParam("index_options", false, initializer, "docs");
+ }
+
+ final Parameter norms = norms(m -> ((MatchOnlyTextFieldMapper) m).norms);
+ final Parameter indexPhrases = Parameter.boolParam(
+ "index_phrases",
+ false,
+ m -> ((MatchOnlyTextFieldType) m.mappedFieldType).indexPhrases,
+ false
+ ).setValidator(v -> {
+ if (v == true) {
+ throw new MapperParsingException("Index phrases cannot be enabled on for match_only_text field. Use text field instead");
+ }
+ });
+
+ final Parameter indexPrefixes = new Parameter<>(
+ "index_prefixes",
+ false,
+ () -> null,
+ TextFieldMapper::parsePrefixConfig,
+ m -> Optional.ofNullable(((MatchOnlyTextFieldType) m.mappedFieldType).prefixFieldType)
+ .map(p -> new PrefixConfig(p.minChars, p.maxChars))
+ .orElse(null)
+ ).acceptsNull().setValidator(v -> {
+ if (v != null) {
+ throw new MapperParsingException("Index prefixes cannot be enabled on for match_only_text field. Use text field instead");
+ }
+ });
+
+ private static Parameter norms(Function initializer) {
+ return Parameter.boolParam("norms", false, initializer, false)
+ .setMergeValidator((o, n) -> o == n || (o && n == false))
+ .setValidator(v -> {
+ if (v == true) {
+ throw new MapperParsingException("Norms cannot be enabled on for match_only_text field");
+ }
+ });
+ }
+
+ public Builder(String name, IndexAnalyzers indexAnalyzers) {
+ super(name, indexAnalyzers);
+ }
+
+ public Builder(String name, Version indexCreatedVersion, IndexAnalyzers indexAnalyzers) {
+ super(name, indexCreatedVersion, indexAnalyzers);
+ }
+
+ @Override
+ public MatchOnlyTextFieldMapper build(BuilderContext context) {
+ FieldType fieldType = TextParams.buildFieldType(index, store, indexOptions, norms, termVectors);
+ MatchOnlyTextFieldType tft = buildFieldType(fieldType, context);
+ return new MatchOnlyTextFieldMapper(
+ name,
+ fieldType,
+ tft,
+ buildPrefixMapper(context, fieldType, tft),
+ buildPhraseMapper(fieldType, tft),
+ multiFieldsBuilder.build(this, context),
+ copyTo.build(),
+ this
+ );
+ }
+
+ @Override
+ protected MatchOnlyTextFieldType buildFieldType(FieldType fieldType, BuilderContext context) {
+ NamedAnalyzer indexAnalyzer = analyzers.getIndexAnalyzer();
+ NamedAnalyzer searchAnalyzer = analyzers.getSearchAnalyzer();
+ NamedAnalyzer searchQuoteAnalyzer = analyzers.getSearchQuoteAnalyzer();
+
+ if (fieldType.indexOptions().compareTo(IndexOptions.DOCS) > 0) {
+ throw new IllegalArgumentException("Cannot set position_increment_gap on field [" + name + "] without positions enabled");
+ }
+ if (positionIncrementGap.get() != POSITION_INCREMENT_GAP_USE_ANALYZER) {
+ if (fieldType.indexOptions().compareTo(IndexOptions.DOCS) < 0) {
+ throw new IllegalArgumentException(
+ "Cannot set position_increment_gap on field [" + name + "] without indexing enabled"
+ );
+ }
+ indexAnalyzer = new NamedAnalyzer(indexAnalyzer, positionIncrementGap.get());
+ searchAnalyzer = new NamedAnalyzer(searchAnalyzer, positionIncrementGap.get());
+ searchQuoteAnalyzer = new NamedAnalyzer(searchQuoteAnalyzer, positionIncrementGap.get());
+ }
+ TextSearchInfo tsi = new TextSearchInfo(fieldType, similarity.getValue(), searchAnalyzer, searchQuoteAnalyzer);
+ MatchOnlyTextFieldType ft = new MatchOnlyTextFieldType(
+ buildFullName(context),
+ index.getValue(),
+ fieldType.stored(),
+ tsi,
+ meta.getValue()
+ );
+ ft.setIndexAnalyzer(indexAnalyzer);
+ ft.setEagerGlobalOrdinals(eagerGlobalOrdinals.getValue());
+ ft.setBoost(boost.getValue());
+ if (fieldData.getValue()) {
+ ft.setFielddata(true, freqFilter.getValue());
+ }
+ return ft;
+ }
+
+ @Override
+ protected List> getParameters() {
+ return Arrays.asList(
+ index,
+ store,
+ indexOptions,
+ norms,
+ termVectors,
+ analyzers.indexAnalyzer,
+ analyzers.searchAnalyzer,
+ analyzers.searchQuoteAnalyzer,
+ similarity,
+ positionIncrementGap,
+ fieldData,
+ freqFilter,
+ eagerGlobalOrdinals,
+ indexPhrases,
+ indexPrefixes,
+ boost,
+ meta
+ );
+ }
+ }
+
+ /**
+ * The specific field type for MatchOnlyTextFieldMapper
+ *
+ * @opensearch.internal
+ */
+ public static final class MatchOnlyTextFieldType extends TextFieldType {
+ private final boolean indexPhrases = false;
+
+ private PrefixFieldType prefixFieldType;
+
+ @Override
+ public String typeName() {
+ return CONTENT_TYPE;
+ }
+
+ public MatchOnlyTextFieldType(String name, boolean indexed, boolean stored, TextSearchInfo tsi, Map meta) {
+ super(name, indexed, stored, tsi, meta);
+ }
+
+ @Override
+ public Query phraseQuery(TokenStream stream, int slop, boolean enablePosIncrements, QueryShardContext context) throws IOException {
+ PhraseQuery phraseQuery = (PhraseQuery) super.phraseQuery(stream, slop, enablePosIncrements);
+ BooleanQuery.Builder builder = new BooleanQuery.Builder();
+ for (Term term : phraseQuery.getTerms()) {
+ builder.add(new TermQuery(term), BooleanClause.Occur.FILTER);
+ }
+ return new SourceFieldMatchQuery(builder.build(), phraseQuery, this, context);
+ }
+
+ @Override
+ public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, QueryShardContext context)
+ throws IOException {
+ MultiPhraseQuery multiPhraseQuery = (MultiPhraseQuery) super.multiPhraseQuery(stream, slop, enablePositionIncrements);
+ BooleanQuery.Builder builder = new BooleanQuery.Builder();
+ for (Term[] terms : multiPhraseQuery.getTermArrays()) {
+ if (terms.length > 1) {
+ // Multiple terms in the same position, creating a disjunction query for it and
+ // adding it to conjunction query
+ BooleanQuery.Builder disjunctions = new BooleanQuery.Builder();
+ for (Term term : terms) {
+ disjunctions.add(new TermQuery(term), BooleanClause.Occur.SHOULD);
+ }
+ builder.add(disjunctions.build(), BooleanClause.Occur.FILTER);
+ } else {
+ builder.add(new TermQuery(terms[0]), BooleanClause.Occur.FILTER);
+ }
+ }
+ return new SourceFieldMatchQuery(builder.build(), multiPhraseQuery, this, context);
+ }
+
+ @Override
+ public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, QueryShardContext context) throws IOException {
+ Query phrasePrefixQuery = super.phrasePrefixQuery(stream, slop, maxExpansions);
+ List> termArray = getTermsFromTokenStream(stream);
+ BooleanQuery.Builder builder = new BooleanQuery.Builder();
+ for (int i = 0; i < termArray.size(); i++) {
+ if (i == termArray.size() - 1) {
+ // last element of the term Array is a prefix, thus creating a prefix query for it and adding it to
+ // conjunction query
+ MultiPhrasePrefixQuery mqb = new MultiPhrasePrefixQuery(name());
+ mqb.add(termArray.get(i).toArray(new Term[0]));
+ builder.add(mqb, BooleanClause.Occur.FILTER);
+ } else {
+ if (termArray.get(i).size() > 1) {
+ // multiple terms in the same position, creating a disjunction query for it and
+ // adding it to conjunction query
+ BooleanQuery.Builder disjunctions = new BooleanQuery.Builder();
+ for (Term term : termArray.get(i)) {
+ disjunctions.add(new TermQuery(term), BooleanClause.Occur.SHOULD);
+ }
+ builder.add(disjunctions.build(), BooleanClause.Occur.FILTER);
+ } else {
+ builder.add(new TermQuery(termArray.get(i).get(0)), BooleanClause.Occur.FILTER);
+ }
+ }
+ }
+ return new SourceFieldMatchQuery(builder.build(), phrasePrefixQuery, this, context);
+ }
+
+ private List> getTermsFromTokenStream(TokenStream stream) throws IOException {
+ final List> termArray = new ArrayList<>();
+ TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
+ PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
+ List currentTerms = new ArrayList<>();
+ stream.reset();
+ while (stream.incrementToken()) {
+ if (posIncrAtt.getPositionIncrement() != 0) {
+ if (currentTerms.isEmpty() == false) {
+ termArray.add(List.copyOf(currentTerms));
+ }
+ currentTerms.clear();
+ }
+ currentTerms.add(new Term(name(), termAtt.getBytesRef()));
+ }
+ termArray.add(List.copyOf(currentTerms));
+ return termArray;
+ }
+ }
+}
diff --git a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java
index 204e7bc4c16ab..524d2b0e0dd38 100644
--- a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java
+++ b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java
@@ -37,6 +37,7 @@
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatField;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
@@ -61,6 +62,7 @@
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.core.xcontent.XContentParser.Token;
import org.opensearch.index.document.SortedUnsignedLongDocValuesRangeQuery;
+import org.opensearch.index.document.SortedUnsignedLongDocValuesSetQuery;
import org.opensearch.index.fielddata.IndexFieldData;
import org.opensearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.opensearch.index.fielddata.plain.SortedNumericIndexFieldData;
@@ -201,18 +203,39 @@ public Float parse(XContentParser parser, boolean coerce) throws IOException {
}
@Override
- public Query termQuery(String field, Object value) {
+ public Query termQuery(String field, Object value, boolean hasDocValues, boolean isSearchable) {
float v = parse(value, false);
+ if (isSearchable && hasDocValues) {
+ Query query = HalfFloatPoint.newExactQuery(field, v);
+ Query dvQuery = SortedNumericDocValuesField.newSlowExactQuery(field, HalfFloatPoint.halfFloatToSortableShort(v));
+ return new IndexOrDocValuesQuery(query, dvQuery);
+ }
+ if (hasDocValues) {
+ return SortedNumericDocValuesField.newSlowExactQuery(field, HalfFloatPoint.halfFloatToSortableShort(v));
+ }
return HalfFloatPoint.newExactQuery(field, v);
}
@Override
- public Query termsQuery(String field, List