[Remove] LegacyESVersion.V_7_2_* and V_7_3_* constants (opensearch-project#4702)

Removes all usages of the LegacyESVersion.V_7_2_* and LegacyESVersion.V_7_3_* version
constants along with related ancient APIs.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
nknize authored Oct 7, 2022
1 parent 2e4b27b commit fe3994c
Showing 60 changed files with 212 additions and 653 deletions.
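The change is mechanical and repeats across the touched files: wherever serialization, analysis setup, or BWC tests branched on `onOrAfter(LegacyESVersion.V_7_2_*)` or `V_7_3_*`, the legacy branch is deleted and the newer behavior becomes unconditional, because every version an OpenSearch node still interoperates with already satisfies those checks. Below is a minimal, self-contained sketch of why such a gate turns into dead code; the numeric id layout mirrors the constants shown in `LegacyESVersion.java` further down, while the floor version is purely an assumption for illustration.

```java
// Standalone sketch, not OpenSearch code.
public final class DeadVersionGateSketch {

    // Same numeric scheme as the removed constants, e.g. 7.2.0 -> 7020099.
    static final int V_7_2_0 = 7020099;

    // Assumption for illustration: the oldest version this node can still talk to.
    static final int OLDEST_SUPPORTED = 7100299; // "7.10.2"

    static boolean onOrAfter(int version, int gate) {
        return version >= gate;
    }

    public static void main(String[] args) {
        // Every peer/index version is at least OLDEST_SUPPORTED, so the check can
        // never be false and the pre-7.2.0 fallback branch is unreachable.
        System.out.println(onOrAfter(OLDEST_SUPPORTED, V_7_2_0)); // true
    }
}
```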
3 changes: 2 additions & 1 deletion CHANGELOG.md
@@ -85,6 +85,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Remove deprecated code to add node name into log pattern of log4j property file ([#4568](https://github.com/opensearch-project/OpenSearch/pull/4568))
- Unused object and import within TransportClusterAllocationExplainAction ([#4639](https://github.com/opensearch-project/OpenSearch/pull/4639))
- Remove LegacyESVersion.V_7_0_* and V_7_1_* Constants ([#2768](https://github.com/opensearch-project/OpenSearch/pull/2768))
- Remove LegacyESVersion.V_7_2_* and V_7_3_* Constants ([#4702](https://github.com/opensearch-project/OpenSearch/pull/4702))
- Always auto release the flood stage block ([#4703](https://github.com/opensearch-project/OpenSearch/pull/4703))

### Fixed
@@ -160,4 +161,4 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
### Security

[Unreleased]: https://github.com/opensearch-project/OpenSearch/compare/2.2.0...HEAD
[2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.2.0...2.x
[2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.2.0...2.x
@@ -563,18 +563,22 @@ public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
)
)
);
filters.add(PreConfiguredTokenFilter.openSearchVersion("word_delimiter_graph", false, false, (input, version) -> {
boolean adjustOffsets = version.onOrAfter(LegacyESVersion.V_7_3_0);
return new WordDelimiterGraphFilter(
input,
adjustOffsets,
WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE,
WordDelimiterGraphFilter.GENERATE_WORD_PARTS | WordDelimiterGraphFilter.GENERATE_NUMBER_PARTS
| WordDelimiterGraphFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterGraphFilter.SPLIT_ON_NUMERICS
| WordDelimiterGraphFilter.STEM_ENGLISH_POSSESSIVE,
null
);
}));
filters.add(
PreConfiguredTokenFilter.openSearchVersion(
"word_delimiter_graph",
false,
false,
(input, version) -> new WordDelimiterGraphFilter(
input,
true,
WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE,
WordDelimiterGraphFilter.GENERATE_WORD_PARTS | WordDelimiterGraphFilter.GENERATE_NUMBER_PARTS
| WordDelimiterGraphFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterGraphFilter.SPLIT_ON_NUMERICS
| WordDelimiterGraphFilter.STEM_ENGLISH_POSSESSIVE,
null
)
)
);
return filters;
}

@@ -588,12 +592,12 @@ public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
tokenizers.add(PreConfiguredTokenizer.singleton("letter", LetterTokenizer::new));
tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new));
tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new));
tokenizers.add(PreConfiguredTokenizer.openSearchVersion("edge_ngram", (version) -> {
if (version.onOrAfter(LegacyESVersion.V_7_3_0)) {
return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
}
return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
}));
tokenizers.add(
PreConfiguredTokenizer.openSearchVersion(
"edge_ngram",
(version) -> new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE)
)
);
tokenizers.add(PreConfiguredTokenizer.singleton("pattern", () -> new PatternTokenizer(Regex.compile("\\W+", null), -1)));
tokenizers.add(PreConfiguredTokenizer.singleton("thai", ThaiTokenizer::new));
// TODO deprecate and remove in API
@@ -619,10 +623,7 @@ public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
+ "Please change the tokenizer name to [edge_ngram] instead."
);
}
if (version.onOrAfter(LegacyESVersion.V_7_3_0)) {
return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
}
return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
}));
tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new));

@@ -1003,12 +1003,8 @@ public void testClosedIndices() throws Exception {
closeIndex(index);
}

if (getOldClusterVersion().onOrAfter(LegacyESVersion.V_7_2_0)) {
ensureGreenLongWait(index);
assertClosedIndex(index, true);
} else {
assertClosedIndex(index, false);
}
ensureGreenLongWait(index);
assertClosedIndex(index, true);

if (isRunningAgainstOldCluster() == false) {
openIndex(index);
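The upgrade tests above no longer branch on whether the old cluster predates 7.2.0: a closed index is always expected to stay allocated, replicate, and reach green health. A hedged REST-level sketch of that expectation follows, written as a helper assumed to live inside an `OpenSearchRestTestCase` subclass (the `client()` helper, `Request`, and `Response` come from the REST test framework; the method name is hypothetical).

```java
import org.opensearch.client.Request;
import org.opensearch.client.Response;

import java.io.IOException;

// Sketch only: roughly what ensureGreenLongWait(index) + assertClosedIndex(index, true)
// amount to over HTTP for a closed index.
private void closeAndExpectGreen(String index) throws IOException {
    client().performRequest(new Request("POST", "/" + index + "/_close"));

    Request health = new Request("GET", "/_cluster/health/" + index);
    health.addParameter("wait_for_status", "green"); // reachable because the closed index keeps assigned shards
    health.addParameter("wait_for_no_initializing_shards", "true");
    Response response = client().performRequest(health);
    assertEquals(200, response.getStatusLine().getStatusCode());
}
```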
@@ -245,7 +245,6 @@ private String getNodeId(Predicate<Version> versionPredicate) throws IOException
if (versionPredicate.test(version)) {
return id;
}
return id;
}
return null;
}
@@ -457,15 +456,10 @@ public void testRecoveryClosedIndex() throws Exception {
closeIndex(indexName);
}

final Version indexVersionCreated = indexVersionCreated(indexName);
if (indexVersionCreated.onOrAfter(LegacyESVersion.V_7_2_0)) {
// index was created on a version that supports the replication of closed indices,
// so we expect the index to be closed and replicated
ensureGreen(indexName);
assertClosedIndex(indexName, true);
} else {
assertClosedIndex(indexName, false);
}
// index was created on a version that supports the replication of closed indices,
// so we expect the index to be closed and replicated
ensureGreen(indexName);
assertClosedIndex(indexName, true);
}

/**
@@ -491,14 +485,10 @@ public void testCloseIndexDuringRollingUpgrade() throws Exception {
closeIndex(indexName);
}

if (minimumNodeVersion.onOrAfter(LegacyESVersion.V_7_2_0)) {
// index is created on a version that supports the replication of closed indices,
// so we expect the index to be closed and replicated
ensureGreen(indexName);
assertClosedIndex(indexName, true);
} else {
assertClosedIndex(indexName, false);
}
// index is created on a version that supports the replication of closed indices,
// so we expect the index to be closed and replicated
ensureGreen(indexName);
assertClosedIndex(indexName, true);
}

/**
@@ -525,27 +515,20 @@ public void testClosedIndexNoopRecovery() throws Exception {
closeIndex(indexName);
}

final Version indexVersionCreated = indexVersionCreated(indexName);
if (indexVersionCreated.onOrAfter(LegacyESVersion.V_7_2_0)) {
// index was created on a version that supports the replication of closed indices,
// so we expect the index to be closed and replicated
ensureGreen(indexName);
assertClosedIndex(indexName, true);
if (minimumNodeVersion().onOrAfter(LegacyESVersion.V_7_2_0)) {
switch (CLUSTER_TYPE) {
case OLD: break;
case MIXED:
assertNoopRecoveries(indexName, s -> s.startsWith(CLUSTER_NAME + "-0"));
break;
case UPGRADED:
assertNoopRecoveries(indexName, s -> s.startsWith(CLUSTER_NAME));
break;
}
}
} else {
assertClosedIndex(indexName, false);
}
// index was created on a version that supports the replication of closed indices,
// so we expect the index to be closed and replicated
ensureGreen(indexName);
assertClosedIndex(indexName, true);

switch (CLUSTER_TYPE) {
case OLD: break;
case MIXED:
assertNoopRecoveries(indexName, s -> s.startsWith(CLUSTER_NAME + "-0"));
break;
case UPGRADED:
assertNoopRecoveries(indexName, s -> s.startsWith(CLUSTER_NAME));
break;
}
}
/**
* Returns the version in which the given index has been created
@@ -32,7 +32,6 @@

package org.opensearch.gateway;

import org.opensearch.LegacyESVersion;
import org.opensearch.action.admin.indices.stats.ShardStats;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.node.DiscoveryNode;
Expand Down Expand Up @@ -512,7 +511,7 @@ public void testPeerRecoveryForClosedIndices() throws Exception {
}

/**
* If the recovery source is on an old node (before <pre>{@link LegacyESVersion#V_7_2_0}</pre>) then the recovery target
* If the recovery source is on an old node (before <pre>{@code LegacyESVersion#V_7_2_0}</pre>) then the recovery target
* won't have the safe commit after phase1 because the recovery source does not send the global checkpoint in the clean_files
* step. And if the recovery fails and retries, then the recovery stage might not transition properly. This test simulates
* this behavior by changing the global checkpoint in phase1 to unassigned.
@@ -32,9 +32,7 @@

package org.opensearch.indices;

import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchException;
import org.opensearch.Version;
import org.opensearch.action.admin.cluster.state.ClusterStateResponse;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.metadata.IndexMetadata;
Expand Down Expand Up @@ -246,13 +244,8 @@ public void testIndexStateShardChanged() throws Throwable {
assertThat(stateChangeListenerNode1.afterCloseSettings.getAsInt(SETTING_NUMBER_OF_SHARDS, -1), equalTo(6));
assertThat(stateChangeListenerNode1.afterCloseSettings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1), equalTo(1));

if (Version.CURRENT.onOrAfter(LegacyESVersion.V_7_2_0)) {
assertShardStatesMatch(stateChangeListenerNode1, 6, CLOSED, CREATED, RECOVERING, POST_RECOVERY, STARTED);
assertShardStatesMatch(stateChangeListenerNode2, 6, CLOSED, CREATED, RECOVERING, POST_RECOVERY, STARTED);
} else {
assertShardStatesMatch(stateChangeListenerNode1, 6, CLOSED);
assertShardStatesMatch(stateChangeListenerNode2, 6, CLOSED);
}
assertShardStatesMatch(stateChangeListenerNode1, 6, CLOSED, CREATED, RECOVERING, POST_RECOVERY, STARTED);
assertShardStatesMatch(stateChangeListenerNode2, 6, CLOSED, CREATED, RECOVERING, POST_RECOVERY, STARTED);
}

private static void assertShardStatesMatch(
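The listener test now always expects closing an index to run its shards through the full lifecycle again, because closed indices keep their shards allocated on every supported version. A hedged sketch of a state-recording listener of the kind such a test drives (the class and field names are hypothetical; `IndexEventListener` and `IndexShardState` are the real OpenSearch types):

```java
import org.opensearch.common.Nullable;
import org.opensearch.index.shard.IndexEventListener;
import org.opensearch.index.shard.IndexShard;
import org.opensearch.index.shard.IndexShardState;

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

public class RecordingShardStateListener implements IndexEventListener {

    // Expected per shard after a close on current versions:
    // CLOSED -> CREATED -> RECOVERING -> POST_RECOVERY -> STARTED
    public final List<IndexShardState> observed = new CopyOnWriteArrayList<>();

    @Override
    public void indexShardStateChanged(
        IndexShard shard,
        @Nullable IndexShardState previousState,
        IndexShardState currentState,
        @Nullable String reason
    ) {
        observed.add(currentState);
    }
}
```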
5 changes: 0 additions & 5 deletions server/src/main/java/org/opensearch/LegacyESVersion.java
@@ -48,11 +48,6 @@
*/
public class LegacyESVersion extends Version {

public static final LegacyESVersion V_7_2_0 = new LegacyESVersion(7020099, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final LegacyESVersion V_7_2_1 = new LegacyESVersion(7020199, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final LegacyESVersion V_7_3_0 = new LegacyESVersion(7030099, org.apache.lucene.util.Version.LUCENE_8_1_0);
public static final LegacyESVersion V_7_3_1 = new LegacyESVersion(7030199, org.apache.lucene.util.Version.LUCENE_8_1_0);
public static final LegacyESVersion V_7_3_2 = new LegacyESVersion(7030299, org.apache.lucene.util.Version.LUCENE_8_1_0);
public static final LegacyESVersion V_7_4_0 = new LegacyESVersion(7040099, org.apache.lucene.util.Version.LUCENE_8_2_0);
public static final LegacyESVersion V_7_4_1 = new LegacyESVersion(7040199, org.apache.lucene.util.Version.LUCENE_8_2_0);
public static final LegacyESVersion V_7_4_2 = new LegacyESVersion(7040299, org.apache.lucene.util.Version.LUCENE_8_2_0);
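Each removed constant encodes its version in the numeric id, paired with the Lucene version that release shipped with (for example `7020099` with `LUCENE_8_0_0` for 7.2.0). A small standalone sketch that decodes that layout, inferred from the constants themselves rather than taken from OpenSearch's `Version` internals:

```java
public final class LegacyVersionIdSketch {

    // Layout inferred from the constants above: 7020099 = major 7, minor 02,
    // revision 00, build 99; the trailing 99 appears to mark a release build.
    static String describe(int id) {
        int major = id / 1_000_000;
        int minor = (id / 10_000) % 100;
        int revision = (id / 100) % 100;
        return major + "." + minor + "." + revision;
    }

    public static void main(String[] args) {
        System.out.println(describe(7020099)); // 7.2.0 (was LegacyESVersion.V_7_2_0)
        System.out.println(describe(7030199)); // 7.3.1 (was LegacyESVersion.V_7_3_1)
        System.out.println(describe(7040299)); // 7.4.2 (still defined as V_7_4_2)
    }
}
```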
@@ -601,7 +601,7 @@ public static void generateFailureXContent(XContentBuilder builder, Params param
}
t = t.getCause();
}
builder.field(ERROR, ExceptionsHelper.summaryMessage(t));
builder.field(ERROR, ExceptionsHelper.summaryMessage(e));
return;
}

@@ -32,7 +32,6 @@

package org.opensearch.action.admin.cluster.health;

import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.IndicesRequest;
import org.opensearch.action.support.ActiveShardCount;
@@ -90,11 +89,7 @@ public ClusterHealthRequest(StreamInput in) throws IOException {
waitForEvents = Priority.readFrom(in);
}
waitForNoInitializingShards = in.readBoolean();
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_2_0)) {
indicesOptions = IndicesOptions.readIndicesOptions(in);
} else {
indicesOptions = IndicesOptions.lenientExpandOpen();
}
indicesOptions = IndicesOptions.readIndicesOptions(in);
}

@Override
@@ -122,9 +117,7 @@ public void writeTo(StreamOutput out) throws IOException {
Priority.writeTo(waitForEvents, out);
}
out.writeBoolean(waitForNoInitializingShards);
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_2_0)) {
indicesOptions.writeIndicesOptions(out);
}
indicesOptions.writeIndicesOptions(out);
}

@Override
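With the `V_7_2_0` gates gone, `ClusterHealthRequest` writes and reads its `IndicesOptions` unconditionally. A hedged round-trip sketch of that serialization path follows (test-style code; it assumes this commit's server artifact on the classpath and that the `indicesOptions(...)` setter used here exists unchanged):

```java
import org.opensearch.action.admin.cluster.health.ClusterHealthRequest;
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.common.io.stream.BytesStreamOutput;

public final class ClusterHealthWireSketch {
    public static void main(String[] args) throws Exception {
        ClusterHealthRequest request = new ClusterHealthRequest("logs-*");
        // Non-default options make the round trip visible.
        request.indicesOptions(IndicesOptions.strictExpandOpen());

        BytesStreamOutput out = new BytesStreamOutput();
        request.writeTo(out); // IndicesOptions is now written for every target version

        ClusterHealthRequest copy = new ClusterHealthRequest(out.bytes().streamInput());
        System.out.println(copy.indicesOptions().equals(request.indicesOptions())); // expected: true
    }
}
```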
@@ -35,14 +35,14 @@
import org.opensearch.OpenSearchException;
import org.opensearch.action.FailedNodeException;
import org.opensearch.action.support.ActionFilters;
import org.opensearch.action.support.nodes.BaseNodeRequest;
import org.opensearch.action.support.nodes.TransportNodesAction;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.inject.Inject;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.monitor.jvm.HotThreads;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportService;

import java.io.IOException;
@@ -117,7 +117,7 @@ protected NodeHotThreads nodeOperation(NodeRequest request) {
*
* @opensearch.internal
*/
public static class NodeRequest extends BaseNodeRequest {
public static class NodeRequest extends TransportRequest {

NodesHotThreadsRequest request;

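`BaseNodeRequest` appears to have survived only as an empty compatibility shim for pre-7.3 wire formats, so the per-node request here, and in the nodes info, reload-secure-settings, and nodes stats actions below, now extends `TransportRequest` directly. A hedged sketch of the resulting shape (the payload field is a stand-in; the real classes wrap their parent `Nodes*Request`):

```java
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.transport.TransportRequest;

import java.io.IOException;

public class NodeRequestSketch extends TransportRequest {

    private final String payload; // stand-in for the wrapped Nodes*Request

    public NodeRequestSketch(StreamInput in) throws IOException {
        super(in);            // TransportRequest reads the task/parent-task framing
        payload = in.readString();
    }

    public NodeRequestSketch(String payload) {
        this.payload = payload;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);   // keep the framing symmetrical with the read path
        out.writeString(payload);
    }
}
```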
@@ -34,14 +34,14 @@

import org.opensearch.action.FailedNodeException;
import org.opensearch.action.support.ActionFilters;
import org.opensearch.action.support.nodes.BaseNodeRequest;
import org.opensearch.action.support.nodes.TransportNodesAction;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.inject.Inject;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.node.NodeService;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportService;

import java.io.IOException;
@@ -126,7 +126,7 @@ protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest) {
*
* @opensearch.internal
*/
public static class NodeInfoRequest extends BaseNodeRequest {
public static class NodeInfoRequest extends TransportRequest {

NodesInfoRequest request;

@@ -39,7 +39,6 @@
import org.opensearch.action.ActionListener;
import org.opensearch.action.FailedNodeException;
import org.opensearch.action.support.ActionFilters;
import org.opensearch.action.support.nodes.BaseNodeRequest;
import org.opensearch.action.support.nodes.TransportNodesAction;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.service.ClusterService;
@@ -54,6 +53,7 @@
import org.opensearch.plugins.ReloadablePlugin;
import org.opensearch.tasks.Task;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportService;

import java.io.IOException;
@@ -188,7 +188,7 @@ protected NodesReloadSecureSettingsResponse.NodeResponse nodeOperation(NodeReque
*
* @opensearch.internal
*/
public static class NodeRequest extends BaseNodeRequest {
public static class NodeRequest extends TransportRequest {

NodesReloadSecureSettingsRequest request;

@@ -34,14 +34,14 @@

import org.opensearch.action.FailedNodeException;
import org.opensearch.action.support.ActionFilters;
import org.opensearch.action.support.nodes.BaseNodeRequest;
import org.opensearch.action.support.nodes.TransportNodesAction;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.inject.Inject;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.node.NodeService;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportService;

import java.io.IOException;
@@ -127,7 +127,7 @@ protected NodeStats nodeOperation(NodeStatsRequest nodeStatsRequest) {
*
* @opensearch.internal
*/
public static class NodeStatsRequest extends BaseNodeRequest {
public static class NodeStatsRequest extends TransportRequest {

NodesStatsRequest request;
