Skip to content

Commit

Permalink
Introducing TestFeatureService to ESRestTestCase (elastic#102243)
Browse files Browse the repository at this point in the history
* Introducing TestFeatureService to ESRestTestCase

- Added RestTestLegacyFeatures to encompass legacy (historical) features that have been removed from production code but are still needed by REST tests
- Encapsulated Mark's getHistoricalFeatures method inside a FeatureProvider (ESRestTestCaseHistoricalFeatures)
- ESRestTestCaseHistoricalFeatures is not yet used, as we need to figure out how to deal with old cluster tests
  • Loading branch information
ldematte authored Nov 22, 2023
1 parent 9e3d0db commit 4e81494
Show file tree
Hide file tree
Showing 8 changed files with 251 additions and 90 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
import org.elasticsearch.test.cluster.local.distribution.DistributionType;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.ObjectPath;
import org.elasticsearch.test.rest.RestTestLegacyFeatures;
import org.elasticsearch.transport.Compression;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
Expand Down Expand Up @@ -69,7 +70,6 @@
import static java.util.Collections.singletonMap;
import static java.util.stream.Collectors.toList;
import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.SYSTEM_INDEX_ENFORCEMENT_INDEX_VERSION;
import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.SYSTEM_INDEX_ENFORCEMENT_VERSION;
import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING;
import static org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY;
import static org.elasticsearch.test.MapMatcher.assertMap;
Expand Down Expand Up @@ -1620,7 +1620,7 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception {

// If we are on 7.x create an alias that includes both a system index and a non-system index so we can be sure it gets
// upgraded properly. If we're already on 8.x, skip this part of the test.
if (minimumNodeVersion().before(SYSTEM_INDEX_ENFORCEMENT_VERSION)) {
if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_ENFORCED) == false) {
// Create an alias to make sure it gets upgraded properly
Request putAliasRequest = new Request("POST", "/_aliases");
putAliasRequest.setJsonEntity("""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,11 @@
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.test.XContentTestUtils.JsonMapView;
import org.elasticsearch.test.rest.RestTestLegacyFeatures;

import java.util.Map;

import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.SYSTEM_INDEX_ENFORCEMENT_INDEX_VERSION;
import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.SYSTEM_INDEX_ENFORCEMENT_VERSION;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
Expand Down Expand Up @@ -89,7 +89,7 @@ public void testSystemIndicesUpgrades() throws Exception {

// If we are on 7.x create an alias that includes both a system index and a non-system index so we can be sure it gets
// upgraded properly. If we're already on 8.x, skip this part of the test.
if (minimumNodeVersion().before(SYSTEM_INDEX_ENFORCEMENT_VERSION)) {
if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_ENFORCED) == false) {
// Create an alias to make sure it gets upgraded properly
Request putAliasRequest = new Request("POST", "/_aliases");
putAliasRequest.setJsonEntity("""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@

import org.apache.lucene.util.automaton.Automaton;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions;
Expand Down Expand Up @@ -63,7 +62,6 @@ public class IndexNameExpressionResolver {
private static final Predicate<String> ALWAYS_TRUE = s -> true;

public static final String EXCLUDED_DATA_STREAMS_KEY = "es.excluded_ds";
public static final Version SYSTEM_INDEX_ENFORCEMENT_VERSION = Version.V_8_0_0;
public static final IndexVersion SYSTEM_INDEX_ENFORCEMENT_INDEX_VERSION = IndexVersions.V_8_0_0;

private final ThreadContext threadContext;
Expand Down
102 changes: 102 additions & 0 deletions server/src/main/java/org/elasticsearch/features/FeatureData.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

package org.elasticsearch.features;

import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeMap;

import static org.elasticsearch.features.FeatureService.CLUSTER_FEATURES_ADDED_VERSION;

/**
 * Reads and consolidates features exposed by a list of {@link FeatureSpecification}s, grouping them into historical
 * features and node features for the consumption of {@link FeatureService}.
 */
public class FeatureData {

    // Features keyed by the Version they were introduced in; after consolidation each version's set also
    // contains all features from earlier versions.
    private final NavigableMap<Version, Set<String>> historicalFeatures;
    // Node features keyed by feature id.
    private final Map<String, NodeFeature> nodeFeatures;

    private FeatureData(NavigableMap<Version, Set<String>> historicalFeatures, Map<String, NodeFeature> nodeFeatures) {
        this.historicalFeatures = historicalFeatures;
        this.nodeFeatures = nodeFeatures;
    }

    /**
     * Builds consolidated feature data from the given specifications.
     *
     * @param specs the feature specifications to read
     * @return the consolidated {@link FeatureData}
     * @throws IllegalArgumentException if the same feature id is declared by two different specification classes, or if a
     *         historical feature is declared for a version after {@code CLUSTER_FEATURES_ADDED_VERSION}
     */
    public static FeatureData createFromSpecifications(List<? extends FeatureSpecification> specs) {
        Map<String, FeatureSpecification> allFeatures = new HashMap<>();

        NavigableMap<Version, Set<String>> historicalFeatures = new TreeMap<>();
        Map<String, NodeFeature> nodeFeatures = new HashMap<>();
        for (FeatureSpecification spec : specs) {
            for (var hfe : spec.getHistoricalFeatures().entrySet()) {
                registerFeature(allFeatures, hfe.getKey().id(), spec);

                // historical features must predate the introduction of cluster features
                if (hfe.getValue().after(CLUSTER_FEATURES_ADDED_VERSION)) {
                    throw new IllegalArgumentException(
                        Strings.format(
                            "Historical feature [%s] declared by [%s] for version [%s] is not a historical version",
                            hfe.getKey().id(),
                            spec,
                            hfe.getValue()
                        )
                    );
                }

                historicalFeatures.computeIfAbsent(hfe.getValue(), k -> new HashSet<>()).add(hfe.getKey().id());
            }

            for (NodeFeature f : spec.getFeatures()) {
                registerFeature(allFeatures, f.id(), spec);
                nodeFeatures.put(f.id(), f);
            }
        }

        return new FeatureData(consolidateHistoricalFeatures(historicalFeatures), Map.copyOf(nodeFeatures));
    }

    /**
     * Records {@code featureId} as declared by {@code spec}, failing if a different specification class already declared it.
     * Re-registration by the same class is allowed because the same SPI class can be loaded multiple times if it's in the
     * base classloader.
     */
    private static void registerFeature(Map<String, FeatureSpecification> allFeatures, String featureId, FeatureSpecification spec) {
        FeatureSpecification existing = allFeatures.putIfAbsent(featureId, spec);
        if (existing != null && existing.getClass() != spec.getClass()) {
            throw new IllegalArgumentException(
                Strings.format("Duplicate feature - [%s] is declared by both [%s] and [%s]", featureId, existing, spec)
            );
        }
    }

    /**
     * Makes each version's feature set cumulative: every entry gains all features declared at earlier versions.
     * Mutates {@code declaredHistoricalFeatures} in place and returns an unmodifiable view of it.
     */
    private static NavigableMap<Version, Set<String>> consolidateHistoricalFeatures(
        NavigableMap<Version, Set<String>> declaredHistoricalFeatures
    ) {
        // update each version by adding in all features from previous versions
        Set<String> featureAggregator = new HashSet<>();
        for (Map.Entry<Version, Set<String>> versions : declaredHistoricalFeatures.entrySet()) {
            featureAggregator.addAll(versions.getValue());
            versions.setValue(Set.copyOf(featureAggregator));
        }

        return Collections.unmodifiableNavigableMap(declaredHistoricalFeatures);
    }

    /** Historical features, keyed by version, with each version's set including all earlier versions' features. */
    public NavigableMap<Version, Set<String>> getHistoricalFeatures() {
        return historicalFeatures;
    }

    /** Node features keyed by feature id. */
    public Map<String, NodeFeature> getNodeFeatures() {
        return nodeFeatures;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,19 +10,14 @@

import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.Strings;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.logging.LogManager;
import org.elasticsearch.logging.Logger;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeMap;

/**
* Manages information on the features supported by nodes in the cluster
Expand All @@ -42,65 +37,14 @@ public class FeatureService {
private final Map<String, NodeFeature> nodeFeatures;

public FeatureService(List<? extends FeatureSpecification> specs) {
Map<String, FeatureSpecification> allFeatures = new HashMap<>();

NavigableMap<Version, Set<String>> historicalFeatures = new TreeMap<>();
Map<String, NodeFeature> nodeFeatures = new HashMap<>();
for (FeatureSpecification spec : specs) {
for (var hfe : spec.getHistoricalFeatures().entrySet()) {
FeatureSpecification existing = allFeatures.putIfAbsent(hfe.getKey().id(), spec);
// the same SPI class can be loaded multiple times if it's in the base classloader
if (existing != null && existing.getClass() != spec.getClass()) {
throw new IllegalArgumentException(
Strings.format("Duplicate feature - [%s] is declared by both [%s] and [%s]", hfe.getKey().id(), existing, spec)
);
}

if (hfe.getValue().after(CLUSTER_FEATURES_ADDED_VERSION)) {
throw new IllegalArgumentException(
Strings.format(
"Historical feature [%s] declared by [%s] for version [%s] is not a historical version",
hfe.getKey().id(),
spec,
hfe.getValue()
)
);
}

historicalFeatures.computeIfAbsent(hfe.getValue(), k -> new HashSet<>()).add(hfe.getKey().id());
}

for (NodeFeature f : spec.getFeatures()) {
FeatureSpecification existing = allFeatures.putIfAbsent(f.id(), spec);
if (existing != null && existing.getClass() != spec.getClass()) {
throw new IllegalArgumentException(
Strings.format("Duplicate feature - [%s] is declared by both [%s] and [%s]", f.id(), existing, spec)
);
}

nodeFeatures.put(f.id(), f);
}
}

this.historicalFeatures = consolidateHistoricalFeatures(historicalFeatures);
this.nodeFeatures = Map.copyOf(nodeFeatures);
var featureData = FeatureData.createFromSpecifications(specs);
nodeFeatures = featureData.getNodeFeatures();
historicalFeatures = featureData.getHistoricalFeatures();

logger.info("Registered local node features {}", nodeFeatures.keySet().stream().sorted().toList());
}

private static NavigableMap<Version, Set<String>> consolidateHistoricalFeatures(
NavigableMap<Version, Set<String>> declaredHistoricalFeatures
) {
// update each version by adding in all features from previous versions
Set<String> featureAggregator = new HashSet<>();
for (Map.Entry<Version, Set<String>> versions : declaredHistoricalFeatures.entrySet()) {
featureAggregator.addAll(versions.getValue());
versions.setValue(Set.copyOf(featureAggregator));
}

return Collections.unmodifiableNavigableMap(declaredHistoricalFeatures);
}

/**
* The non-historical features supported by this node.
*/
Expand Down
Loading

0 comments on commit 4e81494

Please sign in to comment.