Remove historical features infrastructure (#117043)

v9 can only talk to 8.18 nodes, and historical features are tied to versions no later than 8.12, so we can remove all historical features and the infrastructure that supports them.
Simon Cooper 2024-11-24 19:39:09 +00:00 committed by GitHub
parent 5f3b380134
commit e701697eb5
19 changed files with 62 additions and 337 deletions
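For context before the diffs: the extension point being deleted is FeatureSpecification#getHistoricalFeatures, which let a specification declare features as implicitly present on every node at or above a given Version. A minimal before/after sketch (the class and feature names here are illustrative, not from the commit):

import java.util.Map;
import java.util.Set;
import org.elasticsearch.Version;
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;

// Before this commit: a feature could be deemed present from the node version alone.
class ExampleFeaturesBefore implements FeatureSpecification {
    @Override
    public Map<NodeFeature, Version> getHistoricalFeatures() {
        return Map.of(new NodeFeature("example_feature"), Version.V_8_10_0);
    }
}

// After this commit: only features explicitly published to cluster state remain.
class ExampleFeaturesAfter implements FeatureSpecification {
    @Override
    public Set<NodeFeature> getFeatures() {
        return Set.of(new NodeFeature("example_feature"));
    }
}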

@@ -119,7 +119,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
noticeFile.set(file("NOTICE"))
"""
when:
def result = gradleRunner("assemble", "-x", "generateHistoricalFeaturesMetadata").build()
def result = gradleRunner("assemble", "-x", "generateClusterFeaturesMetadata").build()
then:
result.task(":assemble").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world.jar").exists()

@@ -303,7 +303,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
"""
when:
def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateHistoricalFeaturesMetadata').build()
def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateClusterFeaturesMetadata').build()
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS

@@ -14,7 +14,7 @@ import groovy.lang.Closure;
import org.elasticsearch.gradle.internal.conventions.util.Util;
import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
import org.elasticsearch.gradle.internal.precommit.JarHellPrecommitPlugin;
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin;
import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;
import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
@@ -38,7 +38,7 @@ public class BaseInternalPluginBuildPlugin implements Plugin<Project> {
project.getPluginManager().apply(PluginBuildPlugin.class);
project.getPluginManager().apply(JarHellPrecommitPlugin.class);
project.getPluginManager().apply(ElasticsearchJavaPlugin.class);
project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class);
boolean isCi = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class).isCi();
// Clear default dependencies added by public PluginBuildPlugin as we add our
// own project dependencies for internal builds

@@ -12,7 +12,7 @@ package org.elasticsearch.gradle.internal;
import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
import org.elasticsearch.gradle.internal.precommit.InternalPrecommitTasks;
import org.elasticsearch.gradle.internal.snyk.SnykDependencyMonitoringGradlePlugin;
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin;
import org.gradle.api.InvalidUserDataException;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
@@ -63,7 +63,7 @@ public class BuildPlugin implements Plugin<Project> {
project.getPluginManager().apply(ElasticsearchJavadocPlugin.class);
project.getPluginManager().apply(DependenciesInfoPlugin.class);
project.getPluginManager().apply(SnykDependencyMonitoringGradlePlugin.class);
project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class);
InternalPrecommitTasks.create(project, true);
configureLicenseAndNotice(project);
}

@@ -21,10 +21,10 @@ import org.gradle.api.tasks.TaskProvider;
import java.util.Map;
/**
* Extracts historical feature metadata into a machine-readable format for use in backward compatibility testing.
* Extracts cluster feature metadata into a machine-readable format for use in backward compatibility testing.
*/
public class HistoricalFeaturesMetadataPlugin implements Plugin<Project> {
public static final String HISTORICAL_FEATURES_JSON = "historical-features.json";
public class ClusterFeaturesMetadataPlugin implements Plugin<Project> {
public static final String CLUSTER_FEATURES_JSON = "cluster-features.json";
public static final String FEATURES_METADATA_TYPE = "features-metadata-json";
public static final String FEATURES_METADATA_CONFIGURATION = "featuresMetadata";
@@ -40,13 +40,13 @@ public class HistoricalFeaturesMetadataPlugin implements Plugin<Project> {
SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);
TaskProvider<HistoricalFeaturesMetadataTask> generateTask = project.getTasks()
.register("generateHistoricalFeaturesMetadata", HistoricalFeaturesMetadataTask.class, task -> {
TaskProvider<ClusterFeaturesMetadataTask> generateTask = project.getTasks()
.register("generateClusterFeaturesMetadata", ClusterFeaturesMetadataTask.class, task -> {
task.setClasspath(
featureMetadataExtractorConfig.plus(mainSourceSet.getRuntimeClasspath())
.plus(project.getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME))
);
task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(HISTORICAL_FEATURES_JSON));
task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(CLUSTER_FEATURES_JSON));
});
Configuration featuresMetadataArtifactConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> {
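In practice the task registered in this hunk can be invoked directly, e.g. gradlew :server:generateClusterFeaturesMetadata (the project path is illustrative), and by the convention above it writes build/cluster-features.json; the featuresMetadata configuration created at the end of the hunk is what exposes that output to other projects as a typed artifact.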

@@ -26,7 +26,7 @@ import org.gradle.workers.WorkerExecutor;
import javax.inject.Inject;
@CacheableTask
public abstract class HistoricalFeaturesMetadataTask extends DefaultTask {
public abstract class ClusterFeaturesMetadataTask extends DefaultTask {
private FileCollection classpath;
@OutputFile
@@ -46,30 +46,30 @@ public abstract class HistoricalFeaturesMetadataTask extends DefaultTask {
@TaskAction
public void execute() {
getWorkerExecutor().noIsolation().submit(HistoricalFeaturesMetadataWorkAction.class, params -> {
getWorkerExecutor().noIsolation().submit(ClusterFeaturesMetadataWorkAction.class, params -> {
params.getClasspath().setFrom(getClasspath());
params.getOutputFile().set(getOutputFile());
});
}
public interface HistoricalFeaturesWorkParameters extends WorkParameters {
public interface ClusterFeaturesWorkParameters extends WorkParameters {
ConfigurableFileCollection getClasspath();
RegularFileProperty getOutputFile();
}
public abstract static class HistoricalFeaturesMetadataWorkAction implements WorkAction<HistoricalFeaturesWorkParameters> {
public abstract static class ClusterFeaturesMetadataWorkAction implements WorkAction<ClusterFeaturesWorkParameters> {
private final ExecOperations execOperations;
@Inject
public HistoricalFeaturesMetadataWorkAction(ExecOperations execOperations) {
public ClusterFeaturesMetadataWorkAction(ExecOperations execOperations) {
this.execOperations = execOperations;
}
@Override
public void execute() {
LoggedExec.javaexec(execOperations, spec -> {
spec.getMainClass().set("org.elasticsearch.extractor.features.HistoricalFeaturesMetadataExtractor");
spec.getMainClass().set("org.elasticsearch.extractor.features.ClusterFeaturesMetadataExtractor");
spec.classpath(getParameters().getClasspath());
spec.args(getParameters().getOutputFile().get().getAsFile().getAbsolutePath());
});

@@ -20,8 +20,8 @@ import org.elasticsearch.gradle.VersionProperties;
import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes;
import org.elasticsearch.gradle.internal.ElasticsearchJavaBasePlugin;
import org.elasticsearch.gradle.internal.InternalDistributionDownloadPlugin;
import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin;
import org.elasticsearch.gradle.internal.test.ErrorReportingTestListener;
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
import org.elasticsearch.gradle.plugin.BasePluginBuildPlugin;
import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;
@@ -116,12 +116,12 @@ public class RestTestBasePlugin implements Plugin<Project> {
extractedPluginsConfiguration.extendsFrom(pluginsConfiguration);
configureArtifactTransforms(project);
// Create configuration for aggregating historical feature metadata
// Create configuration for aggregating feature metadata
FileCollection featureMetadataConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> {
c.setCanBeConsumed(false);
c.setCanBeResolved(true);
c.attributes(
a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
);
c.defaultDependencies(d -> d.add(project.getDependencies().project(Map.of("path", ":server"))));
c.withDependencies(dependencies -> {
@@ -136,10 +136,7 @@ public class RestTestBasePlugin implements Plugin<Project> {
c.setCanBeConsumed(false);
c.setCanBeResolved(true);
c.attributes(
a -> a.attribute(
ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE,
HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE
)
a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
);
c.defaultDependencies(
d -> d.add(project.getDependencies().project(Map.of("path", ":distribution", "configuration", "featuresMetadata")))
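For orientation: the files resolved through this featuresMetadata configuration are what ESRestTestFeatureService (further down) reads via the tests.features.metadata.path system property. The property name is taken from the source below; the wiring in this sketch is a simplified assumption, not the plugin's actual task plumbing:

import org.gradle.api.Project;
import org.gradle.api.file.FileCollection;
import org.gradle.api.tasks.testing.Test;

// Hypothetical wiring sketch: forward the aggregated metadata files to test JVMs.
static void wireFeatureMetadata(Project project, FileCollection featureMetadataConfig) {
    project.getTasks().withType(Test.class).configureEach(test -> {
        test.getInputs().files(featureMetadataConfig);
        // getAsPath() yields a File.pathSeparator-joined list of the resolved JSON files
        test.systemProperty("tests.features.metadata.path", featureMetadataConfig.getAsPath());
    });
}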

@@ -14,7 +14,7 @@ import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.internal.ConcatFilesTask
import org.elasticsearch.gradle.internal.DependenciesInfoPlugin
import org.elasticsearch.gradle.internal.NoticeTask
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin
import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin
import java.nio.file.Files
import java.nio.file.Path
@@ -33,7 +33,7 @@ configurations {
}
featuresMetadata {
attributes {
attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
}
}
}

@@ -95,7 +95,7 @@ public class ClusterFeatures implements Diffable<ClusterFeatures>, ChunkedToXCon
/**
* {@code true} if {@code feature} is present on all nodes in the cluster.
* <p>
* NOTE: This should not be used directly, as it does not read historical features.
* NOTE: This should not be used directly.
* Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead.
*/
@SuppressForbidden(reason = "directly reading cluster features")
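The guidance in this Javadoc is unchanged by the commit: go through the FeatureService rather than reading cluster features directly. A minimal usage sketch (the gate class and feature name are illustrative):

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.features.FeatureService;
import org.elasticsearch.features.NodeFeature;

final class ExampleFeatureGate {
    static final NodeFeature EXAMPLE_FEATURE = new NodeFeature("example_feature");

    // true only once every node in the cluster has published the feature
    static boolean ready(FeatureService featureService, ClusterState state) {
        return featureService.clusterHasFeature(state, EXAMPLE_FEATURE);
    }
}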

@@ -9,25 +9,19 @@
package org.elasticsearch.features;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.logging.LogManager;
import org.elasticsearch.logging.Logger;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeMap;
import static org.elasticsearch.features.FeatureService.CLUSTER_FEATURES_ADDED_VERSION;
/**
* Reads and consolidate features exposed by a list {@link FeatureSpecification}, grouping them into historical features and node
* features for the consumption of {@link FeatureService}
* Reads and consolidate features exposed by a list {@link FeatureSpecification},
* grouping them together for the consumption of {@link FeatureService}
*/
public class FeatureData {
@@ -40,19 +34,14 @@ public class FeatureData {
}
}
private final NavigableMap<Version, Set<String>> historicalFeatures;
private final Map<String, NodeFeature> nodeFeatures;
private FeatureData(NavigableMap<Version, Set<String>> historicalFeatures, Map<String, NodeFeature> nodeFeatures) {
this.historicalFeatures = historicalFeatures;
private FeatureData(Map<String, NodeFeature> nodeFeatures) {
this.nodeFeatures = nodeFeatures;
}
public static FeatureData createFromSpecifications(List<? extends FeatureSpecification> specs) {
Map<String, FeatureSpecification> allFeatures = new HashMap<>();
// Initialize historicalFeatures with empty version to guarantee there's a floor entry for every version
NavigableMap<Version, Set<String>> historicalFeatures = new TreeMap<>(Map.of(Version.V_EMPTY, Set.of()));
Map<String, NodeFeature> nodeFeatures = new HashMap<>();
for (FeatureSpecification spec : specs) {
Set<NodeFeature> specFeatures = spec.getFeatures();
@@ -61,39 +50,6 @@ public class FeatureData {
specFeatures.addAll(spec.getTestFeatures());
}
for (var hfe : spec.getHistoricalFeatures().entrySet()) {
FeatureSpecification existing = allFeatures.putIfAbsent(hfe.getKey().id(), spec);
// the same SPI class can be loaded multiple times if it's in the base classloader
if (existing != null && existing.getClass() != spec.getClass()) {
throw new IllegalArgumentException(
Strings.format("Duplicate feature - [%s] is declared by both [%s] and [%s]", hfe.getKey().id(), existing, spec)
);
}
if (hfe.getValue().after(CLUSTER_FEATURES_ADDED_VERSION)) {
throw new IllegalArgumentException(
Strings.format(
"Historical feature [%s] declared by [%s] for version [%s] is not a historical version",
hfe.getKey().id(),
spec,
hfe.getValue()
)
);
}
if (specFeatures.contains(hfe.getKey())) {
throw new IllegalArgumentException(
Strings.format(
"Feature [%s] cannot be declared as both a regular and historical feature by [%s]",
hfe.getKey().id(),
spec
)
);
}
historicalFeatures.computeIfAbsent(hfe.getValue(), k -> new HashSet<>()).add(hfe.getKey().id());
}
for (NodeFeature f : specFeatures) {
FeatureSpecification existing = allFeatures.putIfAbsent(f.id(), spec);
if (existing != null && existing.getClass() != spec.getClass()) {
@@ -106,24 +62,7 @@ }
}
}
return new FeatureData(consolidateHistoricalFeatures(historicalFeatures), Map.copyOf(nodeFeatures));
}
private static NavigableMap<Version, Set<String>> consolidateHistoricalFeatures(
NavigableMap<Version, Set<String>> declaredHistoricalFeatures
) {
// update each version by adding in all features from previous versions
Set<String> featureAggregator = new HashSet<>();
for (Map.Entry<Version, Set<String>> versions : declaredHistoricalFeatures.entrySet()) {
featureAggregator.addAll(versions.getValue());
versions.setValue(Set.copyOf(featureAggregator));
}
return Collections.unmodifiableNavigableMap(declaredHistoricalFeatures);
}
public NavigableMap<Version, Set<String>> getHistoricalFeatures() {
return historicalFeatures;
return new FeatureData(Map.copyOf(nodeFeatures));
}
public Map<String, NodeFeature> getNodeFeatures() {
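Pieced together from the hunks above, the surviving factory is a single pass with duplicate detection. A sketch of its shape reconstructed from the visible fragments (the test-features merge is elided; this is not the verbatim source):

public static FeatureData createFromSpecifications(List<? extends FeatureSpecification> specs) {
    Map<String, FeatureSpecification> allFeatures = new HashMap<>();
    Map<String, NodeFeature> nodeFeatures = new HashMap<>();
    for (FeatureSpecification spec : specs) {
        for (NodeFeature f : spec.getFeatures()) {
            FeatureSpecification existing = allFeatures.putIfAbsent(f.id(), spec);
            // the same SPI class can be loaded multiple times if it's in the base classloader
            if (existing != null && existing.getClass() != spec.getClass()) {
                throw new IllegalArgumentException(
                    Strings.format("Duplicate feature - [%s] is declared by both [%s] and [%s]", f.id(), existing, spec)
                );
            }
            nodeFeatures.put(f.id(), f);
        }
    }
    return new FeatureData(Map.copyOf(nodeFeatures));
}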

@@ -9,7 +9,6 @@
package org.elasticsearch.features;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.logging.LogManager;
@@ -17,8 +16,6 @@ import org.elasticsearch.logging.Logger;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
/**
* Manages information on the features supported by nodes in the cluster.
@@ -34,9 +31,6 @@ public class FeatureService {
private static final Logger logger = LogManager.getLogger(FeatureService.class);
public static final Version CLUSTER_FEATURES_ADDED_VERSION = Version.V_8_12_0;
private final NavigableMap<Version, Set<String>> historicalFeatures;
private final Map<String, NodeFeature> nodeFeatures;
/**
@@ -47,13 +41,12 @@ public class FeatureService {
var featureData = FeatureData.createFromSpecifications(specs);
nodeFeatures = featureData.getNodeFeatures();
historicalFeatures = featureData.getHistoricalFeatures();
logger.info("Registered local node features {}", nodeFeatures.keySet().stream().sorted().toList());
}
/**
* The non-historical features supported by this node.
* The features supported by this node.
* @return Map of {@code feature-id} to its declaring {@code NodeFeature} object.
*/
public Map<String, NodeFeature> getNodeFeatures() {
@@ -65,11 +58,6 @@
*/
@SuppressForbidden(reason = "We need basic feature information from cluster state")
public boolean clusterHasFeature(ClusterState state, NodeFeature feature) {
if (state.clusterFeatures().clusterHasFeature(feature)) {
return true;
}
var features = historicalFeatures.floorEntry(state.getNodes().getMinNodeVersion());
return features != null && features.getValue().contains(feature.id());
return state.clusterFeatures().clusterHasFeature(feature);
}
}
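With the version floor gone, clusterHasFeature is a straight delegation to cluster state, and construction is unchanged for callers. A minimal sketch using the constructor shape the tests below exercise (the inline spec and feature id are illustrative):

// inside some bootstrap or test code:
FeatureService service = new FeatureService(List.of(new FeatureSpecification() {
    @Override
    public Set<NodeFeature> getFeatures() {
        return Set.of(new NodeFeature("f1"));
    }
}));
// every registered feature is queryable by id
assert service.getNodeFeatures().containsKey("f1");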

@@ -9,9 +9,6 @@
package org.elasticsearch.features;
import org.elasticsearch.Version;
import java.util.Map;
import java.util.Set;
/**
@@ -49,12 +46,4 @@
default Set<NodeFeature> getTestFeatures() {
return Set.of();
}
/**
* Returns information on historical features that should be deemed to be present on all nodes
* on or above the {@link Version} specified.
*/
default Map<NodeFeature, Version> getHistoricalFeatures() {
return Map.of();
}
}
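Note that getTestFeatures survives as the one special-cased extension point: features needed only for REST-test gating go there rather than into getFeatures, and (per the FeatureData hunk above) are merged in only when test features are enabled. A sketch with hypothetical names:

class ExampleTestOnlyFeatures implements FeatureSpecification {
    @Override
    public Set<NodeFeature> getTestFeatures() {
        // only folded into the known-feature set in test-enabled runs
        return Set.of(new NodeFeature("example_test_feature"));
    }
}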

@@ -9,15 +9,9 @@
package org.elasticsearch.features;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.IndexVersions;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import java.util.List;
import java.util.Map;
@@ -30,79 +24,36 @@ public class FeatureServiceTests extends ESTestCase {
private static class TestFeatureSpecification implements FeatureSpecification {
private final Set<NodeFeature> features;
private final Map<NodeFeature, Version> historicalFeatures;
private TestFeatureSpecification(Set<NodeFeature> features, Map<NodeFeature, Version> historicalFeatures) {
private TestFeatureSpecification(Set<NodeFeature> features) {
this.features = features;
this.historicalFeatures = historicalFeatures;
}
@Override
public Set<NodeFeature> getFeatures() {
return features;
}
@Override
public Map<NodeFeature, Version> getHistoricalFeatures() {
return historicalFeatures;
}
}
public void testFailsDuplicateFeatures() {
// these all need to be separate classes to trigger the exception
FeatureSpecification fs1 = new TestFeatureSpecification(Set.of(new NodeFeature("f1")), Map.of()) {
FeatureSpecification fs1 = new TestFeatureSpecification(Set.of(new NodeFeature("f1"))) {
};
FeatureSpecification fs2 = new TestFeatureSpecification(Set.of(new NodeFeature("f1")), Map.of()) {
};
FeatureSpecification hfs1 = new TestFeatureSpecification(Set.of(), Map.of(new NodeFeature("f1"), Version.V_8_11_0)) {
};
FeatureSpecification hfs2 = new TestFeatureSpecification(Set.of(), Map.of(new NodeFeature("f1"), Version.V_8_11_0)) {
FeatureSpecification fs2 = new TestFeatureSpecification(Set.of(new NodeFeature("f1"))) {
};
assertThat(
expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs1, fs2))).getMessage(),
containsString("Duplicate feature")
);
assertThat(
expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(hfs1, hfs2))).getMessage(),
containsString("Duplicate feature")
);
assertThat(
expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs1, hfs1))).getMessage(),
containsString("Duplicate feature")
);
}
public void testFailsNonHistoricalVersion() {
FeatureSpecification fs = new TestFeatureSpecification(
Set.of(),
Map.of(new NodeFeature("f1"), Version.fromId(FeatureService.CLUSTER_FEATURES_ADDED_VERSION.id + 1))
);
assertThat(
expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs))).getMessage(),
containsString("not a historical version")
);
}
public void testFailsSameRegularAndHistoricalFeature() {
FeatureSpecification fs = new TestFeatureSpecification(
Set.of(new NodeFeature("f1")),
Map.of(new NodeFeature("f1"), Version.V_8_12_0)
);
assertThat(
expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs))).getMessage(),
containsString("cannot be declared as both a regular and historical feature")
);
}
public void testGetNodeFeaturesCombinesAllSpecs() {
List<FeatureSpecification> specs = List.of(
new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2")), Map.of()),
new TestFeatureSpecification(Set.of(new NodeFeature("f3")), Map.of()),
new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5")), Map.of()),
new TestFeatureSpecification(Set.of(), Map.of())
new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2"))),
new TestFeatureSpecification(Set.of(new NodeFeature("f3"))),
new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5"))),
new TestFeatureSpecification(Set.of())
);
FeatureService service = new FeatureService(specs);
@@ -111,10 +62,10 @@ public class FeatureServiceTests extends ESTestCase {
public void testStateHasFeatures() {
List<FeatureSpecification> specs = List.of(
new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2")), Map.of()),
new TestFeatureSpecification(Set.of(new NodeFeature("f3")), Map.of()),
new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5")), Map.of()),
new TestFeatureSpecification(Set.of(), Map.of())
new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2"))),
new TestFeatureSpecification(Set.of(new NodeFeature("f3"))),
new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5"))),
new TestFeatureSpecification(Set.of())
);
ClusterState state = ClusterState.builder(ClusterName.DEFAULT)
@@ -130,50 +81,4 @@
assertFalse(service.clusterHasFeature(state, new NodeFeature("nf2")));
assertFalse(service.clusterHasFeature(state, new NodeFeature("nf3")));
}
private static ClusterState stateWithMinVersion(Version version) {
DiscoveryNodes.Builder nodes = DiscoveryNodes.builder();
nodes.add(DiscoveryNodeUtils.builder("node").version(version, IndexVersions.ZERO, IndexVersion.current()).build());
for (int n = randomInt(5); n >= 0; n--) {
nodes.add(
DiscoveryNodeUtils.builder("node" + n)
.version(
VersionUtils.randomVersionBetween(random(), version, Version.CURRENT),
IndexVersions.ZERO,
IndexVersion.current()
)
.build()
);
}
return ClusterState.builder(ClusterName.DEFAULT).nodes(nodes).build();
}
public void testStateHasHistoricalFeatures() {
NodeFeature v8_11_0 = new NodeFeature("hf_8.11.0");
NodeFeature v8_10_0 = new NodeFeature("hf_8.10.0");
NodeFeature v7_17_0 = new NodeFeature("hf_7.17.0");
List<FeatureSpecification> specs = List.of(
new TestFeatureSpecification(Set.of(), Map.of(v8_11_0, Version.V_8_11_0)),
new TestFeatureSpecification(Set.of(), Map.of(v8_10_0, Version.V_8_10_0)),
new TestFeatureSpecification(Set.of(), Map.of(v7_17_0, Version.V_7_17_0))
);
FeatureService service = new FeatureService(specs);
assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v8_11_0));
assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v8_10_0));
assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v7_17_0));
assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v8_11_0));
assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v8_10_0));
assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v7_17_0));
assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), v8_11_0));
assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), v8_10_0));
assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), v7_17_0));
assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v8_11_0));
assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v8_10_0));
assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v7_17_0));
}
}

@@ -63,7 +63,6 @@ import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.PathUtils;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.UpdateForV9;
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.health.node.selection.HealthNode;
import org.elasticsearch.index.IndexSettings;
@@ -398,29 +397,11 @@ public abstract class ESRestTestCase extends ESTestCase {
assert nodesVersions != null;
}
/**
* Override to provide additional test-only historical features.
*
* Note: This extension point cannot be used to add cluster features. The provided {@link FeatureSpecification}s
* must contain only historical features, otherwise an assertion error is thrown.
*/
protected List<FeatureSpecification> additionalTestOnlyHistoricalFeatures() {
return List.of();
}
protected final TestFeatureService createTestFeatureService(
Map<String, Set<String>> clusterStateFeatures,
Set<Version> semanticNodeVersions
) {
// Historical features information is unavailable when using legacy test plugins
if (ESRestTestFeatureService.hasFeatureMetadata() == false) {
logger.warn(
"This test is running on the legacy test framework; historical features from production code will not be available. "
+ "You need to port the test to the new test plugins in order to use historical features from production code. "
+ "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification."
);
}
return new ESRestTestFeatureService(additionalTestOnlyHistoricalFeatures(), semanticNodeVersions, clusterStateFeatures.values());
return new ESRestTestFeatureService(semanticNodeVersions, clusterStateFeatures.values());
}
protected static boolean has(ProductFeature feature) {

@@ -13,9 +13,6 @@ import org.elasticsearch.Version;
import org.elasticsearch.core.PathUtils;
import org.elasticsearch.core.Strings;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.features.FeatureData;
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.json.JsonXContent;
@@ -25,13 +22,9 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Predicate;
@@ -48,33 +41,12 @@ class ESRestTestFeatureService implements TestFeatureService {
*/
private static final Pattern VERSION_FEATURE_PATTERN = Pattern.compile("gte_v(\\d+\\.\\d+\\.\\d+)");
private final Set<String> knownHistoricalFeatureNames;
private final Collection<Version> nodeVersions;
private final Collection<Set<String>> nodeFeatures;
private final Collection<Set<String>> nodeHistoricalFeatures;
ESRestTestFeatureService(List<FeatureSpecification> featureSpecs, Set<Version> nodeVersions, Collection<Set<String>> nodeFeatures) {
List<FeatureSpecification> specs = new ArrayList<>(featureSpecs);
if (MetadataHolder.HISTORICAL_FEATURES != null) {
specs.add(MetadataHolder.HISTORICAL_FEATURES);
}
FeatureData featureData = FeatureData.createFromSpecifications(specs);
assert featureData.getNodeFeatures().isEmpty()
: Strings.format(
"Only historical features can be injected via ESRestTestCase#additionalTestOnlyHistoricalFeatures(), rejecting %s",
featureData.getNodeFeatures().keySet()
);
this.knownHistoricalFeatureNames = featureData.getHistoricalFeatures().lastEntry().getValue();
ESRestTestFeatureService(Set<Version> nodeVersions, Collection<Set<String>> nodeFeatures) {
this.nodeVersions = nodeVersions;
this.nodeFeatures = nodeFeatures;
this.nodeHistoricalFeatures = nodeVersions.stream()
.map(featureData.getHistoricalFeatures()::floorEntry)
.map(Map.Entry::getValue)
.toList();
}
public static boolean hasFeatureMetadata() {
return MetadataHolder.HISTORICAL_FEATURES != null;
}
private static <T> boolean checkCollection(Collection<T> coll, Predicate<T> pred, boolean any) {
@@ -83,11 +55,10 @@ class ESRestTestFeatureService implements TestFeatureService {
@Override
public boolean clusterHasFeature(String featureId, boolean any) {
if (checkCollection(nodeFeatures, s -> s.contains(featureId), any)
|| checkCollection(nodeHistoricalFeatures, s -> s.contains(featureId), any)) {
if (checkCollection(nodeFeatures, s -> s.contains(featureId), any)) {
return true;
}
if (MetadataHolder.FEATURE_NAMES.contains(featureId) || knownHistoricalFeatureNames.contains(featureId)) {
if (MetadataHolder.FEATURE_NAMES.contains(featureId)) {
return false; // feature known but not present
}
@@ -131,24 +102,20 @@ class ESRestTestFeatureService implements TestFeatureService {
return false;
}
public static boolean hasFeatureMetadata() {
return MetadataHolder.FEATURE_NAMES.isEmpty() == false;
}
private static class MetadataHolder {
private static final FeatureSpecification HISTORICAL_FEATURES;
private static final Set<String> FEATURE_NAMES;
static {
String metadataPath = System.getProperty("tests.features.metadata.path");
if (metadataPath == null) {
FEATURE_NAMES = emptySet();
HISTORICAL_FEATURES = null;
} else {
Set<String> featureNames = new HashSet<>();
Map<NodeFeature, Version> historicalFeatures = new HashMap<>();
loadFeatureMetadata(metadataPath, (key, value) -> {
if (key.equals("historical_features") && value instanceof Map<?, ?> map) {
for (var entry : map.entrySet()) {
historicalFeatures.put(new NodeFeature((String) entry.getKey()), Version.fromString((String) entry.getValue()));
}
}
if (key.equals("feature_names") && value instanceof Collection<?> collection) {
for (var entry : collection) {
featureNames.add((String) entry);
@@ -156,13 +123,6 @@ class ESRestTestFeatureService implements TestFeatureService {
}
});
FEATURE_NAMES = Collections.unmodifiableSet(featureNames);
Map<NodeFeature, Version> unmodifiableHistoricalFeatures = Collections.unmodifiableMap(historicalFeatures);
HISTORICAL_FEATURES = new FeatureSpecification() {
@Override
public Map<NodeFeature, Version> getHistoricalFeatures() {
return unmodifiableHistoricalFeatures;
}
};
}
}
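The VERSION_FEATURE_PATTERN constant above is now the only version-keyed escape hatch: a feature id of the form gte_v8.12.0 is evaluated against the nodes' semantic versions rather than against published features. The pattern and the checkCollection/nodeVersions members are from the source; the evaluation below is a simplified assumption of how such an id could be checked:

// Hedged sketch: satisfy a gte_vX.Y.Z pseudo-feature from node versions.
boolean versionFeatureSatisfied(String featureId, boolean any) {
    Matcher matcher = VERSION_FEATURE_PATTERN.matcher(featureId);
    if (matcher.matches() == false) {
        return false;
    }
    Version required = Version.fromString(matcher.group(1));
    return checkCollection(nodeVersions, v -> v.onOrAfter(required), any);
}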

@@ -9,9 +9,8 @@
package org.elasticsearch.extractor.features;
import org.elasticsearch.Version;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.core.CheckedConsumer;
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.xcontent.XContentGenerator;
@@ -25,14 +24,12 @@ import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.stream.Stream;
public class HistoricalFeaturesMetadataExtractor {
public class ClusterFeaturesMetadataExtractor {
private final ClassLoader classLoader;
static {
@@ -40,7 +37,7 @@ public class HistoricalFeaturesMetadataExtractor {
LogConfigurator.configureESLogging();
}
public HistoricalFeaturesMetadataExtractor(ClassLoader classLoader) {
public ClusterFeaturesMetadataExtractor(ClassLoader classLoader) {
this.classLoader = classLoader;
}
@@ -56,9 +53,7 @@
printUsageAndExit();
}
new HistoricalFeaturesMetadataExtractor(HistoricalFeaturesMetadataExtractor.class.getClassLoader()).generateMetadataFile(
outputFile
);
new ClusterFeaturesMetadataExtractor(ClusterFeaturesMetadataExtractor.class.getClassLoader()).generateMetadataFile(outputFile);
}
public void generateMetadataFile(Path outputFile) {
@@ -67,13 +62,7 @@
XContentGenerator generator = JsonXContent.jsonXContent.createGenerator(os)
) {
generator.writeStartObject();
extractHistoricalFeatureMetadata((historical, names) -> {
generator.writeFieldName("historical_features");
generator.writeStartObject();
for (Map.Entry<NodeFeature, Version> entry : historical.entrySet()) {
generator.writeStringField(entry.getKey().id(), entry.getValue().toString());
}
generator.writeEndObject();
extractClusterFeaturesMetadata(names -> {
generator.writeFieldName("feature_names");
generator.writeStartArray();
for (var entry : names) {
@@ -87,22 +76,19 @@
}
}
void extractHistoricalFeatureMetadata(CheckedBiConsumer<Map<NodeFeature, Version>, Set<String>, IOException> metadataConsumer)
throws IOException {
Map<NodeFeature, Version> historicalFeatures = new HashMap<>();
void extractClusterFeaturesMetadata(CheckedConsumer<Set<String>, IOException> metadataConsumer) throws IOException {
Set<String> featureNames = new HashSet<>();
ServiceLoader<FeatureSpecification> featureSpecLoader = ServiceLoader.load(FeatureSpecification.class, classLoader);
for (FeatureSpecification featureSpecification : featureSpecLoader) {
historicalFeatures.putAll(featureSpecification.getHistoricalFeatures());
Stream.concat(featureSpecification.getFeatures().stream(), featureSpecification.getTestFeatures().stream())
.map(NodeFeature::id)
.forEach(featureNames::add);
}
metadataConsumer.accept(historicalFeatures, featureNames);
metadataConsumer.accept(featureNames);
}
private static void printUsageAndExit() {
System.err.println("Usage: HistoricalFeaturesMetadataExtractor <output file>");
System.err.println("Usage: ClusterFeaturesMetadataExtractor <output file>");
System.exit(1);
}
}
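The extractor's output document is flattened to match: only a feature_names array remains. A small standalone usage sketch (output path illustrative; the JSON shape follows from the generator calls above):

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

static void demo() throws IOException {
    Path out = Paths.get("build", "cluster-features.json");
    new ClusterFeaturesMetadataExtractor(ClusterFeaturesMetadataExtractor.class.getClassLoader()).generateMetadataFile(out);
    // out now contains a document of the form {"feature_names":["test_features_enabled", ...]}
}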

@@ -9,8 +9,6 @@
package org.elasticsearch.extractor.features;
import org.elasticsearch.Version;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.json.JsonXContent;
@@ -21,7 +19,6 @@ import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
@@ -29,25 +26,19 @@
import static org.elasticsearch.xcontent.XContentParserConfiguration.EMPTY;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;
public class HistoricalFeaturesMetadataExtractorTests extends ESTestCase {
public class ClusterFeaturesMetadataExtractorTests extends ESTestCase {
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
public void testExtractHistoricalMetadata() throws IOException {
HistoricalFeaturesMetadataExtractor extractor = new HistoricalFeaturesMetadataExtractor(this.getClass().getClassLoader());
Map<NodeFeature, Version> nodeFeatureVersionMap = new HashMap<>();
ClusterFeaturesMetadataExtractor extractor = new ClusterFeaturesMetadataExtractor(this.getClass().getClassLoader());
Set<String> featureNamesSet = new HashSet<>();
extractor.extractHistoricalFeatureMetadata((historical, names) -> {
nodeFeatureVersionMap.putAll(historical);
featureNamesSet.addAll(names);
});
// assertThat(nodeFeatureVersionMap, not(anEmptyMap()));
extractor.extractClusterFeaturesMetadata(featureNamesSet::addAll);
assertThat(featureNamesSet, not(empty()));
assertThat(featureNamesSet, hasItem("test_features_enabled"));
@@ -55,11 +46,7 @@
extractor.generateMetadataFile(outputFile);
try (XContentParser parser = JsonXContent.jsonXContent.createParser(EMPTY, Files.newInputStream(outputFile))) {
Map<String, Object> parsedMap = parser.map();
assertThat(parsedMap, hasKey("historical_features"));
assertThat(parsedMap, hasKey("feature_names"));
@SuppressWarnings("unchecked")
Map<String, Object> historicalFeaturesMap = (Map<String, Object>) (parsedMap.get("historical_features"));
nodeFeatureVersionMap.forEach((key, value) -> assertThat(historicalFeaturesMap, hasEntry(key.id(), value.toString())));
@SuppressWarnings("unchecked")
Collection<String> featureNamesList = (Collection<String>) (parsedMap.get("feature_names"));

@@ -51,7 +51,6 @@ import java.util.TreeMap;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.LongStream;
import java.util.stream.Stream;
import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
@@ -207,10 +206,7 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase {
}
}
var features = Stream.concat(
new EsqlFeatures().getFeatures().stream(),
new EsqlFeatures().getHistoricalFeatures().keySet().stream()
).map(NodeFeature::id).collect(Collectors.toSet());
var features = new EsqlFeatures().getFeatures().stream().map(NodeFeature::id).collect(Collectors.toSet());
for (String feature : testCase.requiredCapabilities) {
var esqlFeature = "esql." + feature;

@@ -567,9 +567,6 @@ public class EsqlCapabilities {
for (NodeFeature feature : new EsqlFeatures().getFeatures()) {
caps.add(cap(feature));
}
for (NodeFeature feature : new EsqlFeatures().getHistoricalFeatures().keySet()) {
caps.add(cap(feature));
}
return Set.copyOf(caps);
}