Expose historical feature metadata to REST tests (#102110)
This introduces a new getHistoricalFeatures() method on ESRestTestCase which returns a map of historical feature-to-version mappings, loaded from the FeatureSpecification implementations of any plugins or modules in use by the current test suite. The metadata is generated by a new Gradle task at build time and injected into the test runtime via a system property.
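For illustration, a minimal sketch of how a REST test might consume this metadata. The subclass name, loop, and assertion below are hypothetical; the only pieces taken from this change are getHistoricalFeatures() on ESRestTestCase and the NodeFeature-to-Version shape of the returned map.

import java.util.Map;

import org.elasticsearch.Version;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.test.rest.ESRestTestCase;

// Hypothetical example test; only getHistoricalFeatures() comes from this change.
public class HistoricalFeatureLookupIT extends ESRestTestCase {
    public void testHistoricalFeatureVersions() {
        // Throws UnsupportedOperationException if the Gradle build did not inject
        // the tests.features.metadata.path system property (e.g. legacy test plugins).
        Map<NodeFeature, Version> features = getHistoricalFeatures();
        for (Map.Entry<NodeFeature, Version> entry : features.entrySet()) {
            // A historical feature is associated with a version no newer than the current one.
            assertTrue(entry.getValue().onOrBefore(Version.CURRENT));
        }
    }
}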
commit 5d379626df (parent f12afe46d6)
@@ -31,7 +31,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:

1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright

@@ -39,7 +39,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.

@@ -58,11 +58,11 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
id 'java'
id 'elasticsearch.global-build-info'
}

apply plugin:'elasticsearch.build'
group = 'org.acme'
description = "some example project"

repositories {
maven {
name = "local-test"

@@ -73,7 +73,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
}
mavenCentral()
}

dependencies {
jarHell 'org.elasticsearch:elasticsearch-core:current'
}

@@ -89,7 +89,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
* Side Public License, v 1.
*/
package org.elasticsearch;

public class SampleClass {
}
""".stripIndent()

@@ -117,7 +117,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
noticeFile.set(file("NOTICE"))
"""
when:
def result = gradleRunner("assemble").build()
def result = gradleRunner("assemble", "-x", "generateHistoricalFeaturesMetadata").build()
then:
result.task(":assemble").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world.jar").exists()

@@ -146,7 +146,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
}
licenseFile.set(file("LICENSE"))
noticeFile.set(file("NOTICE"))

tasks.named("forbiddenApisMain").configure {enabled = false }
tasks.named('checkstyleMain').configure { enabled = false }
tasks.named('loggerUsageCheck').configure { enabled = false }

@@ -29,7 +29,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
id 'elasticsearch.java'
id 'elasticsearch.publish'
}

version = "1.0"
group = 'org.acme'
description = "custom project description"

@@ -92,11 +92,11 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
id 'elasticsearch.publish'
id 'com.github.johnrengelman.shadow'
}

repositories {
mavenCentral()
}

dependencies {
implementation 'org.slf4j:log4j-over-slf4j:1.7.30'
shadow 'org.slf4j:slf4j-api:1.7.30'

@@ -110,8 +110,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
}
}
version = "1.0"
group = 'org.acme'
description = 'some description'
group = 'org.acme'
description = 'some description'
"""

when:

@@ -179,7 +179,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
}

dependencies {
shadow project(":someLib")
shadow project(":someLib")
}
publishing {
repositories {

@@ -192,10 +192,10 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
allprojects {
apply plugin: 'elasticsearch.java'
version = "1.0"
group = 'org.acme'
group = 'org.acme'
}

description = 'some description'
description = 'some description'
"""

when:

@@ -263,13 +263,13 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
id 'elasticsearch.publish'
id 'com.github.johnrengelman.shadow'
}

esplugin {
name = 'hello-world-plugin'
classname 'org.acme.HelloWorldPlugin'
description = "custom project description"
}

publishing {
repositories {
maven {

@@ -277,17 +277,17 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
}
}
}

// requires elasticsearch artifact available
tasks.named('bundlePlugin').configure { enabled = false }
licenseFile.set(file('license.txt'))
noticeFile.set(file('notice.txt'))
version = "1.0"
group = 'org.acme'
group = 'org.acme'
"""

when:
def result = gradleRunner('assemble', '--stacktrace').build()
def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateHistoricalFeaturesMetadata').build()

then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS

@@ -348,19 +348,19 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
id 'elasticsearch.internal-es-plugin'
id 'elasticsearch.publish'
}

esplugin {
name = 'hello-world-plugin'
classname 'org.acme.HelloWorldPlugin'
description = "custom project description"
}

// requires elasticsearch artifact available
tasks.named('bundlePlugin').configure { enabled = false }
licenseFile.set(file('license.txt'))
noticeFile.set(file('notice.txt'))
version = "2.0"
group = 'org.acme'
group = 'org.acme'
"""

when:

@@ -420,9 +420,9 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
apply plugin:'elasticsearch.publish'

version = "1.0"
group = 'org.acme'
group = 'org.acme'
description = "just a test project"

ext.projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0'])
"""

@@ -13,6 +13,7 @@ import groovy.lang.Closure;
import org.elasticsearch.gradle.internal.conventions.util.Util;
import org.elasticsearch.gradle.internal.info.BuildParams;
import org.elasticsearch.gradle.internal.precommit.JarHellPrecommitPlugin;
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;
import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;

@@ -36,6 +37,7 @@ public class BaseInternalPluginBuildPlugin implements Plugin<Project> {
project.getPluginManager().apply(PluginBuildPlugin.class);
project.getPluginManager().apply(JarHellPrecommitPlugin.class);
project.getPluginManager().apply(ElasticsearchJavaPlugin.class);
project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
// Clear default dependencies added by public PluginBuildPlugin as we add our
// own project dependencies for internal builds
// TODO remove once we removed default dependencies from PluginBuildPlugin

@@ -11,6 +11,7 @@ package org.elasticsearch.gradle.internal;
import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
import org.elasticsearch.gradle.internal.precommit.InternalPrecommitTasks;
import org.elasticsearch.gradle.internal.snyk.SnykDependencyMonitoringGradlePlugin;
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
import org.gradle.api.InvalidUserDataException;
import org.gradle.api.Plugin;
import org.gradle.api.Project;

@@ -61,6 +62,7 @@ public class BuildPlugin implements Plugin<Project> {
project.getPluginManager().apply(ElasticsearchJavadocPlugin.class);
project.getPluginManager().apply(DependenciesInfoPlugin.class);
project.getPluginManager().apply(SnykDependencyMonitoringGradlePlugin.class);
project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
InternalPrecommitTasks.create(project, true);
configureLicenseAndNotice(project);
}

@@ -0,0 +1,59 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.gradle.internal.test;

import org.elasticsearch.gradle.dependencies.CompileOnlyResolvePlugin;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.type.ArtifactTypeDefinition;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.SourceSetContainer;
import org.gradle.api.tasks.TaskProvider;

import java.util.Map;

/**
 * Extracts historical feature metadata into a machine-readable format for use in backward compatibility testing.
 */
public class HistoricalFeaturesMetadataPlugin implements Plugin<Project> {
    public static final String HISTORICAL_FEATURES_JSON = "historical-features.json";
    public static final String FEATURES_METADATA_TYPE = "features-metadata-json";
    public static final String FEATURES_METADATA_CONFIGURATION = "featuresMetadata";

    @Override
    public void apply(Project project) {
        Configuration featureMetadataExtractorConfig = project.getConfigurations().create("featuresMetadataExtractor", c -> {
            // Don't bother adding this dependency if the project doesn't exist which simplifies testing
            if (project.findProject(":test:metadata-extractor") != null) {
                c.defaultDependencies(d -> d.add(project.getDependencies().project(Map.of("path", ":test:metadata-extractor"))));
            }
        });

        SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
        SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);

        TaskProvider<HistoricalFeaturesMetadataTask> generateTask = project.getTasks()
            .register("generateHistoricalFeaturesMetadata", HistoricalFeaturesMetadataTask.class, task -> {
                task.setClasspath(
                    featureMetadataExtractorConfig.plus(mainSourceSet.getRuntimeClasspath())
                        .plus(project.getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME))
                );
                task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(HISTORICAL_FEATURES_JSON));
            });

        Configuration featuresMetadataArtifactConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> {
            c.setCanBeResolved(false);
            c.setCanBeConsumed(true);
            c.attributes(a -> { a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, FEATURES_METADATA_TYPE); });
        });

        project.getArtifacts().add(featuresMetadataArtifactConfig.getName(), generateTask);
    }
}

@@ -0,0 +1,77 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.gradle.internal.test;

import org.elasticsearch.gradle.LoggedExec;
import org.gradle.api.DefaultTask;
import org.gradle.api.file.ConfigurableFileCollection;
import org.gradle.api.file.FileCollection;
import org.gradle.api.file.RegularFileProperty;
import org.gradle.api.tasks.CacheableTask;
import org.gradle.api.tasks.Classpath;
import org.gradle.api.tasks.OutputFile;
import org.gradle.api.tasks.TaskAction;
import org.gradle.process.ExecOperations;
import org.gradle.workers.WorkAction;
import org.gradle.workers.WorkParameters;
import org.gradle.workers.WorkerExecutor;

import javax.inject.Inject;

@CacheableTask
public abstract class HistoricalFeaturesMetadataTask extends DefaultTask {
    private FileCollection classpath;

    @OutputFile
    public abstract RegularFileProperty getOutputFile();

    @Classpath
    public FileCollection getClasspath() {
        return classpath;
    }

    public void setClasspath(FileCollection classpath) {
        this.classpath = classpath;
    }

    @Inject
    public abstract WorkerExecutor getWorkerExecutor();

    @TaskAction
    public void execute() {
        getWorkerExecutor().noIsolation().submit(HistoricalFeaturesMetadataWorkAction.class, params -> {
            params.getClasspath().setFrom(getClasspath());
            params.getOutputFile().set(getOutputFile());
        });
    }

    public interface HistoricalFeaturesWorkParameters extends WorkParameters {
        ConfigurableFileCollection getClasspath();

        RegularFileProperty getOutputFile();
    }

    public abstract static class HistoricalFeaturesMetadataWorkAction implements WorkAction<HistoricalFeaturesWorkParameters> {
        private final ExecOperations execOperations;

        @Inject
        public HistoricalFeaturesMetadataWorkAction(ExecOperations execOperations) {
            this.execOperations = execOperations;
        }

        @Override
        public void execute() {
            LoggedExec.javaexec(execOperations, spec -> {
                spec.getMainClass().set("org.elasticsearch.extractor.features.HistoricalFeaturesMetadataExtractor");
                spec.classpath(getParameters().getClasspath());
                spec.args(getParameters().getOutputFile().get().getAsFile().getAbsolutePath());
            });
        }
    }
}

@@ -21,6 +21,7 @@ import org.elasticsearch.gradle.internal.ElasticsearchJavaPlugin;
import org.elasticsearch.gradle.internal.ElasticsearchTestBasePlugin;
import org.elasticsearch.gradle.internal.InternalDistributionDownloadPlugin;
import org.elasticsearch.gradle.internal.info.BuildParams;
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
import org.elasticsearch.gradle.plugin.BasePluginBuildPlugin;
import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;

@@ -35,6 +36,7 @@ import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.artifacts.DependencySet;
import org.gradle.api.artifacts.ProjectDependency;
import org.gradle.api.artifacts.type.ArtifactTypeDefinition;
import org.gradle.api.attributes.Attribute;

@@ -74,6 +76,9 @@ public class RestTestBasePlugin implements Plugin<Project> {
private static final String PLUGINS_CONFIGURATION = "clusterPlugins";
private static final String EXTRACTED_PLUGINS_CONFIGURATION = "extractedPlugins";
private static final Attribute<String> CONFIGURATION_ATTRIBUTE = Attribute.of("test-cluster-artifacts", String.class);
private static final String FEATURES_METADATA_CONFIGURATION = "featuresMetadataDeps";
private static final String DEFAULT_DISTRO_FEATURES_METADATA_CONFIGURATION = "defaultDistrofeaturesMetadataDeps";
private static final String TESTS_FEATURES_METADATA_PATH = "tests.features.metadata.path";

private final ProviderFactory providerFactory;

@@ -107,6 +112,36 @@ public class RestTestBasePlugin implements Plugin<Project> {
extractedPluginsConfiguration.extendsFrom(pluginsConfiguration);
configureArtifactTransforms(project);

// Create configuration for aggregating historical feature metadata
Configuration featureMetadataConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> {
c.setCanBeConsumed(false);
c.setCanBeResolved(true);
c.attributes(
a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
);
c.defaultDependencies(d -> d.add(project.getDependencies().project(Map.of("path", ":server"))));
c.withDependencies(dependencies -> {
// We can't just use Configuration#extendsFrom() here as we'd inherit the wrong project configuration
copyDependencies(project, dependencies, modulesConfiguration);
copyDependencies(project, dependencies, pluginsConfiguration);
});
});

Configuration defaultDistroFeatureMetadataConfig = project.getConfigurations()
.create(DEFAULT_DISTRO_FEATURES_METADATA_CONFIGURATION, c -> {
c.setCanBeConsumed(false);
c.setCanBeResolved(true);
c.attributes(
a -> a.attribute(
ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE,
HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE
)
);
c.defaultDependencies(
d -> d.add(project.getDependencies().project(Map.of("path", ":distribution", "configuration", "featuresMetadata")))
);
});

// For plugin and module projects, register the current project plugin bundle as a dependency
project.getPluginManager().withPlugin("elasticsearch.esplugin", plugin -> {
if (GradleUtils.isModuleProject(project.getPath())) {

@@ -124,6 +159,10 @@ public class RestTestBasePlugin implements Plugin<Project> {
task.dependsOn(integTestDistro, modulesConfiguration);
registerDistributionInputs(task, integTestDistro);

// Pass feature metadata on to tests
task.getInputs().files(featureMetadataConfig).withPathSensitivity(PathSensitivity.NONE);
nonInputSystemProperties.systemProperty(TESTS_FEATURES_METADATA_PATH, () -> featureMetadataConfig.getAsPath());

// Enable parallel execution for these tests since each test gets its own cluster
task.setMaxParallelForks(task.getProject().getGradle().getStartParameter().getMaxWorkerCount() / 2);
nonInputSystemProperties.systemProperty(TESTS_MAX_PARALLEL_FORKS_SYSPROP, () -> String.valueOf(task.getMaxParallelForks()));

@@ -163,6 +202,11 @@ public class RestTestBasePlugin implements Plugin<Project> {
DEFAULT_DISTRIBUTION_SYSPROP,
providerFactory.provider(() -> defaultDistro.getExtracted().getSingleFile().getPath())
);

// If we are using the default distribution we need to register all module feature metadata
task.getInputs().files(defaultDistroFeatureMetadataConfig).withPathSensitivity(PathSensitivity.NONE);
nonInputSystemProperties.systemProperty(TESTS_FEATURES_METADATA_PATH, defaultDistroFeatureMetadataConfig::getAsPath);

return null;
}
});

@@ -198,6 +242,14 @@ public class RestTestBasePlugin implements Plugin<Project> {
});
}

private void copyDependencies(Project project, DependencySet dependencies, Configuration configuration) {
configuration.getDependencies()
.stream()
.filter(d -> d instanceof ProjectDependency)
.map(d -> project.getDependencies().project(Map.of("path", ((ProjectDependency) d).getDependencyProject().getPath())))
.forEach(dependencies::add);
}

private ElasticsearchDistribution createDistribution(Project project, String name, String version) {
return createDistribution(project, name, version, null);
}

@@ -14,6 +14,7 @@ import org.elasticsearch.gradle.internal.ConcatFilesTask
import org.elasticsearch.gradle.internal.DependenciesInfoPlugin
import org.elasticsearch.gradle.internal.NoticeTask
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin

import java.nio.file.Files
import java.nio.file.Path

@@ -30,6 +31,15 @@ configurations {
attribute(Category.CATEGORY_ATTRIBUTE, project.getObjects().named(Category.class, Category.DOCUMENTATION))
}
}
featuresMetadata {
attributes {
attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
}
}
}

dependencies {
featuresMetadata project(':server')
}

def thisProj = project

@@ -196,6 +206,7 @@ project.rootProject.subprojects.findAll { it.parent.path == ':modules' }.each {
}

distro.copyModule(processDefaultOutputsTaskProvider, module)
dependencies.add('featuresMetadata', module)
if (module.name.startsWith('transport-') || (BuildParams.snapshotBuild == false && module.name == 'apm')) {
distro.copyModule(processIntegTestOutputsTaskProvider, module)
}

@@ -214,6 +225,7 @@ xpack.subprojects.findAll { it.parent == xpack }.each { Project xpackModule ->
}
}
distro.copyModule(processDefaultOutputsTaskProvider, xpackModule)
dependencies.add('featuresMetadata', xpackModule)
if (xpackModule.name.equals('core') || xpackModule.name.equals('security')) {
distro.copyModule(processIntegTestOutputsTaskProvider, xpackModule)
}

@@ -106,7 +106,8 @@ List projects = [
'test:logger-usage',
'test:test-clusters',
'test:x-content',
'test:yaml-rest-runner'
'test:yaml-rest-runner',
'test:metadata-extractor'
]

/**

@@ -52,6 +52,7 @@ import org.elasticsearch.core.IOUtils;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.PathUtils;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.health.node.selection.HealthNode;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;

@@ -75,6 +76,7 @@ import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UncheckedIOException;
import java.nio.CharBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

@@ -90,6 +92,7 @@ import java.security.cert.CertificateException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;

@@ -132,6 +135,8 @@ public abstract class ESRestTestCase extends ESTestCase {
public static final String CLIENT_SOCKET_TIMEOUT = "client.socket.timeout";
public static final String CLIENT_PATH_PREFIX = "client.path.prefix";

private static Map<NodeFeature, Version> historicalFeatures;

/**
* Convert the entity from a {@link Response} into a map of maps.
*/

@@ -2213,4 +2218,31 @@ public abstract class ESRestTestCase extends ESTestCase {
}
}

protected Map<NodeFeature, Version> getHistoricalFeatures() {
if (historicalFeatures == null) {
Map<NodeFeature, Version> historicalFeaturesMap = new HashMap<>();
String metadataPath = System.getProperty("tests.features.metadata.path");
if (metadataPath == null) {
throw new UnsupportedOperationException("Historical features information is unavailable when using legacy test plugins.");
}

String[] metadataFiles = metadataPath.split(System.getProperty("path.separator"));
for (String metadataFile : metadataFiles) {
try (
InputStream in = Files.newInputStream(PathUtils.get(metadataFile));
XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, in)
) {
for (Map.Entry<String, String> entry : parser.mapStrings().entrySet()) {
historicalFeaturesMap.put(new NodeFeature(entry.getKey()), Version.fromString(entry.getValue()));
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}

historicalFeatures = Collections.unmodifiableMap(historicalFeaturesMap);
}

return historicalFeatures;
}
}

@@ -0,0 +1,8 @@
plugins {
  id 'elasticsearch.java'
}

dependencies {
  implementation project(':server')
  testImplementation project(':test:framework')
}

@@ -0,0 +1,82 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.extractor.features;

import org.elasticsearch.Version;
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.xcontent.XContentGenerator;
import org.elasticsearch.xcontent.json.JsonXContent;

import java.io.IOException;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.HashMap;
import java.util.Map;
import java.util.ServiceLoader;

public class HistoricalFeaturesMetadataExtractor {
    private final ClassLoader classLoader;

    public HistoricalFeaturesMetadataExtractor(ClassLoader classLoader) {
        this.classLoader = classLoader;
    }

    public static void main(String[] args) {
        if (args.length != 1) {
            printUsageAndExit();
        }

        Path outputFile = null;
        try {
            outputFile = Paths.get(args[0]);
        } catch (InvalidPathException e) {
            printUsageAndExit();
        }

        new HistoricalFeaturesMetadataExtractor(HistoricalFeaturesMetadataExtractor.class.getClassLoader()).generateMetadataFile(
            outputFile
        );
    }

    public void generateMetadataFile(Path outputFile) {
        try (
            OutputStream os = Files.newOutputStream(outputFile, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE);
            XContentGenerator generator = JsonXContent.jsonXContent.createGenerator(os)
        ) {
            generator.writeStartObject();
            for (Map.Entry<NodeFeature, Version> entry : extractHistoricalFeatureMetadata().entrySet()) {
                generator.writeStringField(entry.getKey().id(), entry.getValue().toString());
            }
            generator.writeEndObject();
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }

    public Map<NodeFeature, Version> extractHistoricalFeatureMetadata() {
        Map<NodeFeature, Version> historicalFeatures = new HashMap<>();
        ServiceLoader<FeatureSpecification> featureSpecLoader = ServiceLoader.load(FeatureSpecification.class, classLoader);
        for (FeatureSpecification featureSpecification : featureSpecLoader) {
            historicalFeatures.putAll(featureSpecification.getHistoricalFeatures());
        }

        return historicalFeatures;
    }

    private static void printUsageAndExit() {
        System.err.println("Usage: HistoricalFeaturesMetadataExtractor <output file>");
        System.exit(1);
    }
}

@@ -0,0 +1,48 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.extractor.features;

import org.elasticsearch.Version;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.json.JsonXContent;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;

import static org.elasticsearch.xcontent.XContentParserConfiguration.EMPTY;
import static org.hamcrest.Matchers.anEmptyMap;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.not;

public class HistoricalFeaturesMetadataExtractorTests extends ESTestCase {

    @Rule
    public TemporaryFolder temporaryFolder = new TemporaryFolder();

    public void testExtractHistoricalMetadata() throws IOException {
        HistoricalFeaturesMetadataExtractor extractor = new HistoricalFeaturesMetadataExtractor(this.getClass().getClassLoader());
        Map<NodeFeature, Version> nodeFeatureVersionMap = extractor.extractHistoricalFeatureMetadata();
        assertThat(nodeFeatureVersionMap, not(anEmptyMap()));

        Path outputFile = temporaryFolder.newFile().toPath();
        extractor.generateMetadataFile(outputFile);
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(EMPTY, Files.newInputStream(outputFile))) {
            Map<String, String> parsedMap = parser.mapStrings();
            for (Map.Entry<NodeFeature, Version> entry : nodeFeatureVersionMap.entrySet()) {
                assertThat(parsedMap, hasEntry(entry.getKey().id(), entry.getValue().toString()));
            }
        }
    }
}