elasticsearch/build.gradle


/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import com.avast.gradle.dockercompose.tasks.ComposePull
import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin
import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.internal.BuildPlugin
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.gradle.plugins.ide.eclipse.model.AccessRule
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import org.gradle.util.DistributionLocator
import org.gradle.util.GradleVersion
import static org.elasticsearch.gradle.util.GradleUtils.maybeConfigure
import org.gradle.plugins.ide.eclipse.model.ProjectDependency
import org.elasticsearch.gradle.testclusters.TestClustersPlugin
import org.elasticsearch.gradle.internal.test.RestTestBasePlugin
import org.elasticsearch.gradle.internal.InternalPluginBuildPlugin
import org.elasticsearch.gradle.internal.InternalTestClustersPlugin
plugins {
id 'lifecycle-base'
id 'elasticsearch.docker-support'
id 'elasticsearch.global-build-info'
id 'elasticsearch.build-scan'
id 'elasticsearch.build-complete'
id 'elasticsearch.jdk-download'
id 'elasticsearch.internal-distribution-download'
id 'elasticsearch.runtime-jdk-provision'
id 'elasticsearch.ide'
id 'elasticsearch.forbidden-dependencies'
id 'elasticsearch.formatting'
id 'elasticsearch.local-distribution'
id 'elasticsearch.fips'
id 'elasticsearch.internal-testclusters'
id 'elasticsearch.run'
id "com.diffplug.spotless" version "5.12.5" apply false
}
// common maven publishing configuration
allprojects {
group = 'org.elasticsearch'
version = VersionProperties.elasticsearch
description = "Elasticsearch subproject ${project.path}"
}
String licenseCommit
if (VersionProperties.elasticsearch.toString().endsWith('-SNAPSHOT')) {
licenseCommit = BuildParams.gitRevision ?: "master" // leniency for non-git builds
} else {
licenseCommit = "v${version}"
}
subprojects {
// We disable this plugin for now until we have shaken out the issues we see,
// e.g. see https://github.com/elastic/elasticsearch/issues/72169
// apply plugin:'elasticsearch.internal-test-rerun'
plugins.withType(BuildPlugin).whenPluginAdded {
project.licenseFile = project.rootProject.file('licenses/SSPL-1.0+ELASTIC-LICENSE-2.0.txt')
project.noticeFile = project.rootProject.file('NOTICE.txt')
}
plugins.withType(InternalPluginBuildPlugin).whenPluginAdded {
project.dependencies {
compileOnly project(":server")
testImplementation project(":test:framework")
}
}
// Ultimately the RestTestBasePlugin should apply the InternalTestClustersPlugin itself instead of TestClustersPlugin,
// but this requires a major rework of the functional test infrastructure.
// TODO: This will be addressed once we have https://github.com/elastic/elasticsearch/issues/71593 resolved
project.plugins.withType(RestTestBasePlugin) {
project.plugins.apply(InternalTestClustersPlugin)
}
}
/**
* This is a convenience method for declaring test artifact dependencies provided by the internal
* test artifact plugin. It is shorthand for the longer dependency notation with an explicit capability
* declaration like this:
*
* testImplementation(project(xpackModule('repositories-metering-api'))) {
* capabilities {
* requireCapability("org.elasticsearch.gradle:repositories-metering-api-test-artifacts")
* }
* }
*
* */
ext.testArtifact = { p, String name = "test" ->
def projectDependency = p.dependencies.create(p)
projectDependency.capabilities {
requireCapabilities("org.elasticsearch.gradle:${projectDependency.name}-${name}-artifacts")
};
}
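// Usage sketch (module paths illustrative): the helper returns the configured dependency,
// so the long-form capability notation from the javadoc above collapses to:
//
//   dependencies {
//     testImplementation testArtifact(project(':x-pack:plugin:repositories-metering-api'))
//     testImplementation testArtifact(project(':server'), 'internalClusterTest')
//   }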
tasks.register("updateCIBwcVersions") {
doLast {
File yml = file(".ci/bwcVersions")
yml.text = ""
yml << "BWC_VERSION:\n"
BuildParams.bwcVersions.indexCompatible.each {
yml << " - \"$it\"\n"
}
}
}
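// For reference, the generated .ci/bwcVersions has this shape (version values illustrative):
//
//   BWC_VERSION:
//     - "7.17.1"
//     - "8.1.0"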
// build metadata from the previous build, contains e.g. hashes for bwc builds
String buildMetadataValue = System.getenv('BUILD_METADATA')
if (buildMetadataValue == null) {
buildMetadataValue = ''
}
Map<String, String> buildMetadataMap = buildMetadataValue.tokenize(';').collectEntries {
def (String key, String value) = it.split('=')
return [key, value]
}
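// Expected BUILD_METADATA shape, a semicolon-separated list of key=value pairs (values illustrative):
//   BUILD_METADATA="bwc_refspec_7.x=a1b2c3d;build_id=42"
// which parses into the map [bwc_refspec_7.x: 'a1b2c3d', build_id: '42'].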
// injecting groovy property variables into all projects
allprojects {
project.ext {
// for ide hacks...
isEclipse = System.getProperty("eclipse.launcher") != null || // Detects gradle launched from Eclipse's IDE
System.getProperty("eclipse.application") != null || // Detects gradle launched from the Eclipse compiler server
gradle.startParameter.taskNames.contains('eclipse') || // Detects gradle launched from the command line to do eclipse stuff
gradle.startParameter.taskNames.contains('cleanEclipse')
buildMetadata = buildMetadataMap
}
}
tasks.register("verifyVersions") {
doLast {
if (gradle.startParameter.isOffline()) {
throw new GradleException("Must run in online mode to verify versions")
}
// Read the list from maven central.
// Fetch the metadata and parse the xml into Version instances, because that is more straightforward
// here than reusing bwcVersions (a VersionCollection).
new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s ->
BuildParams.bwcVersions.compareToAuthoritative(
new XmlParser().parse(s)
.versioning.versions.version
.collect { it.text() }.findAll { it ==~ /\d+\.\d+\.\d+/ }
.collect { Version.fromString(it) }
)
}
String ciYml = file(".ci/bwcVersions").text
BuildParams.bwcVersions.indexCompatible.each {
if (ciYml.contains("\"$it\"\n") == false) {
throw new Exception(".ci/bwcVersions is outdated, run `./gradlew updateCIBwcVersions` and check in the results");
}
}
}
}
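// Note: verifyVersions needs network access (it reads maven-metadata.xml from Maven Central),
// so it deliberately fails under --offline; an outdated .ci/bwcVersions is repaired by
// running `./gradlew updateCIBwcVersions` and committing the result.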
/*
* When adding backcompat behavior that spans major versions, temporarily
* disabling the backcompat tests is necessary. This flag controls
* the enabled state of every bwc task. It should be set back to true
* after the backport of the backcompat code is complete.
*/
boolean bwc_tests_enabled = true
// place a PR link here when committing bwc changes:
String bwc_tests_disabled_issue = ""
/*
* FIPS 140-2 behavior was fixed in 7.11.0. Before that there was no way to run elasticsearch in a
* JVM that is properly configured to be in fips mode with BCFIPS. For now we need to disable
* all bwc testing in fips mode.
*/
if (BuildParams.inFipsJvm) {
bwc_tests_enabled = false
bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/issues/66772"
}
if (bwc_tests_enabled == false) {
if (bwc_tests_disabled_issue.isEmpty()) {
throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")
}
println "========================= WARNING ========================="
println " Backwards compatibility tests are disabled!"
println "See ${bwc_tests_disabled_issue}"
println "==========================================================="
}
if (project.gradle.startParameter.taskNames.find { it.startsWith("checkPart") } != null) {
// Disable BWC tests for checkPart* tasks, since these are expected to run in their own dedicated check run
bwc_tests_enabled = false
}
subprojects {
ext.bwc_tests_enabled = bwc_tests_enabled
}
tasks.register("verifyBwcTestsEnabled") {
doLast {
if (bwc_tests_enabled == false) {
throw new GradleException('Bwc tests are disabled. They must be re-enabled after completing backcompat behavior backporting.')
}
}
}
tasks.register("branchConsistency") {
description 'Ensures this branch is internally consistent. For example, that version constants match released versions.'
group 'Verification'
dependsOn ":verifyVersions", ":verifyBwcTestsEnabled"
}
allprojects {
// ignore missing javadocs
tasks.withType(Javadoc).configureEach { Javadoc javadoc ->
// the -quiet here is because of a bug in gradle, in that adding a string option
// by itself is not added to the options. By adding quiet, both this option and
// the "value" -quiet is added, separated by a space. This is ok since the javadoc
// command already adds -quiet, so we are just duplicating it
// see https://discuss.gradle.org/t/add-custom-javadoc-option-that-does-not-take-an-argument/5959
javadoc.options.encoding = 'UTF8'
javadoc.options.addStringOption('Xdoclint:all,-missing', '-quiet')
}
project.afterEvaluate {
// Handle javadoc dependencies across projects. Order matters: the linksOffline for
// org.elasticsearch:elasticsearch must be the last one or all the links for the
// other packages (e.g. org.elasticsearch.client) will point to server rather than
// their own artifacts.
if (project.plugins.hasPlugin(BuildPlugin) || project.plugins.hasPlugin(PluginBuildPlugin)) {
String artifactsHost = VersionProperties.elasticsearch.endsWith("-SNAPSHOT") ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
Closure sortClosure = { a, b -> b.group <=> a.group }
Closure depJavadocClosure = { shadowed, dep ->
if ((dep instanceof ProjectDependency) == false) {
return
}
Project upstreamProject = dep.dependencyProject
if (upstreamProject == null) {
return
}
if (shadowed) {
/*
* Include the source of shadowed upstream projects so we don't
* have to publish their javadoc.
*/
project.evaluationDependsOn(upstreamProject.path)
project.javadoc.source += upstreamProject.javadoc.source
/*
* Instead we need the upstream project's javadoc classpath so
* we don't barf on the classes that it references.
*/
project.javadoc.classpath += upstreamProject.javadoc.classpath
} else {
// Link to non-shadowed dependent projects
project.javadoc.dependsOn "${upstreamProject.path}:javadoc"
String externalLinkName = upstreamProject.archivesBaseName
String artifactPath = dep.group.replaceAll('\\.', '/') + '/' + externalLinkName.replaceAll('\\.', '/') + '/' + dep.version
project.javadoc.options.linksOffline artifactsHost + "/javadoc/" + artifactPath, "${upstreamProject.buildDir}/docs/javadoc/"
}
}
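// For a hypothetical non-shadowed project dependency with coordinates
// org.elasticsearch:some-lib:1.0.0, the closure above wires linksOffline to
//   https://artifacts.elastic.co/javadoc/org/elasticsearch/some-lib/1.0.0
// (snapshot builds resolve against https://snapshots.elastic.co instead).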
boolean hasShadow = project.plugins.hasPlugin(ShadowPlugin)
project.configurations.compileClasspath.dependencies
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(hasShadow, c) })
project.configurations.compileOnly.dependencies
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(false, c) })
if (hasShadow) {
// include any dependencies for shadow JAR projects that are *not* bundled in the shadow JAR
project.configurations.shadow.dependencies
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(false, c) })
}
}
}
}
// Ensure similar tasks in dependent projects run first. The projectsEvaluated here is
// important because, while dependencies.all will pick up future dependencies,
// it is not necessarily true that the task exists in both projects at the time
// the dependency is added.
gradle.projectsEvaluated {
allprojects {
if (project.path == ':test:framework') {
// skip ordering constraints here: :test:framework:test cannot run both before and after :server:test
return
}
tasks.matching { it.name.equals('integTest') }.configureEach { integTestTask ->
integTestTask.mustRunAfter tasks.matching { it.name.equals("test") }
}
configurations.matching { it.canBeResolved }.all { Configuration configuration ->
dependencies.matching { it instanceof ProjectDependency }.all { ProjectDependency dep ->
Project upstreamProject = dep.dependencyProject
if (project.path != upstreamProject?.path) {
for (String taskName : ['test', 'integTest']) {
project.tasks.matching { it.name == taskName }.configureEach {task ->
task.shouldRunAfter(upstreamProject.tasks.matching { upStreamTask -> upStreamTask.name == taskName })
}
}
}
}
}
}
}
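// Net effect of the ordering rules above (project names illustrative): when both are scheduled,
// a project's `integTest` runs after its `test`, and for a dependency edge like
// :x-pack:plugin -> :server, :x-pack:plugin:test is preferred to run after :server:test.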
// eclipse configuration
allprojects {
apply plugin: 'eclipse'
// Name all the non-root projects after their path so that paths get grouped together when imported into eclipse.
if (path != ':') {
eclipse.project.name = path
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
eclipse.project.name = eclipse.project.name.replace(':', '_')
}
}
plugins.withType(JavaBasePlugin) {
java.modularity.inferModulePath.set(false)
eclipse.classpath.file.whenMerged { classpath ->
/*
* give each source folder a unique corresponding output folder
* outside of the usual `build` folder. We can't put the build
* in the usual build folder because eclipse becomes *very* sad
* if we delete it. Which `gradlew clean` does all the time.
*/
int i = 0;
classpath.entries.findAll { it instanceof SourceFolder }.each { folder ->
i++;
folder.output = 'out/eclipse/' + i
}
// Starting with Gradle 6.7, test dependencies are not exposed by eclipse
// projects by default. This breaks project dependencies that use the `java-test-fixtures` plugin
// or that depend on other projects' manually declared testArtifacts configurations.
// This issue is tracked in the Gradle issue tracker.
// See https://github.com/gradle/gradle/issues/14932 for further details
classpath.entries.findAll {
it instanceof ProjectDependency
}.each {
it.entryAttributes.remove('without_test_code')
}
}
}
/*
* Allow accessing com/sun/net/httpserver in projects that have
* configured forbidden apis to allow it.
*/
plugins.withType(ForbiddenApisPlugin) {
eclipse.classpath.file.whenMerged { classpath ->
if (false == forbiddenApisTest.bundledSignatures.contains('jdk-non-portable')) {
classpath.entries
.findAll { it.kind == "con" && it.toString().contains("org.eclipse.jdt.launching.JRE_CONTAINER") }
.each {
it.accessRules.add(new AccessRule("accessible", "com/sun/net/httpserver/*"))
}
}
}
}
File licenseHeaderFile
String prefix = ':x-pack'
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
prefix = prefix.replace(':', '_')
}
if (eclipse.project.name.startsWith(prefix)) {
licenseHeaderFile = new File(project.rootDir, 'build-tools-internal/src/main/resources/license-headers/elastic-license-2.0-header.txt')
} else {
licenseHeaderFile = new File(project.rootDir, 'build-tools-internal/src/main/resources/license-headers/sspl+elastic-license-header.txt')
}
String lineSeparator = Os.isFamily(Os.FAMILY_WINDOWS) ? '\\\\r\\\\n' : '\\\\n'
String licenseHeader = licenseHeaderFile.getText('UTF-8').replace(System.lineSeparator(), lineSeparator)
tasks.register('copyEclipseSettings', Copy) {
mustRunAfter 'wipeEclipseSettings'
// TODO: "package this up" for external builds
from new File(project.rootDir, 'build-tools-internal/src/main/resources/eclipse.settings')
into '.settings'
filter { it.replaceAll('@@LICENSE_HEADER_TEXT@@', licenseHeader) }
}
// otherwise .settings is not nuked entirely
tasks.register('wipeEclipseSettings', Delete) {
delete '.settings'
}
tasks.named('cleanEclipse') { dependsOn 'wipeEclipseSettings' }
// otherwise the eclipse merging is *super confusing*
tasks.named('eclipse') { dependsOn 'cleanEclipse', 'copyEclipseSettings' }
}
tasks.named("wrapper").configure {
distributionType = 'ALL'
doLast {
final DistributionLocator locator = new DistributionLocator()
final GradleVersion version = GradleVersion.version(wrapper.gradleVersion)
final URI distributionUri = locator.getDistributionFor(version, wrapper.distributionType.name().toLowerCase(Locale.ENGLISH))
final URI sha256Uri = new URI(distributionUri.toString() + ".sha256")
final String sha256Sum = new String(sha256Uri.toURL().bytes)
wrapper.getPropertiesFile() << "distributionSha256Sum=${sha256Sum}\n"
println "Added checksum to wrapper properties"
// Update build-tools to reflect the Gradle upgrade
// TODO: we can remove this once we have tests to make sure older versions work.
project(':build-tools').file('src/main/resources/minimumGradleVersion').text = gradleVersion
println "Updated minimum Gradle Version"
}
}
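// Typical flow, assuming a Gradle upgrade (version illustrative):
//   ./gradlew wrapper --gradle-version 7.2
// regenerates the wrapper scripts, and the doLast block above then appends a line like
//   distributionSha256Sum=<sha256 of the matching -all distribution>
// to gradle-wrapper.properties and bumps the build-tools minimumGradleVersion file.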
gradle.projectsEvaluated {
subprojects {
/*
* Disable assemble/dependenciesInfo on all qa projects because we don't
* need to publish artifacts for them.
*/
if (project.name.equals('qa') || project.path.contains(':qa:')) {
maybeConfigure(project.tasks, 'assemble') {
it.enabled = false
}
maybeConfigure(project.tasks, 'dependenciesInfo') {
it.enabled = false
}
maybeConfigure(project.tasks, 'dependenciesGraph') {
it.enabled = false
}
}
}
// Having the same group and name for distinct projects causes Gradle to consider them equal when resolving
// dependencies, leading to hard-to-debug failures. Run a check across all projects to prevent this from happening.
// see: https://github.com/gradle/gradle/issues/847
Map coordsToProject = [:]
project.allprojects.forEach { p ->
String coords = "${p.group}:${p.name}"
if (coordsToProject.putIfAbsent(coords, p) != null) {
throw new GradleException(
"Detected that two projects: ${p.path} and ${coordsToProject[coords].path} " +
"have the same name and group: ${coords}. " +
"This doesn't currently work correctly in Gradle, see: " +
"https://github.com/gradle/gradle/issues/847"
)
}
}
}
allprojects {
tasks.register('resolveAllDependencies', org.elasticsearch.gradle.internal.ResolveAllDependencies) {
configs = project.configurations
if (project.path.contains("fixture")) {
dependsOn tasks.withType(ComposePull)
}
}
// helper task to print direct dependencies of a single task
project.tasks.addRule("Pattern: <taskName>Dependencies") { String taskName ->
if (taskName.endsWith("Dependencies") == false) {
return
}
if (project.tasks.findByName(taskName) != null) {
return
}
String realTaskName = taskName.substring(0, taskName.length() - "Dependencies".length())
Task realTask = project.tasks.findByName(realTaskName)
if (realTask == null) {
return
}
project.tasks.register(taskName) {
doLast {
println("${realTask.path} dependencies:")
for (Task dep : realTask.getTaskDependencies().getDependencies(realTask)) {
println(" - ${dep.path}")
}
}
}
}
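// Example (task path illustrative): `./gradlew :server:testDependencies` resolves the real task
// :server:test and prints each of its direct task dependencies, one per line.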
def checkPart1 = tasks.register('checkPart1')
def checkPart2 = tasks.register('checkPart2')
plugins.withId('lifecycle-base') {
if (project.path.startsWith(":x-pack:")) {
checkPart2.configure { dependsOn 'check' }
} else {
checkPart1.configure { dependsOn 'check' }
}
}
}
tasks.register("precommit") {
dependsOn gradle.includedBuild('build-tools').task(':precommit')
dependsOn gradle.includedBuild('build-tools-internal').task(':precommit')
}
tasks.named("checkPart1").configure {
dependsOn gradle.includedBuild('build-tools').task(':check')
dependsOn gradle.includedBuild('build-tools-internal').task(':check')
}
tasks.named("assemble").configure {
dependsOn gradle.includedBuild('build-tools').task(':assemble')
}
subprojects {
project.ext.disableTasks = { String... tasknames ->
for (String taskname : tasknames) {
project.tasks.named(taskname).configure { enabled = false }
}
}
}
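// Usage sketch from a subproject build script (task names illustrative):
//   disableTasks('forbiddenApisMain', 'thirdPartyAudit')
// which simply sets `enabled = false` on each named task.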
subprojects { Project subproj ->
plugins.withType(TestClustersPlugin).whenPluginAdded {
testClusters.all {
systemProperty "ingest.geoip.downloader.enabled.default", "false"
}
}
}