Apply 2-space indent to all gradle scripts (#48849)

Closes #48724. Update `.editorconfig` to make the Java settings the default
for all files, and then apply a 2-space indent to all `*.gradle` files.
Then reformat all the files.
Commit 3a3e5f6176 by Rory Hunter, 2019-11-13 10:14:04 +00:00 (committed via GitHub)
Parent: fbaf8c428d
184 changed files with 4122 additions and 4120 deletions
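
The substantive change is in `.editorconfig`: the whitespace rules previously scoped to `[*.java]` become the `[*]` default for every file, and a new `[*.gradle]` section narrows the indent to two spaces. As a reference, here is a minimal sketch of the resulting file, reconstructed from the diff below (the unchanged `[*.bat]` section omitted):

  [*]
  charset = utf-8
  trim_trailing_whitespace = true
  insert_final_newline = true
  indent_style = space
  indent_size = 4

  [*.gradle]
  indent_size = 2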

---- changed file ----

@@ -1,89 +1,89 @@
-import com.bettercloud.vault.VaultConfig;
+import com.bettercloud.vault.VaultConfig
-import com.bettercloud.vault.Vault;
+import com.bettercloud.vault.Vault

initscript {
  repositories {
    mavenCentral()
  }
  dependencies {
    classpath 'com.bettercloud:vault-java-driver:4.1.0'
  }
}

-boolean USE_ARTIFACTORY=false
+boolean USE_ARTIFACTORY = false

if (System.getenv('VAULT_ADDR') == null) {
  throw new GradleException("You must set the VAULT_ADDR environment variable to use this init script.")
}

if (System.getenv('VAULT_ROLE_ID') == null && System.getenv('VAULT_SECRET_ID') == null && System.getenv('VAULT_TOKEN') == null) {
  throw new GradleException("You must set either the VAULT_ROLE_ID and VAULT_SECRET_ID environment variables, " +
    "or the VAULT_TOKEN environment variable to use this init script.")
}

final String vaultToken = System.getenv('VAULT_TOKEN') ?: new Vault(
  new VaultConfig()
    .address(System.env.VAULT_ADDR)
    .engineVersion(1)
    .build()
)
  .withRetries(5, 1000)
  .auth()
  .loginByAppRole("approle", System.env.VAULT_ROLE_ID, System.env.VAULT_SECRET_ID)
-  .getAuthClientToken();
+  .getAuthClientToken()

final Vault vault = new Vault(
  new VaultConfig()
    .address(System.env.VAULT_ADDR)
    .engineVersion(1)
    .token(vaultToken)
    .build()
)
  .withRetries(5, 1000)

if (USE_ARTIFACTORY) {
-  final Map<String,String> artifactoryCredentials = vault.logical()
+  final Map<String, String> artifactoryCredentials = vault.logical()
    .read("secret/elasticsearch-ci/artifactory.elstc.co")
-    .getData();
+    .getData()
  logger.info("Using elastic artifactory repos")
  Closure configCache = {
    return {
      name "artifactory-gradle-release"
      url "https://artifactory.elstc.co/artifactory/gradle-release"
      credentials {
        username artifactoryCredentials.get("username")
        password artifactoryCredentials.get("token")
      }
    }
  }
  settingsEvaluated { settings ->
    settings.pluginManagement {
      repositories {
        maven configCache()
      }
    }
  }
  projectsLoaded {
    allprojects {
      buildscript {
        repositories {
          maven configCache()
        }
      }
      repositories {
        maven configCache()
      }
    }
  }
}

projectsLoaded {
  rootProject {
    project.pluginManager.withPlugin('com.gradle.build-scan') {
      buildScan.server = 'https://gradle-enterprise.elastic.co'
    }
  }
}
@@ -91,24 +91,24 @@ final String buildCacheUrl = System.getProperty('org.elasticsearch.build.cache.u
final boolean buildCachePush = Boolean.valueOf(System.getProperty('org.elasticsearch.build.cache.push', 'false'))

if (buildCacheUrl) {
-  final Map<String,String> buildCacheCredentials = vault.logical()
+  final Map<String, String> buildCacheCredentials = vault.logical()
    .read("secret/elasticsearch-ci/gradle-build-cache")
-    .getData();
+    .getData()
  gradle.settingsEvaluated { settings ->
    settings.buildCache {
      local {
        // Disable the local build cache in CI since we use ephemeral workers and it incurs an IO penalty
        enabled = false
      }
      remote(HttpBuildCache) {
        url = buildCacheUrl
        push = buildCachePush
        credentials {
          username = buildCacheCredentials.get("username")
          password = buildCacheCredentials.get("password")
        }
      }
    }
  }
}

---- changed file ----

@@ -2,12 +2,15 @@
root = true

-[*.java]
+[*]
charset = utf-8
-indent_style = space
-indent_size = 4
trim_trailing_whitespace = true
insert_final_newline = true
+indent_style = space
+indent_size = 4

[*.bat]
indent_size = 2
+
+[*.gradle]
+indent_size = 2
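
Note on precedence: EditorConfig applies matching sections in the order they appear, and the last section to set a property wins for a given file. A `build.gradle` therefore takes `charset`, `trim_trailing_whitespace`, `insert_final_newline` and `indent_style` from `[*]` but `indent_size = 2` from `[*.gradle]`, while `.java` sources keep the previous 4-space behaviour through the `[*]` defaults.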

---- changed file ----

@@ -29,16 +29,16 @@ archivesBaseName = 'elasticsearch-benchmarks'
test.enabled = false

dependencies {
  compile(project(":server")) {
    // JMH ships with the conflicting version 4.6. This prevents us from using jopt-simple in benchmarks (which should be ok) but allows
    // us to invoke the JMH uberjar as usual.
    exclude group: 'net.sf.jopt-simple', module: 'jopt-simple'
  }
  compile "org.openjdk.jmh:jmh-core:$versions.jmh"
  annotationProcessor "org.openjdk.jmh:jmh-generator-annprocess:$versions.jmh"
  // Dependencies of JMH
  runtime 'net.sf.jopt-simple:jopt-simple:4.6'
  runtime 'org.apache.commons:commons-math3:3.2'
}

compileJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked,-processing"
@@ -55,13 +55,13 @@ forbiddenApisMain.enabled = false
dependencyLicenses.enabled = false
dependenciesInfo.enabled = false

-thirdPartyAudit.ignoreViolations (
+thirdPartyAudit.ignoreViolations(
  // these classes intentionally use JDK internal API (and this is ok since the project is maintained by Oracle employees)
  'org.openjdk.jmh.profile.AbstractHotspotProfiler',
  'org.openjdk.jmh.profile.HotspotThreadProfiler',
  'org.openjdk.jmh.profile.HotspotClassloadingProfiler',
  'org.openjdk.jmh.profile.HotspotCompilationProfiler',
  'org.openjdk.jmh.profile.HotspotMemoryProfiler',
  'org.openjdk.jmh.profile.HotspotRuntimeProfiler',
  'org.openjdk.jmh.util.Utils'
)

---- changed file ----

@@ -32,10 +32,10 @@ import org.gradle.util.GradleVersion
import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure

plugins {
  id 'com.gradle.build-scan' version '2.4.2'
  id 'lifecycle-base'
  id 'elasticsearch.global-build-info'
  id "com.diffplug.gradle.spotless" version "3.24.2" apply false
}

apply plugin: 'nebula.info-scm'
@@ -106,7 +106,7 @@ subprojects {
  // is greater than the number of unformatted projects, this can be
  // switched to an exclude list, and eventualy removed completely.
  def projectPathsToFormat = [
    ':x-pack:plugin:enrich'
  ]

  if (projectPathsToFormat.contains(project.path)) {
@@ -136,14 +136,14 @@ subprojects {
BwcVersions versions = new BwcVersions(file('server/src/main/java/org/elasticsearch/Version.java').readLines('UTF-8'))

task updateCIBwcVersions() {
  doLast {
    File yml = file(".ci/bwcVersions")
    yml.text = ""
    yml << "BWC_VERSION:\n"
    versions.indexCompatible.each {
      yml << " - \"$it\"\n"
    }
  }
}

// build metadata from previous build, contains eg hashes for bwc builds
@@ -161,9 +161,9 @@ allprojects {
  project.ext {
    // for ide hacks...
    isEclipse = System.getProperty("eclipse.launcher") != null ||   // Detects gradle launched from Eclipse's IDE
      System.getProperty("eclipse.application") != null ||   // Detects gradle launched from the Eclipse compiler server
      gradle.startParameter.taskNames.contains('eclipse') ||   // Detects gradle launched from the command line to do eclipse stuff
      gradle.startParameter.taskNames.contains('cleanEclipse')
    isIdea = System.getProperty("idea.active") != null || gradle.startParameter.taskNames.contains('idea') || gradle.startParameter.taskNames.contains('cleanIdea')

    // for BWC testing
@@ -178,22 +178,22 @@ task verifyVersions {
    if (gradle.startParameter.isOffline()) {
      throw new GradleException("Must run in online mode to verify versions")
    }
    // Read the list from maven central.
    // Fetch the metadata an parse the xml into Version instances because it's more straight forward here
    // rather than bwcVersion ( VersionCollection ).
    new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s ->
      bwcVersions.compareToAuthoritative(
        new XmlParser().parse(s)
          .versioning.versions.version
          .collect { it.text() }.findAll { it ==~ /\d+\.\d+\.\d+/ }
          .collect { Version.fromString(it) }
      )
    }
    String ciYml = file(".ci/bwcVersions").text
    bwcVersions.indexCompatible.each {
      if (ciYml.contains("\"$it\"\n") == false) {
        throw new Exception(".ci/bwcVersions is outdated, run `./gradlew updateCIBwcVersions` and check in the results");
      }
    }
  }
}
@@ -247,7 +247,7 @@ allprojects {
    // the "value" -quiet is added, separated by a space. This is ok since the javadoc
    // command already adds -quiet, so we are just duplicating it
    // see https://discuss.gradle.org/t/add-custom-javadoc-option-that-does-not-take-an-argument/5959
-    javadoc.options.encoding='UTF8'
+    javadoc.options.encoding = 'UTF8'
    javadoc.options.addStringOption('Xdoclint:all,-missing', '-quiet')
  }
@@ -288,19 +288,19 @@ allprojects {
    }
    boolean hasShadow = project.plugins.hasPlugin(ShadowPlugin)
    project.configurations.compile.dependencies
      .findAll()
      .toSorted(sortClosure)
      .each({ c -> depJavadocClosure(hasShadow, c) })
    project.configurations.compileOnly.dependencies
      .findAll()
      .toSorted(sortClosure)
      .each({ c -> depJavadocClosure(false, c) })
    if (hasShadow) {
      // include any dependencies for shadow JAR projects that are *not* bundled in the shadow JAR
      project.configurations.shadow.dependencies
        .findAll()
        .toSorted(sortClosure)
        .each({ c -> depJavadocClosure(false, c) })
    }
  }
}
@@ -360,7 +360,7 @@ allprojects {
  }

  tasks.named('cleanIdea') {
    delete 'build-idea'
  }
}
@@ -399,9 +399,9 @@ allprojects {
      prefix = prefix.replace(':', '_')
    }
    if (eclipse.project.name.startsWith(prefix)) {
      licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/elastic-license-header.txt')
    } else {
      licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/oss-license-header.txt')
    }
    String lineSeparator = Os.isFamily(Os.FAMILY_WINDOWS) ? '\\\\r\\\\n' : '\\\\n'
@@ -411,7 +411,7 @@ allprojects {
      // TODO: "package this up" for external builds
      from new File(project.rootDir, 'buildSrc/src/main/resources/eclipse.settings')
      into '.settings'
-      filter{ it.replaceAll('@@LICENSE_HEADER_TEXT@@', licenseHeader)}
+      filter { it.replaceAll('@@LICENSE_HEADER_TEXT@@', licenseHeader) }
    }
    // otherwise .settings is not nuked entirely
    tasks.register('wipeEclipseSettings', Delete) {
@@ -428,13 +428,14 @@ class Run extends DefaultTask {
  boolean debug = false

  @Option(
    option = "debug-jvm",
    description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
  )
  public void setDebug(boolean enabled) {
    project.project(':distribution').run.debug = enabled
  }
}

task run(type: Run) {
  dependsOn ':distribution:run'
  description = 'Runs elasticsearch in the foreground'
@@ -443,20 +444,20 @@ task run(type: Run) {
}

wrapper {
  distributionType = 'ALL'
  doLast {
    final DistributionLocator locator = new DistributionLocator()
    final GradleVersion version = GradleVersion.version(wrapper.gradleVersion)
    final URI distributionUri = locator.getDistributionFor(version, wrapper.distributionType.name().toLowerCase(Locale.ENGLISH))
    final URI sha256Uri = new URI(distributionUri.toString() + ".sha256")
    final String sha256Sum = new String(sha256Uri.toURL().bytes)
    wrapper.getPropertiesFile() << "distributionSha256Sum=${sha256Sum}\n"
    println "Added checksum to wrapper properties"
    // Update build-tools to reflect the Gradle upgrade
    // TODO: we can remove this once we have tests to make sure older versions work.
    project(':build-tools').file('src/main/resources/minimumGradleVersion').text = gradleVersion
    println "Updated minimum Gradle Version"
  }
}

gradle.projectsEvaluated {
@@ -482,10 +483,10 @@ gradle.projectsEvaluated {
      String coords = "${p.group}:${p.name}"
      if (false == coordsToProject.putIfAbsent(coords, p)) {
        throw new GradleException(
          "Detected that two projects: ${p.path} and ${coordsToProject[coords].path} " +
            "have the same name and group: ${coords}. " +
            "This doesn't currently work correctly in Gradle, see: " +
            "https://github.com/gradle/gradle/issues/847"
        )
      }
    }
@@ -493,10 +494,10 @@ gradle.projectsEvaluated {
allprojects {
  tasks.register('resolveAllDependencies') {
-    dependsOn tasks.matching { it.name == "pullFixture"}
+    dependsOn tasks.matching { it.name == "pullFixture" }
    doLast {
      configurations.findAll { it.isCanBeResolved() }.each { it.resolve() }
    }
  }

  // helper task to print direct dependencies of a single task
@@ -525,10 +526,10 @@ allprojects {
  def checkPart1 = tasks.register('checkPart1')
  def checkPart2 = tasks.register('checkPart2')
  plugins.withId('lifecycle-base') {
    if (project.path.startsWith(":x-pack:")) {
      checkPart2.configure { dependsOn 'check' }
    } else {
      checkPart1.configure { dependsOn 'check' }
    }
  }
}

---- changed file ----

@@ -28,7 +28,7 @@ group = 'org.elasticsearch.gradle'
String minimumGradleVersion = file('src/main/resources/minimumGradleVersion').text.trim()
if (GradleVersion.current() < GradleVersion.version(minimumGradleVersion)) {
  throw new GradleException("Gradle ${minimumGradleVersion}+ is required to build elasticsearch")
}

if (project == rootProject) {
@@ -72,21 +72,21 @@ allprojects {
}

sourceSets {
  // We have a few classes that need to be compiled for older java versions
-  minimumRuntime { }
+  minimumRuntime {}
}

compileMinimumRuntimeJava {
  targetCompatibility = 8
  sourceCompatibility = 8
}

jar {
  from sourceSets.minimumRuntime.output
}

javadoc {
  source sourceSets.minimumRuntime.allSource
}

/*****************************************************************************
@@ -104,7 +104,7 @@ dependencies {
  }
  compile localGroovy()

  compile 'commons-codec:commons-codec:1.12'
  compile 'org.apache.commons:commons-compress:1.19'
@@ -183,13 +183,13 @@ if (project != rootProject) {
    distribution project(':distribution:archives:linux-tar')
    distribution project(':distribution:archives:oss-linux-tar')
  }

  // for external projects we want to remove the marker file indicating we are running the Elasticsearch project
  processResources {
    exclude 'buildSrc.marker'
    into('META-INF') {
      from configurations.reaper
    }
  }

  // TODO: re-enable once randomizedtesting gradle code is published and removed from here
@@ -252,11 +252,11 @@ class VersionPropertiesLoader {
    }
    if (elasticsearch.matches("[0-9]+\\.[0-9]+\\.[0-9]+") == false) {
      throw new IllegalStateException(
        "Expected elasticsearch version to be numbers only of the form X.Y.Z but it was: " +
          elasticsearch
      )
    }
-    String qualifier = systemProperties.getProperty("build.version_qualifier", "");
+    String qualifier = systemProperties.getProperty("build.version_qualifier", "")
    if (qualifier.isEmpty() == false) {
      if (qualifier.matches("(alpha|beta|rc)\\d+") == false) {
        throw new IllegalStateException("Invalid qualifier: " + qualifier)

---- changed file ----

@@ -1,6 +1,6 @@
jar {
  archiveName = "${project.name}.jar"
  manifest {
    attributes 'Main-Class': 'org.elasticsearch.gradle.reaper.Reaper'
  }
}

---- changed file ----

@@ -22,34 +22,34 @@ import org.elasticsearch.gradle.info.BuildParams
 */

plugins {
  id 'elasticsearch.global-build-info'
}

boolean internal = Boolean.parseBoolean(System.getProperty("tests.internal", "true"))
BuildParams.init { it.setIsInternal(internal) }

project.gradle.projectsEvaluated {
  // wire the download service url to wiremock
  String fakeDownloadService = System.getProperty('tests.download_service')
  if (fakeDownloadService != null) {
    IvyArtifactRepository repository = (IvyArtifactRepository) rootProject.repositories.getByName("elasticsearch-downloads")
    repository.setUrl(fakeDownloadService)
    repository = (IvyArtifactRepository) project('subproj').repositories.getByName("elasticsearch-downloads")
    repository.setUrl(fakeDownloadService)
    if (internal == false) {
      repository = (IvyArtifactRepository) rootProject.repositories.getByName("elasticsearch-snapshots")
      repository.setUrl(fakeDownloadService)
      repository = (IvyArtifactRepository) project('subproj').repositories.getByName("elasticsearch-snapshots")
      repository.setUrl(fakeDownloadService)
    }
  }
}

if (internal) {
  Version currentVersion = Version.fromString("9.0.0")
  BwcVersions versions = new BwcVersions(new TreeSet<>(
    Arrays.asList(Version.fromString("8.0.0"), Version.fromString("8.0.1"), Version.fromString("8.1.0"), currentVersion)),
    currentVersion)
  allprojects {
    ext.bwcVersions = versions
  }
}

---- changed file ----

@@ -1,4 +1,3 @@
String distroConfig = System.getProperty('tests.local_distro.config')
if (distroConfig != null) {
  // setup the test distribution as an artifact of this project

---- changed file ----

@@ -1,5 +1,5 @@
plugins {
  id 'elasticsearch.distribution-download'
}

String distroVersion = System.getProperty('tests.distro.version')

---- changed file ----

@@ -1,38 +1,38 @@
plugins {
  id 'elasticsearch.build'
}

ext.licenseFile = file("$buildDir/dummy/license")
ext.noticeFile = file("$buildDir/dummy/notice")

buildResources {
  copy 'checkstyle.xml'
}

task sampleCopyAll(type: Sync) {
  /** Note: no explicit dependency. This works with tasks that use the Provider API a.k.a "Lazy Configuration" **/
  from buildResources
  into "$buildDir/sampleCopyAll"
}

task sample {
  // This does not work, task dependencies can't be providers
  // dependsOn buildResources.resource('minimumRuntimeVersion')
  // Nor does this, despite https://github.com/gradle/gradle/issues/3811
  // dependsOn buildResources.outputDir
  // for now it's just
  dependsOn buildResources
  // we have to reference it at configuration time in order to be picked up
  ext.checkstyle_suppressions = buildResources.copy('checkstyle_suppressions.xml')
  doLast {
    println "This task is using ${file(checkstyle_suppressions)}"
  }
}

task noConfigAfterExecution {
  dependsOn buildResources
  doLast {
    println "This should cause an error because we are refferencing " +
      "${buildResources.copy('checkstyle_suppressions.xml')} after the `buildResources` task has ran."
  }
}

---- changed file ----

@@ -1,19 +1,19 @@
plugins {
  id 'java'
  id 'elasticsearch.build'
}

ext.licenseFile = file("LICENSE")
ext.noticeFile = file("NOTICE")

dependencies {
  compile "junit:junit:${versions.junit}"
  // missing classes in thirdparty audit
  compile 'org.hamcrest:hamcrest-core:1.3'
}

repositories {
  jcenter()
}

// todo remove offending rules
@@ -27,7 +27,7 @@ test.enabled = false
loggerUsageCheck.enabled = false

task hello {
  doFirst {
    println "build plugin can be applied"
  }
}

---- changed file ----

@@ -1,17 +1,16 @@
project.gradle.projectsEvaluated {
  // wire the jdk repo to wiremock
  String fakeJdkRepo = Objects.requireNonNull(System.getProperty('tests.jdk_repo'))
  String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor'))
  String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))
  println rootProject.repositories.asMap.keySet()
  IvyArtifactRepository repository =
    (IvyArtifactRepository) rootProject.repositories.getByName("jdk_repo_${fakeJdkVendor}_${fakeJdkVersion}")
  repository.setUrl(fakeJdkRepo)
}

task numConfigurations {
  doLast {
    println "NUM CONFIGS: ${project.configurations.size()}"
  }
}

---- changed file ----

@@ -3,9 +3,9 @@ evaluationDependsOn ':subproj'
String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor'))
String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))

jdks {
  linux_jdk {
    vendor = fakeJdkVendor
    version = fakeJdkVersion
    platform = "linux"
  }
}

---- changed file ----

@@ -1,45 +1,45 @@
plugins {
  id 'elasticsearch.jdk-download'
}

String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor'))
String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))

jdks {
  linux {
    vendor = fakeJdkVendor
    version = fakeJdkVersion
    platform = "linux"
  }
  darwin {
    vendor = fakeJdkVendor
    version = fakeJdkVersion
    platform = "darwin"
  }
  windows {
    vendor = fakeJdkVendor
    version = fakeJdkVersion
    platform = "windows"
  }
}

task getLinuxJdk {
  dependsOn jdks.linux
  doLast {
    println "JDK HOME: " + jdks.linux
  }
}

task getDarwinJdk {
  dependsOn jdks.darwin
  doLast {
    println "JDK HOME: " + jdks.darwin
  }
}

task getWindowsJdk {
  dependsOn jdks.windows
  doLast {
    println "JDK HOME: " + jdks.windows
  }
}

---- changed file ----

@@ -1,11 +1,11 @@
plugins {
  id 'elasticsearch.reaper'
}

task launchReaper {
  doLast {
    def reaper = project.extensions.getByName('reaper')
    reaper.registerCommand('test', 'true')
    reaper.unregister('test')
  }
}

---- changed file ----

@@ -1,53 +1,53 @@
import org.elasticsearch.gradle.tar.SymbolicLinkPreservingTar

plugins {
  id 'base'
  id 'distribution'
  id 'elasticsearch.symbolic-link-preserving-tar'
}

final String source = Objects.requireNonNull(System.getProperty('tests.symbolic_link_preserving_tar_source'))
boolean preserveFileTimestamps;
final String testPreserveFileTimestamps =
  Objects.requireNonNull(System.getProperty('tests.symbolic_link_preserving_tar_preserve_file_timestamps'))
switch (testPreserveFileTimestamps) {
  case "true":
    preserveFileTimestamps = true
    break
  case "false":
    preserveFileTimestamps = false
    break
  default:
    throw new IllegalArgumentException(
      "tests.symbolic_link_preserving_tar_preserve_file_timestamps must be [true] or [false] but was ["
        + testPreserveFileTimestamps + "]")
}

task buildBZip2Tar(type: SymbolicLinkPreservingTar) { SymbolicLinkPreservingTar tar ->
  tar.archiveExtension = 'tar.bz2'
  tar.compression = Compression.BZIP2
  tar.preserveFileTimestamps = preserveFileTimestamps
  from fileTree(source)
  doLast {
    println archiveFile.get().asFile.path
  }
}

task buildGZipTar(type: SymbolicLinkPreservingTar) { SymbolicLinkPreservingTar tar ->
  tar.archiveExtension = 'tar.gz'
  tar.compression = Compression.GZIP
  tar.preserveFileTimestamps = preserveFileTimestamps
  from fileTree(source)
-  doLast{
+  doLast {
    println archiveFile.get().asFile.path
  }
}

task buildTar(type: SymbolicLinkPreservingTar) { SymbolicLinkPreservingTar tar ->
  tar.archiveExtension = 'tar'
  tar.preserveFileTimestamps = preserveFileTimestamps
  from fileTree(source)
-  doLast{
+  doLast {
    println archiveFile.get().asFile.path
  }
}

---- changed file ----

@@ -1,83 +1,83 @@
plugins {
  id 'elasticsearch.build' apply false
}

allprojects {
  apply plugin: 'java'
  apply plugin: 'elasticsearch.build'

  repositories {
    jcenter()
  }
  dependencies {
    testCompile "junit:junit:4.12"
  }

  ext.licenseFile = file("$buildDir/dummy/license")
  ext.noticeFile = file("$buildDir/dummy/notice")

  testingConventions.naming {
    // Reset default to no baseClass checks
    Tests {
      baseClasses = []
    }
    IT {
      baseClasses = []
    }
  }
}

project(':empty_test_task') {
  task emptyTest(type: Test) {
  }
}

project(':all_classes_in_tasks') {
  test {
    include "**/Convention*"
  }
}

project(':not_implementing_base') {
  testingConventions.naming {
    Tests {
      baseClass 'org.elasticsearch.gradle.testkit.Unit'
    }
    IT {
      baseClass 'org.elasticsearch.gradle.testkit.Integration'
    }
  }
  test {
    include "**/*IT.class"
    include "**/*Tests.class"
  }
}

project(':valid_setup_no_base') {
  test {
    include "**/*IT.class"
    include "**/*Tests.class"
  }
}

project(':tests_in_main') {
}

-project (':valid_setup_with_base') {
+project(':valid_setup_with_base') {
  test {
    include "**/*IT.class"
    include "**/*Tests.class"
  }
  testingConventions.naming {
    Tests {
      baseClass 'org.elasticsearch.gradle.testkit.Unit'
    }
    IT {
      baseClass 'org.elasticsearch.gradle.testkit.Integration'
    }
  }
}

---- changed file ----

@@ -1,39 +1,39 @@
import org.elasticsearch.gradle.precommit.ThirdPartyAuditTask

plugins {
  id 'java'
  // bring in build-tools onto the classpath
  id 'elasticsearch.global-build-info' apply false
}

repositories {
  /**
   * Local test repo contains dummy jars with different group names and versions.
   *   - broken-log4j creates a log4j logger but has no pom, so the class will be missing
   *   - dummy-io has a class that creates a new java.io.File ( something which third-party-audit-absurd.txt forbids )
   *   - version 0.0.2 has the same class and one extra file just to make the jar different
   */
  maven {
    name = "local-test"
    url = file("sample_jars/build/testrepo")
  }
  jcenter()
}

configurations.create("forbiddenApisCliJar")

dependencies {
  forbiddenApisCliJar 'de.thetaphi:forbiddenapis:2.7'
  compileOnly "org.${project.properties.compileOnlyGroup}:${project.properties.compileOnlyVersion}"
  compile "org.${project.properties.compileGroup}:${project.properties.compileVersion}"
}

task empty(type: ThirdPartyAuditTask) {
  targetCompatibility = JavaVersion.VERSION_11
  signatureFile = file('third-party-audit-empty.txt')
}

task absurd(type: ThirdPartyAuditTask) {
  targetCompatibility = JavaVersion.VERSION_11
  signatureFile = file('third-party-audit-absurd.txt')
}

---- changed file ----

@@ -1,52 +1,52 @@
plugins {
  id 'java'
}

repositories {
  mavenCentral()
}

dependencies {
  compile 'org.apache.logging.log4j:log4j-core:2.11.1'
}

// Tests have to clean mid-test but we don't want the sample jars to go away
clean.enabled = false

["0.0.1", "0.0.2"].forEach { v ->
  ["elasticsearch", "other"].forEach { p ->
    task "dummy-${p}-${v}"(type: Jar) {
      destinationDir = file("${buildDir}/testrepo/org/${p}/gradle/dummy-io/${v}/")
      archiveName = "dummy-io-${v}.jar"
      from sourceSets.main.output
      include "**/TestingIO.class"
      if (v == "0.0.2") {
        manifest {
          attributes(
            "X-Different": "Different manifest, different jar"
          )
        }
      }
    }
    build.dependsOn("dummy-${p}-${v}")
  }
}

["0.0.1"].forEach { v ->
  ["elasticsearch", "other"].forEach { p ->
    task "broken-log4j-${p}-${v}"(type: Jar) {
      destinationDir = file("${buildDir}/testrepo/org/${p}/gradle/broken-log4j/${v}/")
      archiveName = "broken-log4j-${v}.jar"
      from sourceSets.main.output
      include "**/TestingLog4j.class"
    }
    build.dependsOn("broken-log4j-${p}-${v}")
  }
}

task jarhellJdk(type: Jar) {
  destinationDir = file("${buildDir}/testrepo/org/other/gradle/jarhellJdk/0.0.1/")
  archiveName = "jarhellJdk-0.0.1.jar"
  from sourceSets.main.output
  include "**/String.class"
  into "java/lang"
  build.dependsOn("jarhellJdk")
}

---- changed file ----

@@ -42,9 +42,9 @@ configurations {
idea {
  module {
    if (scopes.TEST != null) {
      scopes.TEST.plus.add(project.configurations.restSpec)
    }
  }
}
@@ -68,7 +68,7 @@ dependencies {
  // Needed for serialization tests:
  // (In order to serialize a server side class to a client side class or the other way around)
  testCompile(project(':x-pack:plugin:core')) {
    exclude group: 'org.elasticsearch', module: 'elasticsearch-rest-high-level-client'
  }

  restSpec project(':rest-api-spec')
@@ -141,8 +141,8 @@ testClusters.all {
  keystore 'xpack.security.transport.ssl.truststore.secure_password', 'testnode'
  extraConfigFile 'roles.yml', file('roles.yml')
  user username: System.getProperty('tests.rest.cluster.username', 'test_user'),
    password: System.getProperty('tests.rest.cluster.password', 'test-password'),
    role: System.getProperty('tests.rest.cluster.role', 'admin')
  user username: 'admin_user', password: 'admin-password'

  extraConfigFile nodeCert.name, nodeCert

---- changed file ----

@@ -54,7 +54,7 @@ dependencies {
tasks.withType(CheckForbiddenApis) {
  //client does not depend on server, so only jdk and http signatures should be checked
-  replaceSignatureFiles ('jdk-signatures', 'http-signatures')
+  replaceSignatureFiles('jdk-signatures', 'http-signatures')
}

forbiddenPatterns {
@@ -69,7 +69,7 @@ forbiddenApisTest {
// JarHell is part of es server, which we don't want to pull in
// TODO: Not anymore. Now in :libs:elasticsearch-core
-jarHell.enabled=false
+jarHell.enabled = false

testingConventions {
  naming.clear()
@@ -80,7 +80,7 @@ testingConventions {
  }
}

-thirdPartyAudit.ignoreMissingClasses (
+thirdPartyAudit.ignoreMissingClasses(
  //commons-logging optional dependencies
  'org.apache.avalon.framework.logger.Logger',
  'org.apache.log.Hierarchy',

---- changed file ----

@@ -69,7 +69,7 @@ dependencyLicenses {
// JarHell is part of es server, which we don't want to pull in
// TODO: Not anymore. Now in :libs:elasticsearch-core
-jarHell.enabled=false
+jarHell.enabled = false

testingConventions {
  naming.clear()
@@ -80,14 +80,13 @@ testingConventions {
  }
}

dependencyLicenses {
  dependencies = project.configurations.runtime.fileCollection {
    it.group.startsWith('org.elasticsearch') == false
  }
}

-thirdPartyAudit.ignoreMissingClasses (
+thirdPartyAudit.ignoreMissingClasses(
  //commons-logging optional dependencies
  'org.apache.avalon.framework.logger.Logger',
  'org.apache.log.Hierarchy',

---- changed file ----

@@ -45,7 +45,7 @@ forbiddenApisTest {
// JarHell is part of es server, which we don't want to pull in
// TODO: Not anymore. Now in :libs:elasticsearch-core
-jarHell.enabled=false
+jarHell.enabled = false

// TODO: should we have licenses for our test deps?
dependencyLicenses.enabled = false

---- changed file ----

@@ -27,6 +27,7 @@ import org.elasticsearch.gradle.tar.SymbolicLinkPreservingTar
import java.nio.file.Files
import java.nio.file.Path

// need this so Zip/Tar tasks get basic defaults...
apply plugin: 'base'
@@ -38,7 +39,7 @@ task createLogsDir(type: EmptyDirTask) {
  dir = "${logsDir}"
  dirMode = 0755
}
-ext.pluginsDir= new File(buildDir, 'plugins-hack/plugins')
+ext.pluginsDir = new File(buildDir, 'plugins-hack/plugins')
task createPluginsDir(type: EmptyDirTask) {
  dir = "${pluginsDir}"
  dirMode = 0755

---- changed file ----

@@ -27,7 +27,7 @@ integTest.runner {
   */
  if (System.getProperty("tests.rest.cluster") == null) {
    nonInputProperties.systemProperty 'tests.logfile',
-      "${ -> testClusters.integTest.singleNode().getServerLog()}"
+      "${-> testClusters.integTest.singleNode().getServerLog()}"
  } else {
    systemProperty 'tests.logfile', '--external--'
  }

---- changed file ----

@@ -36,9 +36,9 @@ apply plugin: 'elasticsearch.testclusters'
// Concatenates the dependencies CSV files into a single file
task generateDependenciesReport(type: ConcatFilesTask) {
-  files = fileTree(dir: project.rootDir, include: '**/dependencies.csv' )
+  files = fileTree(dir: project.rootDir, include: '**/dependencies.csv')
  headerLine = "name,version,url,license"
-  target = new File(System.getProperty('csv')?: "${project.buildDir}/reports/dependencies/es-dependencies.csv")
+  target = new File(System.getProperty('csv') ?: "${project.buildDir}/reports/dependencies/es-dependencies.csv")
}

/*****************************************************************************
@@ -138,7 +138,7 @@ void copyModule(Sync copyTask, Project module) {
    exclude 'config/log4j2.properties'

    eachFile { details ->
      String name = module.plugins.hasPlugin('elasticsearch.esplugin') ? module.esplugin.name : module.es_meta_plugin.name
      // Copy all non config/bin files
      // Note these might be unde a subdirectory in the case of a meta plugin
      if ((details.relativePath.pathString ==~ /([^\/]+\/)?(config|bin)\/.*/) == false) {
@@ -228,7 +228,7 @@ Project xpack = project(':x-pack:plugin')
xpack.subprojects.findAll { it.parent == xpack }.each { Project xpackModule ->
  File licenses = new File(xpackModule.projectDir, 'licenses')
  if (licenses.exists()) {
    buildDefaultNotice.licensesDir licenses
  }
  copyModule(processDefaultOutputs, xpackModule)
  copyLog4jProperties(buildDefaultLog4jConfig, xpackModule)
@@ -298,9 +298,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
      }
      List excludePlatforms = ['linux', 'windows', 'darwin']
      if (platform != null) {
        excludePlatforms.remove(excludePlatforms.indexOf(platform))
      } else {
        excludePlatforms = []
      }
      from(buildModules) {
        for (String excludePlatform : excludePlatforms) {
@@ -337,7 +337,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
    binFiles = { distributionType, oss, jdk ->
      copySpec {
        // non-windows files, for all distributions
        with copySpec {
          from '../src/bin'
          exclude '*.exe'
@@ -387,7 +387,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
        } else {
          from buildDefaultNoJdkNotice
        }
      }
    }
  }
@@ -488,10 +488,10 @@ subprojects {
    licenseText = rootProject.file('licenses/ELASTIC-LICENSE.txt').getText('UTF-8')
  }
  // license text needs to be indented with a single space
  licenseText = ' ' + licenseText.replace('\n', '\n ')

  String footer = "# Built for ${project.name}-${project.version} " +
    "(${distributionType})"
  Map<String, Object> expansions = [
    'project.name': project.name,
    'project.version': version,
@@ -514,9 +514,9 @@ subprojects {
      'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; done',
    ],
    'source.path.env': [
      'deb': 'source /etc/default/elasticsearch',
      'rpm': 'source /etc/sysconfig/elasticsearch',
      'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; fi',
    ],
    'path.logs': [
      'deb': packagingPathLogs,
@@ -559,7 +559,6 @@ subprojects {
      'def': oss ? 'oss' : 'default'
    ],
    'es.distribution.type': [
      'deb': 'deb',
      'rpm': 'rpm',
@@ -593,7 +592,7 @@ subprojects {
return result return result
} }
ext.assertLinesInFile = { Path path, List<String> expectedLines -> ext.assertLinesInFile = { Path path, List<String> expectedLines ->
final List<String> actualLines = Files.readAllLines(path) final List<String> actualLines = Files.readAllLines(path)
int line = 0 int line = 0
for (final String expectedLine : expectedLines) { for (final String expectedLine : expectedLines) {
@@ -606,12 +605,12 @@ subprojects {
} }
} }
['archives:windows-zip','archives:oss-windows-zip', ['archives:windows-zip', 'archives:oss-windows-zip',
'archives:darwin-tar','archives:oss-darwin-tar', 'archives:darwin-tar', 'archives:oss-darwin-tar',
'archives:linux-tar', 'archives:oss-linux-tar', 'archives:linux-tar', 'archives:oss-linux-tar',
'archives:integ-test-zip', 'archives:integ-test-zip',
'packages:rpm', 'packages:deb', 'packages:rpm', 'packages:deb',
'packages:oss-rpm', 'packages:oss-deb', 'packages:oss-rpm', 'packages:oss-deb',
].forEach { subName -> ].forEach { subName ->
Project subproject = project("${project.path}:${subName}") Project subproject = project("${project.path}:${subName}")
Configuration configuration = configurations.create(subproject.name) Configuration configuration = configurations.create(subproject.name)
@@ -35,7 +35,8 @@ import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
* unreleased versions are when Gradle projects are set up, so we use "build-unreleased-version-*" as placeholders * unreleased versions are when Gradle projects are set up, so we use "build-unreleased-version-*" as placeholders
* and configure them to build various versions here. * and configure them to build various versions here.
*/ */
bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleasedVersion -> project("${unreleasedVersion.gradleProjectPath}") { bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleasedVersion ->
project("${unreleasedVersion.gradleProjectPath}") {
Version bwcVersion = unreleasedVersion.version Version bwcVersion = unreleasedVersion.version
String bwcBranch = unreleasedVersion.branch String bwcBranch = unreleasedVersion.branch
apply plugin: 'distribution' apply plugin: 'distribution'
@@ -49,196 +50,196 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased
boolean gitFetchLatest boolean gitFetchLatest
final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true") final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true")
if ("true".equals(gitFetchLatestProperty)) { if ("true".equals(gitFetchLatestProperty)) {
gitFetchLatest = true gitFetchLatest = true
} else if ("false".equals(gitFetchLatestProperty)) { } else if ("false".equals(gitFetchLatestProperty)) {
gitFetchLatest = false gitFetchLatest = false
} else { } else {
throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]") throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]")
} }
task createClone(type: LoggedExec) { task createClone(type: LoggedExec) {
onlyIf { checkoutDir.exists() == false } onlyIf { checkoutDir.exists() == false }
commandLine = ['git', 'clone', rootDir, checkoutDir] commandLine = ['git', 'clone', rootDir, checkoutDir]
} }
task findRemote(type: LoggedExec) { task findRemote(type: LoggedExec) {
dependsOn createClone dependsOn createClone
workingDir = checkoutDir workingDir = checkoutDir
commandLine = ['git', 'remote', '-v'] commandLine = ['git', 'remote', '-v']
ByteArrayOutputStream output = new ByteArrayOutputStream() ByteArrayOutputStream output = new ByteArrayOutputStream()
standardOutput = output standardOutput = output
doLast { doLast {
project.ext.remoteExists = false project.ext.remoteExists = false
output.toString('UTF-8').eachLine { output.toString('UTF-8').eachLine {
if (it.contains("${remote}\t")) { if (it.contains("${remote}\t")) {
project.ext.remoteExists = true project.ext.remoteExists = true
}
}
}
}
task addRemote(type: LoggedExec) { task addRemote(type: LoggedExec) {
dependsOn findRemote dependsOn findRemote
onlyIf { project.ext.remoteExists == false } onlyIf { project.ext.remoteExists == false }
workingDir = checkoutDir workingDir = checkoutDir
commandLine = ['git', 'remote', 'add', "${remote}", "https://github.com/${remote}/elasticsearch.git"] commandLine = ['git', 'remote', 'add', "${remote}", "https://github.com/${remote}/elasticsearch.git"]
} }
task fetchLatest(type: LoggedExec) { task fetchLatest(type: LoggedExec) {
onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest } onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest }
dependsOn addRemote dependsOn addRemote
workingDir = checkoutDir workingDir = checkoutDir
commandLine = ['git', 'fetch', '--all'] commandLine = ['git', 'fetch', '--all']
} }
Closure execGit = { Action<ExecSpec> action -> Closure execGit = { Action<ExecSpec> action ->
new ByteArrayOutputStream().withStream { os -> new ByteArrayOutputStream().withStream { os ->
ExecResult result = project.exec { spec -> ExecResult result = project.exec { spec ->
workingDir = checkoutDir workingDir = checkoutDir
standardOutput os standardOutput os
action.execute(spec) action.execute(spec)
}
result.assertNormalExitValue()
return os.toString().trim()
}
}
task checkoutBwcBranch() { task checkoutBwcBranch() {
dependsOn fetchLatest dependsOn fetchLatest
doLast { doLast {
String refspec = System.getProperty("bwc.refspec.${bwcBranch}") ?: System.getProperty("tests.bwc.refspec.${bwcBranch}") ?: "${remote}/${bwcBranch}" String refspec = System.getProperty("bwc.refspec.${bwcBranch}") ?: System.getProperty("tests.bwc.refspec.${bwcBranch}") ?: "${remote}/${bwcBranch}"
if (System.getProperty("bwc.checkout.align") != null) { if (System.getProperty("bwc.checkout.align") != null) {
/* /*
We use a time based approach to make the bwc versions built deterministic and compatible with the current hash. We use a time based approach to make the bwc versions built deterministic and compatible with the current hash.
Most of the time we want to test against latest, but when running delayed exhaustive tests or wanting Most of the time we want to test against latest, but when running delayed exhaustive tests or wanting
reproducible builds we want this to be deterministic by using a hash that was the latest when the current reproducible builds we want this to be deterministic by using a hash that was the latest when the current
commit was made. commit was made.
This approach doesn't work with merge commits as these can introduce commits in the chronological order This approach doesn't work with merge commits as these can introduce commits in the chronological order
after the fact, e.g. a merge done today can add commits dated yesterday, so the result will no longer be after the fact, e.g. a merge done today can add commits dated yesterday, so the result will no longer be
deterministic. deterministic.
We don't use merge commits, but for additional safety we check that no such commits exist in the time period We don't use merge commits, but for additional safety we check that no such commits exist in the time period
we are interested in. we are interested in.
Timestamps are at seconds resolution. rev-list --before and --after are inclusive w.r.t. the second Timestamps are at seconds resolution. rev-list --before and --after are inclusive w.r.t. the second
passed as input. This means the results might not be deterministic in the current second, but this passed as input. This means the results might not be deterministic in the current second, but this
should not matter in practice. should not matter in practice.
*/ */
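// In shorthand, the pinning described above boils down to three git invocations
// (flags as used in the closures below; <refspec> and <time> are placeholders):
//   git show --no-patch --no-notes --pretty='%cD'            -> timestamp of the current commit
//   git rev-list <refspec> --after <time> --merges           -> sanity check, must print nothing
//   git rev-list <refspec> -n 1 --before <time> --date-order -> commit the bwc build is pinned to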
String timeOfCurrent = execGit { spec -> String timeOfCurrent = execGit { spec ->
spec.commandLine 'git', 'show', '--no-patch', '--no-notes', "--pretty='%cD'" spec.commandLine 'git', 'show', '--no-patch', '--no-notes', "--pretty='%cD'"
spec.workingDir project.rootDir spec.workingDir project.rootDir
} }
logger.lifecycle("Commit date of current: {}", timeOfCurrent) logger.lifecycle("Commit date of current: {}", timeOfCurrent)
String mergeCommits = execGit { spec -> String mergeCommits = execGit { spec ->
spec.commandLine "git", "rev-list", refspec, "--after", timeOfCurrent, "--merges" spec.commandLine "git", "rev-list", refspec, "--after", timeOfCurrent, "--merges"
} }
if (mergeCommits.isEmpty() == false) { if (mergeCommits.isEmpty() == false) {
throw new IllegalStateException( throw new IllegalStateException(
"Found the following merge commits which prevent determining bwc commits: " + mergeCommits "Found the following merge commits which prevent determining bwc commits: " + mergeCommits
) )
} }
refspec = execGit { spec -> refspec = execGit { spec ->
spec.commandLine "git", "rev-list", refspec, "-n", "1", "--before", timeOfCurrent, "--date-order" spec.commandLine "git", "rev-list", refspec, "-n", "1", "--before", timeOfCurrent, "--date-order"
} }
}
logger.lifecycle("Performing checkout of ${refspec}...")
LoggedExec.exec(project) { spec ->
spec.workingDir = checkoutDir
spec.commandLine "git", "checkout", refspec
}
String checkoutHash = GlobalBuildInfoPlugin.gitRevision(checkoutDir)
logger.lifecycle("Checkout hash for ${project.path} is ${checkoutHash}")
file("${project.buildDir}/refspec").text = checkoutHash
}
}
Closure createRunBwcGradleTask = { name, extraConfig -> Closure createRunBwcGradleTask = { name, extraConfig ->
return tasks.create(name: "$name", type: LoggedExec) { return tasks.create(name: "$name", type: LoggedExec) {
dependsOn checkoutBwcBranch dependsOn checkoutBwcBranch
spoolOutput = true spoolOutput = true
workingDir = checkoutDir workingDir = checkoutDir
doFirst { doFirst {
// Execution time so that the checkouts are available // Execution time so that the checkouts are available
List<String> lines = file("${checkoutDir}/.ci/java-versions.properties").readLines() List<String> lines = file("${checkoutDir}/.ci/java-versions.properties").readLines()
environment( environment(
'JAVA_HOME', 'JAVA_HOME',
getJavaHome(it, Integer.parseInt( getJavaHome(it, Integer.parseInt(
lines lines
.findAll({ it.startsWith("ES_BUILD_JAVA=") }) .findAll({ it.startsWith("ES_BUILD_JAVA=") })
.collect({ it.replace("ES_BUILD_JAVA=java", "").trim() }) .collect({ it.replace("ES_BUILD_JAVA=java", "").trim() })
.collect({ it.replace("ES_BUILD_JAVA=openjdk", "").trim() }) .collect({ it.replace("ES_BUILD_JAVA=openjdk", "").trim() })
.join("!!") .join("!!")
)) ))
) )
environment( environment(
'RUNTIME_JAVA_HOME', 'RUNTIME_JAVA_HOME',
getJavaHome(it, Integer.parseInt( getJavaHome(it, Integer.parseInt(
lines lines
.findAll({ it.startsWith("ES_RUNTIME_JAVA=java") }) .findAll({ it.startsWith("ES_RUNTIME_JAVA=java") })
.collect({ it.replace("ES_RUNTIME_JAVA=java", "").trim() }) .collect({ it.replace("ES_RUNTIME_JAVA=java", "").trim() })
.join("!!") .join("!!")
)) ))
) )
}
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
executable 'cmd'
args '/C', 'call', new File(checkoutDir, 'gradlew').toString()
} else {
executable new File(checkoutDir, 'gradlew').toString()
}
if (gradle.startParameter.isOffline()) {
args "--offline"
}
args "-Dbuild.snapshot=true"
final LogLevel logLevel = gradle.startParameter.logLevel
if ([LogLevel.QUIET, LogLevel.WARN, LogLevel.INFO, LogLevel.DEBUG].contains(logLevel)) {
args "--${logLevel.name().toLowerCase(Locale.ENGLISH)}"
}
final String showStacktraceName = gradle.startParameter.showStacktrace.name()
assert ["INTERNAL_EXCEPTIONS", "ALWAYS", "ALWAYS_FULL"].contains(showStacktraceName)
if (showStacktraceName.equals("ALWAYS")) {
args "--stacktrace"
} else if (showStacktraceName.equals("ALWAYS_FULL")) {
args "--full-stacktrace"
}
if (gradle.getStartParameter().isParallelProjectExecutionEnabled()) {
args "--parallel"
}
standardOutput = new IndentingOutputStream(System.out, bwcVersion)
errorOutput = new IndentingOutputStream(System.err, bwcVersion)
configure extraConfig
}
}
Closure buildBwcTaskName = { projectName -> Closure buildBwcTaskName = { projectName ->
return "buildBwc${projectName.replaceAll(/-\w/){ it[1].toUpperCase() }.capitalize()}" return "buildBwc${projectName.replaceAll(/-\w/) { it[1].toUpperCase() }.capitalize()}"
} }
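A quick sketch of what this naming closure produces (assertion illustrative only):

// each '-x' sequence is camel-cased, then the result is capitalized and prefixed
assert 'oss-zip'.replaceAll(/-\w/) { it[1].toUpperCase() }.capitalize() == 'OssZip'
// so buildBwcTaskName('oss-zip') == 'buildBwcOssZip' and buildBwcTaskName('rpm') == 'buildBwcRpm'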
task buildBwc {} task buildBwc {}
Closure createBuildBwcTask = { projectName, projectDir, projectArtifact -> Closure createBuildBwcTask = { projectName, projectDir, projectArtifact ->
Task bwcTask = createRunBwcGradleTask(buildBwcTaskName(projectName)) { Task bwcTask = createRunBwcGradleTask(buildBwcTaskName(projectName)) {
inputs.file("${project.buildDir}/refspec") inputs.file("${project.buildDir}/refspec")
outputs.files(projectArtifact) outputs.files(projectArtifact)
outputs.cacheIf("BWC distribution caching is disabled on 'master' branch") { outputs.cacheIf("BWC distribution caching is disabled on 'master' branch") {
// Don't bother caching in 'master' since the BWC branches move too quickly to make this cost worthwhile // Don't bother caching in 'master' since the BWC branches move too quickly to make this cost worthwhile
BuildParams.ci && System.getenv('GIT_BRANCH')?.endsWith("master") == false BuildParams.ci && System.getenv('GIT_BRANCH')?.endsWith("master") == false
}
args ":${projectDir.replace('/', ':')}:assemble"
if (project.gradle.startParameter.buildCacheEnabled) {
args "--build-cache"
}
doLast {
if (projectArtifact.exists() == false) {
throw new InvalidUserDataException("Building ${bwcVersion} didn't generate expected file ${projectArtifact}")
}
}
}
buildBwc.dependsOn bwcTask
}
Map<String, File> artifactFiles = [:] Map<String, File> artifactFiles = [:]
@@ -251,90 +252,91 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased
} }
for (String projectName : projects) { for (String projectName : projects) {
String baseDir = "distribution" String baseDir = "distribution"
String classifier = "" String classifier = ""
String extension = projectName String extension = projectName
if (bwcVersion.onOrAfter('7.0.0') && (projectName.contains('zip') || projectName.contains('tar'))) { if (bwcVersion.onOrAfter('7.0.0') && (projectName.contains('zip') || projectName.contains('tar'))) {
int index = projectName.indexOf('-') int index = projectName.indexOf('-')
classifier = "-${projectName.substring(0, index)}-x86_64" classifier = "-${projectName.substring(0, index)}-x86_64"
extension = projectName.substring(index + 1) extension = projectName.substring(index + 1)
if (extension.equals('tar')) { if (extension.equals('tar')) {
extension += '.gz' extension += '.gz'
}
}
if (bwcVersion.onOrAfter('7.0.0') && projectName.contains('deb')) {
classifier = "-amd64"
}
if (bwcVersion.onOrAfter('7.0.0') && projectName.contains('rpm')) {
classifier = "-x86_64"
}
if (bwcVersion.onOrAfter('6.3.0')) {
baseDir += projectName.endsWith('zip') || projectName.endsWith('tar') ? '/archives' : '/packages'
// add oss variant first
projectDirs.add("${baseDir}/oss-${projectName}")
File ossProjectArtifact = file("${checkoutDir}/${baseDir}/oss-${projectName}/build/distributions/elasticsearch-oss-${bwcVersion}-SNAPSHOT${classifier}.${extension}")
artifactFiles.put("oss-" + projectName, ossProjectArtifact)
createBuildBwcTask("oss-${projectName}", "${baseDir}/oss-${projectName}", ossProjectArtifact)
}
projectDirs.add("${baseDir}/${projectName}")
File projectArtifact = file("${checkoutDir}/${baseDir}/${projectName}/build/distributions/elasticsearch-${bwcVersion}-SNAPSHOT${classifier}.${extension}")
artifactFiles.put(projectName, projectArtifact)
createBuildBwcTask(projectName, "${baseDir}/${projectName}", projectArtifact)
}
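Worked through with hypothetical values: for projectName 'darwin-tar' on a 7.x bwcVersion, classifier becomes '-darwin-x86_64' and extension becomes 'tar.gz', so the oss artifact is expected at:

// distribution/archives/oss-darwin-tar/build/distributions/
//   elasticsearch-oss-7.6.0-SNAPSHOT-darwin-x86_64.tar.gz   (version number illustrative)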
createRunBwcGradleTask("resolveAllBwcDependencies") { createRunBwcGradleTask("resolveAllBwcDependencies") {
args 'resolveAllDependencies' args 'resolveAllDependencies'
} }
Version currentVersion = Version.fromString(version) Version currentVersion = Version.fromString(version)
if (currentVersion.getMinor() == 0 && currentVersion.getRevision() == 0) { if (currentVersion.getMinor() == 0 && currentVersion.getRevision() == 0) {
// We only want to resolve dependencies for live versions of master, without cascading this to older versions // We only want to resolve dependencies for live versions of master, without cascading this to older versions
resolveAllDependencies.dependsOn resolveAllBwcDependencies resolveAllDependencies.dependsOn resolveAllBwcDependencies
} }
for (e in artifactFiles) { for (e in artifactFiles) {
String projectName = e.key String projectName = e.key
String buildBwcTask = buildBwcTaskName(projectName) String buildBwcTask = buildBwcTaskName(projectName)
File artifactFile = e.value File artifactFile = e.value
String artifactFileName = artifactFile.name String artifactFileName = artifactFile.name
String artifactName = artifactFileName.contains('oss') ? 'elasticsearch-oss' : 'elasticsearch' String artifactName = artifactFileName.contains('oss') ? 'elasticsearch-oss' : 'elasticsearch'
String suffix = artifactFile.toString()[-3..-1] String suffix = artifactFile.toString()[-3..-1]
int archIndex = artifactFileName.indexOf('x86_64') int archIndex = artifactFileName.indexOf('x86_64')
String classifier = '' String classifier = ''
if (archIndex != -1) { if (archIndex != -1) {
int osIndex = artifactFileName.lastIndexOf('-', archIndex - 2) int osIndex = artifactFileName.lastIndexOf('-', archIndex - 2)
classifier = "${artifactFileName.substring(osIndex + 1, archIndex - 1)}-x86_64" classifier = "${artifactFileName.substring(osIndex + 1, archIndex - 1)}-x86_64"
} }
configurations.create(projectName) configurations.create(projectName)
artifacts { artifacts {
it.add(projectName, [file: artifactFile, name: artifactName, classifier: classifier, type: suffix, builtBy: buildBwcTask]) it.add(projectName, [file: artifactFile, name: artifactName, classifier: classifier, type: suffix, builtBy: buildBwcTask])
} }
} }
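Reading that back from a hypothetical filename:

// 'elasticsearch-oss-7.6.0-SNAPSHOT-linux-x86_64.tar.gz'
//   artifactName = 'elasticsearch-oss'   (name contains 'oss')
//   suffix       = '.gz'                 (last three characters, used as the artifact type)
//   classifier   = 'linux-x86_64'        (substring between the '-' before 'linux' and 'x86_64')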
// make sure no dependencies were added to assemble; we want it to be a no-op // make sure no dependencies were added to assemble; we want it to be a no-op
assemble.dependsOn = [] assemble.dependsOn = []
}
}
class IndentingOutputStream extends OutputStream { class IndentingOutputStream extends OutputStream {
public final byte[] indent public final byte[] indent
private final OutputStream delegate private final OutputStream delegate
public IndentingOutputStream(OutputStream delegate, Object version) { public IndentingOutputStream(OutputStream delegate, Object version) {
this.delegate = delegate this.delegate = delegate
indent = " [${version}] ".getBytes(StandardCharsets.UTF_8) indent = " [${version}] ".getBytes(StandardCharsets.UTF_8)
} }
@Override @Override
public void write(int b) { public void write(int b) {
write([b] as int[], 0, 1) write([b] as int[], 0, 1)
} }
public void write(int[] bytes, int offset, int length) { public void write(int[] bytes, int offset, int length) {
for (int i = 0; i < bytes.length; i++) { for (int i = 0; i < bytes.length; i++) {
delegate.write(bytes[i]) delegate.write(bytes[i])
if (bytes[i] == '\n') { if (bytes[i] == '\n') {
delegate.write(indent) delegate.write(indent)
} }
}
}
}
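A minimal sketch of the stream's effect (version string hypothetical):

// new IndentingOutputStream(System.out, '7.6.0') rewrites
//   "compileJava\ncompileTestJava\n"
// as
//   "compileJava\n [7.6.0] compileTestJava\n [7.6.0] "
// i.e. the ' [7.6.0] ' tag is emitted after every newline byte.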
@@ -23,7 +23,7 @@ dependencies {
} }
ext.expansions = { oss, ubi, local -> ext.expansions = { oss, ubi, local ->
final String classifier = 'linux-x86_64' final String classifier = 'linux-x86_64'
final String elasticsearch = oss ? "elasticsearch-oss-${VersionProperties.elasticsearch}-${classifier}.tar.gz" : "elasticsearch-${VersionProperties.elasticsearch}-${classifier}.tar.gz" final String elasticsearch = oss ? "elasticsearch-oss-${VersionProperties.elasticsearch}-${classifier}.tar.gz" : "elasticsearch-${VersionProperties.elasticsearch}-${classifier}.tar.gz"
return [ return [
'base_image' : ubi ? 'registry.access.redhat.com/ubi7/ubi-minimal:7.7' : 'centos:7', 'base_image' : ubi ? 'registry.access.redhat.com/ubi7/ubi-minimal:7.7' : 'centos:7',
@@ -38,11 +38,11 @@ ext.expansions = { oss, ubi, local ->
} }
private static String buildPath(final boolean oss, final boolean ubi) { private static String buildPath(final boolean oss, final boolean ubi) {
return "build/${ oss ? 'oss-' : ''}${ ubi ? 'ubi-' : ''}docker" return "build/${oss ? 'oss-' : ''}${ubi ? 'ubi-' : ''}docker"
} }
private static String taskName(final String prefix, final boolean oss, final boolean ubi, final String suffix) { private static String taskName(final String prefix, final boolean oss, final boolean ubi, final String suffix) {
return "${prefix}${oss ? 'Oss' : ''}${ubi ? 'Ubi': ''}${suffix}" return "${prefix}${oss ? 'Oss' : ''}${ubi ? 'Ubi' : ''}${suffix}"
} }
project.ext { project.ext {
@@ -55,7 +55,7 @@ project.ext {
into('config') { into('config') {
/* /*
* Oss and default distribution can have different configuration, therefore we want to allow overriding the default configuration * Oss and default distribution can have different configuration, therefore we want to allow overriding the default configuration
* by creating config files in oss or default build-context sub-modules. * by creating config files in oss or default build-context sub-modules.
*/ */
from project.projectDir.toPath().resolve("src/docker/config") from project.projectDir.toPath().resolve("src/docker/config")
if (oss) { if (oss) {
@@ -89,7 +89,7 @@ void addCopyDockerContextTask(final boolean oss, final boolean ubi) {
} }
} }
def createAndSetWritable (Object... locations) { def createAndSetWritable(Object... locations) {
locations.each { location -> locations.each { location ->
File file = file(location) File file = file(location)
file.mkdirs() file.mkdirs()
@@ -99,7 +99,7 @@ def createAndSetWritable (Object... locations) {
task copyKeystore(type: Sync) { task copyKeystore(type: Sync) {
from project(':x-pack:plugin:core') from project(':x-pack:plugin:core')
.file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
into "${buildDir}/certs" into "${buildDir}/certs"
doLast { doLast {
file("${buildDir}/certs").setReadable(true, false) file("${buildDir}/certs").setReadable(true, false)
@@ -115,26 +115,26 @@ preProcessFixture {
doLast { doLast {
// tests expect to have an empty repo // tests expect to have an empty repo
project.delete( project.delete(
"${buildDir}/repo", "${buildDir}/repo",
"${buildDir}/oss-repo" "${buildDir}/oss-repo"
) )
createAndSetWritable( createAndSetWritable(
"${buildDir}/repo", "${buildDir}/repo",
"${buildDir}/oss-repo", "${buildDir}/oss-repo",
"${buildDir}/logs/default-1", "${buildDir}/logs/default-1",
"${buildDir}/logs/default-2", "${buildDir}/logs/default-2",
"${buildDir}/logs/oss-1", "${buildDir}/logs/oss-1",
"${buildDir}/logs/oss-2" "${buildDir}/logs/oss-2"
) )
} }
} }
processTestResources { processTestResources {
from ({ zipTree(configurations.restSpec.singleFile) }) { from({ zipTree(configurations.restSpec.singleFile) }) {
include 'rest-api-spec/api/**' include 'rest-api-spec/api/**'
} }
from project(':x-pack:plugin:core') from project(':x-pack:plugin:core')
.file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
dependsOn configurations.restSpec dependsOn configurations.restSpec
} }
@@ -154,7 +154,7 @@ void addBuildDockerImage(final boolean oss, final boolean ubi) {
dependsOn taskName("copy", oss, ubi, "DockerContext") dependsOn taskName("copy", oss, ubi, "DockerContext")
List<String> tags List<String> tags
if (oss) { if (oss) {
tags = [ tags = [
"docker.elastic.co/elasticsearch/elasticsearch-oss${ubi ? '-ubi7' : ''}:${VersionProperties.elasticsearch}", "docker.elastic.co/elasticsearch/elasticsearch-oss${ubi ? '-ubi7' : ''}:${VersionProperties.elasticsearch}",
"elasticsearch-oss${ubi ? '-ubi7' : ''}:test" "elasticsearch-oss${ubi ? '-ubi7' : ''}:test"
] ]
@@ -186,10 +186,10 @@ for (final boolean oss : [false, true]) {
} }
// We build the images used in compose locally, but the pull command insists on using a repository // We build the images used in compose locally, but the pull command insists on using a repository
// thus we must disable it to prevent it from doing so. // thus we must disable it to prevent it from doing so.
// Everything will still be pulled since we will build the local images on a pull // Everything will still be pulled since we will build the local images on a pull
if (tasks.findByName("composePull")) { if (tasks.findByName("composePull")) {
tasks.composePull.enabled = false tasks.composePull.enabled = false
} }
/* /*
@@ -218,9 +218,9 @@ subprojects { Project subProject ->
exportDockerImageTask.dependsOn(parent.tasks.getByName(buildTaskName)) exportDockerImageTask.dependsOn(parent.tasks.getByName(buildTaskName))
artifacts.add('default', file(tarFile)) { artifacts.add('default', file(tarFile)) {
type 'tar' type 'tar'
name "elasticsearch${oss ? '-oss' : ''}${ubi ? '-ubi7' : ''}" name "elasticsearch${oss ? '-oss' : ''}${ubi ? '-ubi7' : ''}"
builtBy exportTaskName builtBy exportTaskName
} }
assemble.dependsOn exportTaskName assemble.dependsOn exportTaskName
@@ -1,11 +1,11 @@
apply plugin: 'base' apply plugin: 'base'
task buildDockerBuildContext(type: Tar) { task buildDockerBuildContext(type: Tar) {
extension = 'tar.gz' extension = 'tar.gz'
compression = Compression.GZIP compression = Compression.GZIP
archiveClassifier = "docker-build-context" archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch" archiveBaseName = "elasticsearch"
with dockerBuildContext(false, false, false) with dockerBuildContext(false, false, false)
} }
assemble.dependsOn buildDockerBuildContext assemble.dependsOn buildDockerBuildContext
@@ -1,11 +1,11 @@
apply plugin: 'base' apply plugin: 'base'
task buildOssDockerBuildContext(type: Tar) { task buildOssDockerBuildContext(type: Tar) {
extension = 'tar.gz' extension = 'tar.gz'
compression = Compression.GZIP compression = Compression.GZIP
archiveClassifier = "docker-build-context" archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-oss" archiveBaseName = "elasticsearch-oss"
with dockerBuildContext(true, false, false) with dockerBuildContext(true, false, false)
} }
assemble.dependsOn buildOssDockerBuildContext assemble.dependsOn buildOssDockerBuildContext
@@ -1,11 +1,11 @@
apply plugin: 'base' apply plugin: 'base'
task buildOssUbiDockerBuildContext(type: Tar) { task buildOssUbiDockerBuildContext(type: Tar) {
extension = 'tar.gz' extension = 'tar.gz'
compression = Compression.GZIP compression = Compression.GZIP
archiveClassifier = "docker-build-context" archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-oss-ubi7" archiveBaseName = "elasticsearch-oss-ubi7"
with dockerBuildContext(true, true, false) with dockerBuildContext(true, true, false)
} }
assemble.dependsOn buildOssUbiDockerBuildContext assemble.dependsOn buildOssUbiDockerBuildContext
@@ -1,11 +1,11 @@
apply plugin: 'base' apply plugin: 'base'
task buildUbiDockerBuildContext(type: Tar) { task buildUbiDockerBuildContext(type: Tar) {
extension = 'tar.gz' extension = 'tar.gz'
compression = Compression.GZIP compression = Compression.GZIP
archiveClassifier = "docker-build-context" archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-ubi7" archiveBaseName = "elasticsearch-ubi7"
with dockerBuildContext(false, true, false) with dockerBuildContext(false, true, false)
} }
assemble.dependsOn buildUbiDockerBuildContext assemble.dependsOn buildUbiDockerBuildContext
@@ -92,6 +92,7 @@ void addProcessFilesTask(String type, boolean oss, boolean jdk) {
} }
} }
} }
addProcessFilesTask('deb', true, true) addProcessFilesTask('deb', true, true)
addProcessFilesTask('deb', true, false) addProcessFilesTask('deb', true, false)
addProcessFilesTask('deb', false, true) addProcessFilesTask('deb', false, true)
@@ -111,7 +112,7 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) {
} }
dependsOn "process${oss ? 'Oss' : ''}${jdk ? '' : 'NoJdk'}${type.capitalize()}Files" dependsOn "process${oss ? 'Oss' : ''}${jdk ? '' : 'NoJdk'}${type.capitalize()}Files"
packageName "elasticsearch${oss ? '-oss' : ''}" packageName "elasticsearch${oss ? '-oss' : ''}"
arch (type == 'deb' ? 'amd64' : 'X86_64') arch(type == 'deb' ? 'amd64' : 'X86_64')
// Follow elasticsearch's file naming convention // Follow elasticsearch's file naming convention
String jdkString = jdk ? "" : "no-jdk-" String jdkString = jdk ? "" : "no-jdk-"
archiveName "${packageName}-${project.version}-${jdkString}${archString}.${type}" archiveName "${packageName}-${project.version}-${jdkString}${archString}.${type}"
@@ -193,16 +194,16 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) {
configurationFile '/etc/elasticsearch/users' configurationFile '/etc/elasticsearch/users'
configurationFile '/etc/elasticsearch/users_roles' configurationFile '/etc/elasticsearch/users_roles'
} }
from("${packagingFiles}") { from("${packagingFiles}") {
dirMode 02750 dirMode 02750
into('/etc') into('/etc')
permissionGroup 'elasticsearch' permissionGroup 'elasticsearch'
includeEmptyDirs true includeEmptyDirs true
createDirectoryEntry true createDirectoryEntry true
include("elasticsearch") // empty dir, just to add directory entry include("elasticsearch") // empty dir, just to add directory entry
} }
from("${packagingFiles}/etc/elasticsearch") { from("${packagingFiles}/etc/elasticsearch") {
into('/etc/elasticsearch') into('/etc/elasticsearch')
dirMode 02750 dirMode 02750
fileMode 0660 fileMode 0660
permissionGroup 'elasticsearch' permissionGroup 'elasticsearch'
@@ -288,8 +289,8 @@ ospackage {
signingKeyId = project.hasProperty('signing.keyId') ? project.property('signing.keyId') : 'D88E42B4' signingKeyId = project.hasProperty('signing.keyId') ? project.property('signing.keyId') : 'D88E42B4'
signingKeyPassphrase = project.property('signing.password') signingKeyPassphrase = project.property('signing.password')
signingKeyRingFile = project.hasProperty('signing.secretKeyRingFile') ? signingKeyRingFile = project.hasProperty('signing.secretKeyRingFile') ?
project.file(project.property('signing.secretKeyRingFile')) : project.file(project.property('signing.secretKeyRingFile')) :
new File(new File(System.getProperty('user.home'), '.gnupg'), 'secring.gpg') new File(new File(System.getProperty('user.home'), '.gnupg'), 'secring.gpg')
} }
// version found on oldest supported distro, centos-6 // version found on oldest supported distro, centos-6
@@ -442,16 +443,16 @@ subprojects {
onlyIf rpmExists onlyIf rpmExists
final File rpmDatabase = new File(extractionDir, 'rpm-database') final File rpmDatabase = new File(extractionDir, 'rpm-database')
commandLine 'rpm', commandLine 'rpm',
'--badreloc', '--badreloc',
'--nodeps', '--nodeps',
'--noscripts', '--noscripts',
'--notriggers', '--notriggers',
'--dbpath', '--dbpath',
rpmDatabase, rpmDatabase,
'--relocate', '--relocate',
"/=${packageExtractionDir}", "/=${packageExtractionDir}",
'-i', '-i',
"${-> buildDist.outputs.files.singleFile}" "${-> buildDist.outputs.files.singleFile}"
} }
} }
@@ -476,8 +477,8 @@ subprojects {
licenseFilename = "ELASTIC-LICENSE.txt" licenseFilename = "ELASTIC-LICENSE.txt"
} }
final List<String> header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/", final List<String> header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/",
"Copyright: Elasticsearch B.V. <info@elastic.co>", "Copyright: Elasticsearch B.V. <info@elastic.co>",
"License: " + expectedLicense) "License: " + expectedLicense)
final List<String> licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) final List<String> licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename))
final List<String> expectedLines = header + licenseLines.collect { " " + it } final List<String> expectedLines = header + licenseLines.collect { " " + it }
assertLinesInFile(copyrightPath, expectedLines) assertLinesInFile(copyrightPath, expectedLines)
@@ -4,7 +4,7 @@ targetCompatibility = JavaVersion.VERSION_1_7
// java_version_checker do not depend on core so only JDK signatures should be checked // java_version_checker do not depend on core so only JDK signatures should be checked
forbiddenApisMain { forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures' replaceSignatureFiles 'jdk-signatures'
} }
test.enabled = false test.enabled = false
@@ -21,25 +21,25 @@ import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.build'
dependencies { dependencies {
compile parent.project('java-version-checker') compile parent.project('java-version-checker')
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}" testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
} }
archivesBaseName = 'elasticsearch-launchers' archivesBaseName = 'elasticsearch-launchers'
tasks.withType(CheckForbiddenApis) { tasks.withType(CheckForbiddenApis) {
replaceSignatureFiles 'jdk-signatures' replaceSignatureFiles 'jdk-signatures'
} }
testingConventions { testingConventions {
naming.clear() naming.clear()
naming { naming {
Tests { Tests {
baseClass 'org.elasticsearch.tools.launchers.LaunchersTestCase' baseClass 'org.elasticsearch.tools.launchers.LaunchersTestCase'
}
}
}
javadoc.enabled = false javadoc.enabled = false
@@ -45,7 +45,7 @@ test {
thirdPartyAudit.onlyIf { thirdPartyAudit.onlyIf {
// FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit, // FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit,
// rather than provide a long list of exclusions, disable the check on FIPS. // rather than provide a long list of exclusions, disable the check on FIPS.
BuildParams.inFipsJvm BuildParams.inFipsJvm
} }
/* /*
@@ -61,14 +61,14 @@ thirdPartyAudit.onlyIf {
* *
*/ */
thirdPartyAudit.ignoreViolations( thirdPartyAudit.ignoreViolations(
'org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider$CoreSecureRandom', 'org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider$CoreSecureRandom',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF', 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$BaseTLSKeyGeneratorSpi', 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$BaseTLSKeyGeneratorSpi',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator', 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator$2', 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator$2',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator', 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator$2', 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator$2',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSPRFKeyGenerator', 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSPRFKeyGenerator',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator', 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator$2' 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator$2'
) )
@@ -23,18 +23,18 @@ apply plugin: 'elasticsearch.docs-test'
/* List of files that have snippets that will not work until platinum tests can occur ... */ /* List of files that have snippets that will not work until platinum tests can occur ... */
buildRestTests.expectedUnconvertedCandidates = [ buildRestTests.expectedUnconvertedCandidates = [
'reference/ml/anomaly-detection/transforms.asciidoc', 'reference/ml/anomaly-detection/transforms.asciidoc',
'reference/ml/anomaly-detection/apis/delete-calendar-event.asciidoc', 'reference/ml/anomaly-detection/apis/delete-calendar-event.asciidoc',
'reference/ml/anomaly-detection/apis/get-bucket.asciidoc', 'reference/ml/anomaly-detection/apis/get-bucket.asciidoc',
'reference/ml/anomaly-detection/apis/get-category.asciidoc', 'reference/ml/anomaly-detection/apis/get-category.asciidoc',
'reference/ml/anomaly-detection/apis/get-influencer.asciidoc', 'reference/ml/anomaly-detection/apis/get-influencer.asciidoc',
'reference/ml/anomaly-detection/apis/get-job-stats.asciidoc', 'reference/ml/anomaly-detection/apis/get-job-stats.asciidoc',
'reference/ml/anomaly-detection/apis/get-overall-buckets.asciidoc', 'reference/ml/anomaly-detection/apis/get-overall-buckets.asciidoc',
'reference/ml/anomaly-detection/apis/get-record.asciidoc', 'reference/ml/anomaly-detection/apis/get-record.asciidoc',
'reference/ml/anomaly-detection/apis/get-snapshot.asciidoc', 'reference/ml/anomaly-detection/apis/get-snapshot.asciidoc',
'reference/ml/anomaly-detection/apis/post-data.asciidoc', 'reference/ml/anomaly-detection/apis/post-data.asciidoc',
'reference/ml/anomaly-detection/apis/revert-snapshot.asciidoc', 'reference/ml/anomaly-detection/apis/revert-snapshot.asciidoc',
'reference/ml/anomaly-detection/apis/update-snapshot.asciidoc', 'reference/ml/anomaly-detection/apis/update-snapshot.asciidoc',
] ]
testClusters.integTest { testClusters.integTest {
@@ -118,20 +118,20 @@ Closure setupTwitter = { String name, int count ->
index: twitter index: twitter
refresh: true refresh: true
body: |''' body: |'''
for (int i = 0; i < count; i++) { for (int i = 0; i < count; i++) {
String user, text String user, text
if (i == 0) { if (i == 0) {
user = 'kimchy' user = 'kimchy'
text = 'trying out Elasticsearch' text = 'trying out Elasticsearch'
} else { } else {
user = 'test' user = 'test'
text = "some message with the number $i" text = "some message with the number $i"
} }
buildRestTests.setups[name] += """ buildRestTests.setups[name] += """
{"index":{"_id": "$i"}} {"index":{"_id": "$i"}}
{"user": "$user", "message": "$text", "date": "2009-11-15T14:12:12", "likes": $i}""" {"user": "$user", "message": "$text", "date": "2009-11-15T14:12:12", "likes": $i}"""
}
}
setupTwitter('twitter', 5) setupTwitter('twitter', 5)
setupTwitter('big_twitter', 120) setupTwitter('big_twitter', 120)
setupTwitter('huge_twitter', 1200) setupTwitter('huge_twitter', 1200)
@@ -503,7 +503,7 @@ buildRestTests.setups['latency'] = '''
for (int i = 0; i < 100; i++) { for (int i = 0; i < 100; i++) {
def value = i def value = i
if (i % 10) { if (i % 10) {
value = i*10 value = i * 10
} }
buildRestTests.setups['latency'] += """ buildRestTests.setups['latency'] += """
{"index":{}} {"index":{}}
@@ -960,7 +960,7 @@ buildRestTests.setups['farequote_datafeed'] = buildRestTests.setups['farequote_j
"job_id":"farequote", "job_id":"farequote",
"indexes":"farequote" "indexes":"farequote"
} }
''' '''
buildRestTests.setups['server_metrics_index'] = ''' buildRestTests.setups['server_metrics_index'] = '''
- do: - do:
indices.create: indices.create:
@@ -3,36 +3,36 @@ import java.nio.file.Files
String buildNumber = System.getenv('BUILD_NUMBER') String buildNumber = System.getenv('BUILD_NUMBER')
if (buildNumber) { if (buildNumber) {
File uploadFile = file("build/${buildNumber}.tar.bz2") File uploadFile = file("build/${buildNumber}.tar.bz2")
project.gradle.buildFinished { result -> project.gradle.buildFinished { result ->
println "build complete, generating: $uploadFile" println "build complete, generating: $uploadFile"
if (uploadFile.exists()) { if (uploadFile.exists()) {
project.delete(uploadFile) project.delete(uploadFile)
}
try {
ant.tar(destfile: uploadFile, compression: "bzip2", longfile: "gnu") {
fileset(dir: projectDir) {
fileTree(projectDir)
.include("**/*.hprof")
.include("**/reaper.log")
.include("**/build/testclusters/**")
.exclude("**/build/testclusters/**/data/**")
.exclude("**/build/testclusters/**/distro/**")
.exclude("**/build/testclusters/**/repo/**")
.exclude("**/build/testclusters/**/extract/**")
.filter { Files.isRegularFile(it.toPath()) }
.each {
include(name: projectDir.toPath().relativize(it.toPath()))
}
}
fileset(dir: "${gradle.gradleUserHomeDir}/daemon/${gradle.gradleVersion}", followsymlinks: false) {
include(name: "**/daemon-${ProcessHandle.current().pid()}*.log")
}
}
} catch (Exception e) {
logger.lifecycle("Failed to archive additional logs", e)
}
}
}
@@ -7,73 +7,73 @@ import java.util.concurrent.TimeUnit
long startTime = project.gradle.services.get(BuildRequestMetaData.class).getStartTime() long startTime = project.gradle.services.get(BuildRequestMetaData.class).getStartTime()
buildScan { buildScan {
URL jenkinsUrl = System.getenv('JENKINS_URL') ? new URL(System.getenv('JENKINS_URL')) : null URL jenkinsUrl = System.getenv('JENKINS_URL') ? new URL(System.getenv('JENKINS_URL')) : null
String buildNumber = System.getenv('BUILD_NUMBER') String buildNumber = System.getenv('BUILD_NUMBER')
String buildUrl = System.getenv('BUILD_URL') String buildUrl = System.getenv('BUILD_URL')
String jobName = System.getenv('JOB_NAME') String jobName = System.getenv('JOB_NAME')
String nodeName = System.getenv('NODE_NAME') String nodeName = System.getenv('NODE_NAME')
tag OS.current().name() tag OS.current().name()
if (jobName) { if (jobName) {
value 'Job name', jobName value 'Job name', jobName
}
if (buildNumber) {
value 'Job number', buildNumber
}
if (jenkinsUrl?.host?.endsWith('elastic.co')) {
publishAlways()
buildScan.server = 'https://gradle-enterprise.elastic.co'
}
if (nodeName) {
link 'System logs', "https://infra-stats.elastic.co/app/infra#/logs?" +
"&logFilter=(expression:'host.name:${nodeName}',kind:kuery)"
buildFinished {
link 'System metrics', "https://infra-stats.elastic.co/app/infra#/metrics/host/" +
"${nodeName}?_g=()&metricTime=(autoReload:!f,refreshInterval:5000," +
"time:(from:${startTime - TimeUnit.MILLISECONDS.convert(5, TimeUnit.MINUTES)},interval:%3E%3D1m," +
"to:${System.currentTimeMillis() + TimeUnit.MILLISECONDS.convert(5, TimeUnit.MINUTES)}))"
}
}
// Jenkins-specific build scan metadata
if (jenkinsUrl) {
tag 'CI'
tag jobName
link 'Jenkins Build', buildUrl
link 'GCP Upload', "https://console.cloud.google.com/storage/elasticsearch-ci-artifacts/jobs/${jobName}/build/${buildNumber}.tar.bz2"
System.getenv().getOrDefault('NODE_LABELS', '').split(' ').each {
value 'Jenkins Worker Label', it
}
// Add SCM information
def isPrBuild = System.getenv('ROOT_BUILD_CAUSE_GHPRBCAUSE') != null
if (isPrBuild) {
value 'Git Commit ID', System.getenv('ghprbActualCommit')
value 'Git Branch', System.getenv('ghprbTargetBranch')
tag System.getenv('ghprbTargetBranch')
tag "pr/${System.getenv('ghprbPullId')}"
tag 'pull-request'
link 'Source', "https://github.com/elastic/elasticsearch/tree/${System.getenv('ghprbActualCommit')}"
link 'Pull Request', System.getenv('ghprbPullLink')
} else {
if (System.getenv('GIT_BRANCH')) {
def branch = System.getenv('GIT_BRANCH').split('/').last()
value 'Git Branch', branch
tag branch
}
if (System.getenv('GIT_COMMIT')) {
value 'Git Commit ID', System.getenv('GIT_COMMIT')
link 'Source', "https://github.com/elastic/elasticsearch/tree/${System.getenv('GIT_COMMIT')}"
background {
def changes = "git diff --name-only ${System.getenv('GIT_PREVIOUS_COMMIT')}..${System.getenv('GIT_COMMIT')}".execute().text.trim()
value 'Git Changes', changes
}
}
}
} else {
tag 'LOCAL'
}
}
@@ -18,29 +18,29 @@
*/ */
subprojects { subprojects {
/* /*
* All subprojects are java projects using Elasticsearch's standard build * All subprojects are java projects using Elasticsearch's standard build
* tools. * tools.
*/ */
apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.build'
/* /*
* Subprojects may depend on the "core" lib but may not depend on any * Subprojects may depend on the "core" lib but may not depend on any
* other libs. This keeps our dependencies simpler. * other libs. This keeps our dependencies simpler.
*/ */
project.afterEvaluate { project.afterEvaluate {
configurations.all { Configuration conf -> configurations.all { Configuration conf ->
dependencies.matching { it instanceof ProjectDependency }.all { ProjectDependency dep -> dependencies.matching { it instanceof ProjectDependency }.all { ProjectDependency dep ->
Project depProject = dep.dependencyProject Project depProject = dep.dependencyProject
if (depProject != null if (depProject != null
&& false == depProject.path.equals(':libs:elasticsearch-core') && false == depProject.path.equals(':libs:elasticsearch-core')
&& depProject.path.startsWith(':libs')) { && depProject.path.startsWith(':libs')) {
throw new InvalidUserDataException("projects in :libs " throw new InvalidUserDataException("projects in :libs "
+ "may not depend on other projects libs except " + "may not depend on other projects libs except "
+ ":libs:elasticsearch-core but " + ":libs:elasticsearch-core but "
+ "${project.path} depends on ${depProject.path}") + "${project.path} depends on ${depProject.path}")
}
}
}
}
}
@@ -22,8 +22,8 @@ apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm' apply plugin: 'nebula.maven-scm'
dependencies { dependencies {
compile 'net.sf.jopt-simple:jopt-simple:5.0.2' compile 'net.sf.jopt-simple:jopt-simple:5.0.2'
compile project(':libs:elasticsearch-core') compile project(':libs:elasticsearch-core')
} }
test.enabled = false test.enabled = false
@@ -31,5 +31,5 @@ test.enabled = false
jarHell.enabled = false jarHell.enabled = false
forbiddenApisMain { forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures' replaceSignatureFiles 'jdk-signatures'
} }
@@ -22,33 +22,33 @@ apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm' apply plugin: 'nebula.maven-scm'
dependencies { dependencies {
// This dependency is used only by :libs:core for null-checking interop with other tools // This dependency is used only by :libs:core for null-checking interop with other tools
compileOnly "com.google.code.findbugs:jsr305:3.0.2" compileOnly "com.google.code.findbugs:jsr305:3.0.2"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}" testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
testCompile(project(":test:framework")) { testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-core' exclude group: 'org.elasticsearch', module: 'elasticsearch-core'
} }
} }
forbiddenApisMain { forbiddenApisMain {
// :libs:elasticsearch-core does not depend on server // :libs:elasticsearch-core does not depend on server
// TODO: Need to decide how we want to handle for forbidden signatures with the changes to server // TODO: Need to decide how we want to handle for forbidden signatures with the changes to server
replaceSignatureFiles 'jdk-signatures' replaceSignatureFiles 'jdk-signatures'
} }
thirdPartyAudit.ignoreMissingClasses ( thirdPartyAudit.ignoreMissingClasses(
// from log4j // from log4j
'org/osgi/framework/AdaptPermission', 'org/osgi/framework/AdaptPermission',
'org/osgi/framework/AdminPermission', 'org/osgi/framework/AdminPermission',
'org/osgi/framework/Bundle', 'org/osgi/framework/Bundle',
'org/osgi/framework/BundleActivator', 'org/osgi/framework/BundleActivator',
'org/osgi/framework/BundleContext', 'org/osgi/framework/BundleContext',
'org/osgi/framework/BundleEvent', 'org/osgi/framework/BundleEvent',
'org/osgi/framework/SynchronousBundleListener', 'org/osgi/framework/SynchronousBundleListener',
'org/osgi/framework/wiring/BundleWire', 'org/osgi/framework/wiring/BundleWire',
'org/osgi/framework/wiring/BundleWiring' 'org/osgi/framework/wiring/BundleWiring'
) )
@@ -18,15 +18,15 @@
*/ */
dependencies { dependencies {
testCompile(project(":test:framework")) { testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-dissect' exclude group: 'org.elasticsearch', module: 'elasticsearch-dissect'
} }
testCompile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" testCompile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
testCompile "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" testCompile "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
testCompile "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}" testCompile "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
} }
forbiddenApisMain { forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures' replaceSignatureFiles 'jdk-signatures'
} }
@@ -22,14 +22,14 @@ apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm' apply plugin: 'nebula.maven-scm'
dependencies { dependencies {
testCompile(project(":test:framework")) { testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-geo' exclude group: 'org.elasticsearch', module: 'elasticsearch-geo'
} }
} }
forbiddenApisMain { forbiddenApisMain {
// geo does not depend on server // geo does not depend on server
// TODO: Need to decide how we want to handle for forbidden signatures with the changes to core // TODO: Need to decide how we want to handle for forbidden signatures with the changes to core
replaceSignatureFiles 'jdk-signatures' replaceSignatureFiles 'jdk-signatures'
} }
@@ -18,15 +18,15 @@
*/ */
dependencies { dependencies {
compile 'org.jruby.joni:joni:2.1.29' compile 'org.jruby.joni:joni:2.1.29'
// joni dependencies: // joni dependencies:
compile 'org.jruby.jcodings:jcodings:1.0.44' compile 'org.jruby.jcodings:jcodings:1.0.44'
testCompile(project(":test:framework")) { testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-grok' exclude group: 'org.elasticsearch', module: 'elasticsearch-grok'
} }
} }
forbiddenApisMain { forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures' replaceSignatureFiles 'jdk-signatures'
} }
@@ -27,7 +27,7 @@ dependencies {
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
testCompile(project(":test:framework")) { testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-nio' exclude group: 'org.elasticsearch', module: 'elasticsearch-nio'
} }
} }
@@ -20,29 +20,29 @@ apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm' apply plugin: 'nebula.maven-scm'
dependencies { dependencies {
// do not add non-test compile dependencies to secure-sm without a good reason to do so // do not add non-test compile dependencies to secure-sm without a good reason to do so
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}" testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
testCompile(project(":test:framework")) { testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-secure-sm' exclude group: 'org.elasticsearch', module: 'elasticsearch-secure-sm'
} }
} }
forbiddenApisMain { forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures' replaceSignatureFiles 'jdk-signatures'
} }
// JAR hell is part of core which we do not want to add as a dependency // JAR hell is part of core which we do not want to add as a dependency
jarHell.enabled = false jarHell.enabled = false
testingConventions { testingConventions {
naming.clear() naming.clear()
naming { naming {
Tests { Tests {
baseClass 'junit.framework.TestCase' baseClass 'junit.framework.TestCase'
}
} }
}
} }
View file
@@ -19,25 +19,25 @@
apply plugin: "nebula.maven-scm" apply plugin: "nebula.maven-scm"
dependencies { dependencies {
compile project(':libs:elasticsearch-core') compile project(':libs:elasticsearch-core')
testCompile(project(":test:framework")) { testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-ssl-config' exclude group: 'org.elasticsearch', module: 'elasticsearch-ssl-config'
} }
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}" testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
} }
forbiddenApisMain { forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures' replaceSignatureFiles 'jdk-signatures'
} }
forbiddenPatterns { forbiddenPatterns {
exclude '**/*.key' exclude '**/*.key'
exclude '**/*.pem' exclude '**/*.pem'
exclude '**/*.p12' exclude '**/*.p12'
exclude '**/*.jks' exclude '**/*.jks'
} }
View file
@@ -22,33 +22,33 @@ apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm' apply plugin: 'nebula.maven-scm'
dependencies { dependencies {
compile project(':libs:elasticsearch-core') compile project(':libs:elasticsearch-core')
compile "org.yaml:snakeyaml:${versions.snakeyaml}" compile "org.yaml:snakeyaml:${versions.snakeyaml}"
compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${versions.jackson}" compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${versions.jackson}" compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${versions.jackson}" compile "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${versions.jackson}"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}" testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}" testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
testCompile(project(":test:framework")) { testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-x-content' exclude group: 'org.elasticsearch', module: 'elasticsearch-x-content'
} }
} }
forbiddenApisMain { forbiddenApisMain {
// x-content does not depend on server // x-content does not depend on server
// TODO: Need to decide how we want to handle for forbidden signatures with the changes to core // TODO: Need to decide how we want to handle for forbidden signatures with the changes to core
replaceSignatureFiles 'jdk-signatures' replaceSignatureFiles 'jdk-signatures'
} }
thirdPartyAudit.ignoreMissingClasses ( thirdPartyAudit.ignoreMissingClasses(
// from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml) // from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml)
'com.fasterxml.jackson.databind.ObjectMapper', 'com.fasterxml.jackson.databind.ObjectMapper',
) )
dependencyLicenses { dependencyLicenses {
View file
@@ -18,6 +18,6 @@
*/ */
esplugin { esplugin {
description 'Adds aggregations whose inputs are a list of numeric fields and whose output includes a matrix.' description 'Adds aggregations whose inputs are a list of numeric fields and whose output includes a matrix.'
classname 'org.elasticsearch.search.aggregations.matrix.MatrixAggregationPlugin' classname 'org.elasticsearch.search.aggregations.matrix.MatrixAggregationPlugin'
} }
View file
@@ -18,11 +18,11 @@
*/ */
esplugin { esplugin {
description 'Adds "built in" analyzers to Elasticsearch.' description 'Adds "built in" analyzers to Elasticsearch.'
classname 'org.elasticsearch.analysis.common.CommonAnalysisPlugin' classname 'org.elasticsearch.analysis.common.CommonAnalysisPlugin'
extendedPlugins = ['lang-painless'] extendedPlugins = ['lang-painless']
} }
dependencies { dependencies {
compileOnly project(':modules:lang-painless') compileOnly project(':modules:lang-painless')
} }
View file
@@ -19,7 +19,7 @@
configure(subprojects.findAll { it.parent.path == project.path }) { configure(subprojects.findAll { it.parent.path == project.path }) {
group = 'org.elasticsearch.plugin' // for modules which publish client jars group = 'org.elasticsearch.plugin' // for modules which publish client jars
apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.esplugin'
esplugin { esplugin {
@@ -28,12 +28,12 @@ configure(subprojects.findAll { it.parent.path == project.path }) {
} }
if (project.file('src/main/packaging').exists()) { if (project.file('src/main/packaging').exists()) {
throw new InvalidModelException("Modules cannot contain packaging files") throw new InvalidModelException("Modules cannot contain packaging files")
} }
if (project.file('src/main/bin').exists()) { if (project.file('src/main/bin').exists()) {
throw new InvalidModelException("Modules cannot contain bin files") throw new InvalidModelException("Modules cannot contain bin files")
} }
if (project.file('src/main/config').exists()) { if (project.file('src/main/config').exists()) {
throw new InvalidModelException("Modules cannot contain config files") throw new InvalidModelException("Modules cannot contain config files")
} }
} }
View file
@@ -18,13 +18,13 @@
*/ */
esplugin { esplugin {
description 'Module for ingest processors that do not require additional security permissions or have large dependencies and resources' description 'Module for ingest processors that do not require additional security permissions or have large dependencies and resources'
classname 'org.elasticsearch.ingest.common.IngestCommonPlugin' classname 'org.elasticsearch.ingest.common.IngestCommonPlugin'
extendedPlugins = ['lang-painless'] extendedPlugins = ['lang-painless']
} }
dependencies { dependencies {
compileOnly project(':modules:lang-painless') compileOnly project(':modules:lang-painless')
compile project(':libs:elasticsearch-grok') compile project(':libs:elasticsearch-grok')
compile project(':libs:elasticsearch-dissect') compile project(':libs:elasticsearch-dissect')
} }
View file
@@ -26,7 +26,7 @@ esplugin {
dependencies { dependencies {
// Upgrade to 2.10.0 or higher when jackson-core gets upgraded to 2.9.x. Blocked by #27032 // Upgrade to 2.10.0 or higher when jackson-core gets upgraded to 2.9.x. Blocked by #27032
compile ('com.maxmind.geoip2:geoip2:2.9.0') compile('com.maxmind.geoip2:geoip2:2.9.0')
// geoip2 dependencies: // geoip2 dependencies:
compile("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") compile("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}")
compile("com.fasterxml.jackson.core:jackson-databind:2.8.11.3") compile("com.fasterxml.jackson.core:jackson-databind:2.8.11.3")
@@ -36,7 +36,7 @@ dependencies {
} }
task copyDefaultGeoIp2DatabaseFiles(type: Copy) { task copyDefaultGeoIp2DatabaseFiles(type: Copy) {
from { zipTree(configurations.testCompile.files.find { it.name.contains('geolite2-databases')}) } from { zipTree(configurations.testCompile.files.find { it.name.contains('geolite2-databases') }) }
into "${project.buildDir}/ingest-geoip" into "${project.buildDir}/ingest-geoip"
include "*.mmdb" include "*.mmdb"
} }
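The `from { ... }` closure in the task above is the lazy form deliberately: wrapping the expression in a closure defers resolving `configurations.testCompile` until the Copy task actually runs, instead of during project configuration. The contrast, sketched:

// eager: resolves the configuration while the project is being configured
from zipTree(configurations.testCompile.files.find { it.name.contains('geolite2-databases') })
// lazy: the closure is only invoked when the Copy task executes
from { zipTree(configurations.testCompile.files.find { it.name.contains('geolite2-databases') }) }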
@@ -49,7 +49,7 @@ bundlePlugin {
} }
} }
thirdPartyAudit.ignoreMissingClasses ( thirdPartyAudit.ignoreMissingClasses(
// geoip WebServiceClient needs apache http client, but we're not using WebServiceClient: // geoip WebServiceClient needs apache http client, but we're not using WebServiceClient:
'org.apache.http.HttpEntity', 'org.apache.http.HttpEntity',
'org.apache.http.HttpHost', 'org.apache.http.HttpHost',
View file
@@ -17,6 +17,7 @@
* under the License. * under the License.
*/ */
import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask; import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask;
esplugin { esplugin {
description 'An easy, safe and fast scripting language for Elasticsearch' description 'An easy, safe and fast scripting language for Elasticsearch'
classname 'org.elasticsearch.painless.PainlessPlugin' classname 'org.elasticsearch.painless.PainlessPlugin'
@@ -75,12 +76,12 @@ dependencies {
testClusters { testClusters {
generateContextCluster { generateContextCluster {
testDistribution = 'DEFAULT' testDistribution = 'DEFAULT'
} }
} }
task generateContextDoc(type: DefaultTestClustersTask) { task generateContextDoc(type: DefaultTestClustersTask) {
useCluster testClusters.generateContextCluster useCluster testClusters.generateContextCluster
doFirst { doFirst {
project.javaexec { project.javaexec {
main = 'org.elasticsearch.painless.ContextDocGenerator' main = 'org.elasticsearch.painless.ContextDocGenerator'
@@ -123,9 +124,9 @@ task regenLexer(type: JavaExec) {
systemProperty 'user.country', 'US' systemProperty 'user.country', 'US'
systemProperty 'user.variant', '' systemProperty 'user.variant', ''
args '-Werror', args '-Werror',
'-package', 'org.elasticsearch.painless.antlr', '-package', 'org.elasticsearch.painless.antlr',
'-o', outputPath, '-o', outputPath,
"${file(grammarPath)}/PainlessLexer.g4" "${file(grammarPath)}/PainlessLexer.g4"
} }
task regenParser(type: JavaExec) { task regenParser(type: JavaExec) {
@@ -137,12 +138,12 @@ task regenParser(type: JavaExec) {
systemProperty 'user.country', 'US' systemProperty 'user.country', 'US'
systemProperty 'user.variant', '' systemProperty 'user.variant', ''
args '-Werror', args '-Werror',
'-package', 'org.elasticsearch.painless.antlr', '-package', 'org.elasticsearch.painless.antlr',
'-no-listener', '-no-listener',
'-visitor', '-visitor',
// '-Xlog', // '-Xlog',
'-o', outputPath, '-o', outputPath,
"${file(grammarPath)}/PainlessParser.g4" "${file(grammarPath)}/PainlessParser.g4"
} }
task regen { task regen {
@@ -153,20 +154,20 @@ task regen {
ant.move(file: "${outputPath}/PainlessParser.tokens", toDir: grammarPath) ant.move(file: "${outputPath}/PainlessParser.tokens", toDir: grammarPath)
// make the generated classes package private // make the generated classes package private
ant.replaceregexp(match: 'public ((interface|class) \\QPainless\\E\\w+)', ant.replaceregexp(match: 'public ((interface|class) \\QPainless\\E\\w+)',
replace: '\\1', replace: '\\1',
encoding: 'UTF-8') { encoding: 'UTF-8') {
fileset(dir: outputPath, includes: 'Painless*.java') fileset(dir: outputPath, includes: 'Painless*.java')
} }
// make the lexer abstract // make the lexer abstract
ant.replaceregexp(match: '(class \\QPainless\\ELexer)', ant.replaceregexp(match: '(class \\QPainless\\ELexer)',
replace: 'abstract \\1', replace: 'abstract \\1',
encoding: 'UTF-8') { encoding: 'UTF-8') {
fileset(dir: outputPath, includes: 'PainlessLexer.java') fileset(dir: outputPath, includes: 'PainlessLexer.java')
} }
// nuke timestamps/filenames in generated files // nuke timestamps/filenames in generated files
ant.replaceregexp(match: '\\Q// Generated from \\E.*', ant.replaceregexp(match: '\\Q// Generated from \\E.*',
replace: '\\/\\/ ANTLR GENERATED CODE: DO NOT EDIT', replace: '\\/\\/ ANTLR GENERATED CODE: DO NOT EDIT',
encoding: 'UTF-8') { encoding: 'UTF-8') {
fileset(dir: outputPath, includes: 'Painless*.java') fileset(dir: outputPath, includes: 'Painless*.java')
} }
// remove tabs in antlr generated files // remove tabs in antlr generated files
View file
@@ -18,6 +18,6 @@
*/ */
esplugin { esplugin {
description 'Adds advanced field mappers' description 'Adds advanced field mappers'
classname 'org.elasticsearch.index.mapper.MapperExtrasPlugin' classname 'org.elasticsearch.index.mapper.MapperExtrasPlugin'
} }
View file
@@ -18,12 +18,12 @@
*/ */
esplugin { esplugin {
description 'Percolator module adds capability to index queries and query these queries by specifying documents' description 'Percolator module adds capability to index queries and query these queries by specifying documents'
classname 'org.elasticsearch.percolator.PercolatorPlugin' classname 'org.elasticsearch.percolator.PercolatorPlugin'
} }
dependencies { dependencies {
testCompile project(path: ':modules:parent-join', configuration: 'runtime') testCompile project(path: ':modules:parent-join', configuration: 'runtime')
} }
dependencyLicenses { dependencyLicenses {
View file
@@ -55,7 +55,7 @@ dependencies {
testCompile project(path: ':modules:parent-join', configuration: 'runtime') testCompile project(path: ':modules:parent-join', configuration: 'runtime')
} }
thirdPartyAudit.ignoreMissingClasses ( thirdPartyAudit.ignoreMissingClasses(
// Commons logging // Commons logging
'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener', 'javax.servlet.ServletContextListener',
@@ -65,8 +65,8 @@ thirdPartyAudit.ignoreMissingClasses (
) )
forbiddenPatterns { forbiddenPatterns {
// PKCS#12 files are not UTF-8 // PKCS#12 files are not UTF-8
exclude '**/*.p12' exclude '**/*.p12'
} }
// Support for testing reindex-from-remote against old Elasticsearch versions // Support for testing reindex-from-remote against old Elasticsearch versions
@@ -116,12 +116,12 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
dependsOn project.configurations.oldesFixture dependsOn project.configurations.oldesFixture
dependsOn unzip dependsOn unzip
executable = "${BuildParams.runtimeJavaHome}/bin/java" executable = "${BuildParams.runtimeJavaHome}/bin/java"
env 'CLASSPATH', "${ -> project.configurations.oldesFixture.asPath }" env 'CLASSPATH', "${-> project.configurations.oldesFixture.asPath}"
env 'JAVA_HOME', "${ -> getJavaHome(it, 8)}" env 'JAVA_HOME', "${-> getJavaHome(it, 8)}"
args 'oldes.OldElasticsearch', args 'oldes.OldElasticsearch',
baseDir, baseDir,
unzip.temporaryDir, unzip.temporaryDir,
version == '090' version == '090'
waitCondition = { fixture, ant -> waitCondition = { fixture, ant ->
// the fixture writes the ports file when Elasticsearch's HTTP service // the fixture writes the ports file when Elasticsearch's HTTP service
// is ready, so we can just wait for the file to exist // is ready, so we can just wait for the file to exist
@@ -135,8 +135,8 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
systemProperty "tests.fromOld", "true" systemProperty "tests.fromOld", "true"
/* Use a closure on the string to delay evaluation until right before we /* Use a closure on the string to delay evaluation until right before we
* run the integration tests so that we can be sure that the file is * run the integration tests so that we can be sure that the file is
* ready. */ * ready. */
nonInputProperties.systemProperty "es${version}.port", "${ -> fixture.addressAndPort }" nonInputProperties.systemProperty "es${version}.port", "${-> fixture.addressAndPort}"
} }
} }
} }
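The comment above explains the `${-> ...}` form that recurs throughout these files: a zero-argument closure embedded in a GString is evaluated each time the string is rendered, not when the task is configured. A minimal sketch of the difference, using a hypothetical `fixture` and property name:

// eager: interpolated at configuration time, before the fixture has a port
systemProperty "es.port", "${fixture.addressAndPort}"
// lazy: the closure defers the lookup until the string is actually read
nonInputProperties.systemProperty "es.port", "${-> fixture.addressAndPort}"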
View file
@@ -22,8 +22,8 @@ import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.test.AntFixture import org.elasticsearch.gradle.test.AntFixture
esplugin { esplugin {
description 'Module for URL repository' description 'Module for URL repository'
classname 'org.elasticsearch.plugin.repository.url.URLRepositoryPlugin' classname 'org.elasticsearch.plugin.repository.url.URLRepositoryPlugin'
} }
// This directory is shared between two URL repositories and one FS repository in YAML integration tests // This directory is shared between two URL repositories and one FS repository in YAML integration tests
@@ -31,21 +31,23 @@ File repositoryDir = new File(project.buildDir, "shared-repository")
/** A task to start the URLFixture which exposes the repositoryDir over HTTP **/ /** A task to start the URLFixture which exposes the repositoryDir over HTTP **/
task urlFixture(type: AntFixture) { task urlFixture(type: AntFixture) {
dependsOn testClasses dependsOn testClasses
doFirst { doFirst {
repositoryDir.mkdirs() repositoryDir.mkdirs()
} }
env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" env 'CLASSPATH', "${-> project.sourceSets.test.runtimeClasspath.asPath}"
executable = "${BuildParams.runtimeJavaHome}/bin/java" executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.repositories.url.URLFixture', baseDir, "${repositoryDir.absolutePath}" args 'org.elasticsearch.repositories.url.URLFixture', baseDir, "${repositoryDir.absolutePath}"
} }
integTest { integTest {
dependsOn urlFixture dependsOn urlFixture
} }
testClusters.integTest { testClusters.integTest {
// repositoryDir is used by a FS repository to create snapshots // repositoryDir is used by a FS repository to create snapshots
setting 'path.repo', "${repositoryDir.absolutePath}", PropertyNormalization.IGNORE_VALUE setting 'path.repo', "${repositoryDir.absolutePath}", PropertyNormalization.IGNORE_VALUE
// repositoryDir is used by two URL repositories to restore snapshots // repositoryDir is used by two URL repositories to restore snapshots
setting 'repositories.url.allowed_urls', { "http://snapshot.test*,http://${urlFixture.addressAndPort}" }, PropertyNormalization.IGNORE_VALUE setting 'repositories.url.allowed_urls', {
"http://snapshot.test*,http://${urlFixture.addressAndPort}"
}, PropertyNormalization.IGNORE_VALUE
} }
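Two things are happening in the `allowed_urls` setting above: the closure delays interpolating the fixture's port until the cluster starts, and `PropertyNormalization.IGNORE_VALUE` presumably keeps the ephemeral value out of the task's input normalization, so a new port does not invalidate up-to-date checks. The shape of the idiom, sketched under that assumption:

testClusters.integTest {
  // lazy value, excluded from input snapshotting because it changes per run
  setting 'repositories.url.allowed_urls',
    { "http://snapshot.test*,http://${urlFixture.addressAndPort}" },
    PropertyNormalization.IGNORE_VALUE
}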
View file
@@ -18,8 +18,8 @@
*/ */
esplugin { esplugin {
description 'Integrates Elasticsearch with systemd' description 'Integrates Elasticsearch with systemd'
classname 'org.elasticsearch.systemd.SystemdPlugin' classname 'org.elasticsearch.systemd.SystemdPlugin'
} }
integTest.enabled = false integTest.enabled = false
View file
@@ -28,21 +28,21 @@ import org.elasticsearch.gradle.test.RestIntegTestTask
* maybe figure out a way to run all tests from core with netty4/network? * maybe figure out a way to run all tests from core with netty4/network?
*/ */
esplugin { esplugin {
description 'Netty 4 based transport implementation' description 'Netty 4 based transport implementation'
classname 'org.elasticsearch.transport.Netty4Plugin' classname 'org.elasticsearch.transport.Netty4Plugin'
} }
compileTestJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked" compileTestJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
dependencies { dependencies {
// network stack // network stack
compile "io.netty:netty-buffer:${versions.netty}" compile "io.netty:netty-buffer:${versions.netty}"
compile "io.netty:netty-codec:${versions.netty}" compile "io.netty:netty-codec:${versions.netty}"
compile "io.netty:netty-codec-http:${versions.netty}" compile "io.netty:netty-codec-http:${versions.netty}"
compile "io.netty:netty-common:${versions.netty}" compile "io.netty:netty-common:${versions.netty}"
compile "io.netty:netty-handler:${versions.netty}" compile "io.netty:netty-handler:${versions.netty}"
compile "io.netty:netty-resolver:${versions.netty}" compile "io.netty:netty-resolver:${versions.netty}"
compile "io.netty:netty-transport:${versions.netty}" compile "io.netty:netty-transport:${versions.netty}"
} }
dependencyLicenses { dependencyLicenses {
@@ -50,19 +50,19 @@ dependencyLicenses {
} }
test { test {
/* /*
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
* other if we allow them to set the number of available processors as it's set-once in Netty. * other if we allow them to set the number of available processors as it's set-once in Netty.
*/ */
systemProperty 'es.set.netty.runtime.available.processors', 'false' systemProperty 'es.set.netty.runtime.available.processors', 'false'
} }
integTestRunner { integTestRunner {
/* /*
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
* other if we allow them to set the number of available processors as it's set-once in Netty. * other if we allow them to set the number of available processors as it's set-once in Netty.
*/ */
systemProperty 'es.set.netty.runtime.available.processors', 'false' systemProperty 'es.set.netty.runtime.available.processors', 'false'
} }
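The identical Netty guard appears in both `test` and `integTestRunner`; a hypothetical consolidation (not what this build does) would declare it once for every `Test` task in the project:

// apply the set-once Netty processor guard to all Test tasks in one place
tasks.withType(Test) {
  systemProperty 'es.set.netty.runtime.available.processors', 'false'
}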
TaskProvider<Test> pooledTest = tasks.register("pooledTest", Test) { TaskProvider<Test> pooledTest = tasks.register("pooledTest", Test) {
@@ -82,126 +82,126 @@ testClusters.pooledIntegTest {
check.dependsOn(pooledTest, pooledIntegTest) check.dependsOn(pooledTest, pooledIntegTest)
thirdPartyAudit { thirdPartyAudit {
ignoreMissingClasses ( ignoreMissingClasses(
// classes are missing // classes are missing
// from io.netty.handler.codec.protobuf.ProtobufDecoder (netty) // from io.netty.handler.codec.protobuf.ProtobufDecoder (netty)
'com.google.protobuf.ExtensionRegistry', 'com.google.protobuf.ExtensionRegistry',
'com.google.protobuf.MessageLite$Builder', 'com.google.protobuf.MessageLite$Builder',
'com.google.protobuf.MessageLite', 'com.google.protobuf.MessageLite',
'com.google.protobuf.Parser', 'com.google.protobuf.Parser',
// from io.netty.logging.CommonsLoggerFactory (netty) // from io.netty.logging.CommonsLoggerFactory (netty)
'org.apache.commons.logging.Log', 'org.apache.commons.logging.Log',
'org.apache.commons.logging.LogFactory', 'org.apache.commons.logging.LogFactory',
// from Log4j (deliberate, Netty will fallback to Log4j 2) // from Log4j (deliberate, Netty will fallback to Log4j 2)
'org.apache.log4j.Level', 'org.apache.log4j.Level',
'org.apache.log4j.Logger', 'org.apache.log4j.Logger',
// from io.netty.handler.ssl.OpenSslEngine (netty) // from io.netty.handler.ssl.OpenSslEngine (netty)
'io.netty.internal.tcnative.Buffer', 'io.netty.internal.tcnative.Buffer',
'io.netty.internal.tcnative.Library', 'io.netty.internal.tcnative.Library',
'io.netty.internal.tcnative.SSL', 'io.netty.internal.tcnative.SSL',
'io.netty.internal.tcnative.SSLContext', 'io.netty.internal.tcnative.SSLContext',
'io.netty.internal.tcnative.SSLPrivateKeyMethod', 'io.netty.internal.tcnative.SSLPrivateKeyMethod',
// from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) // from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty)
'org.bouncycastle.cert.X509v3CertificateBuilder', 'org.bouncycastle.cert.X509v3CertificateBuilder',
'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter',
'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder', 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder',
'org.bouncycastle.jce.provider.BouncyCastleProvider', 'org.bouncycastle.jce.provider.BouncyCastleProvider',
'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder',
// from io.netty.handler.ssl.JettyNpnSslEngine (netty) // from io.netty.handler.ssl.JettyNpnSslEngine (netty)
'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider',
'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider',
'org.eclipse.jetty.npn.NextProtoNego', 'org.eclipse.jetty.npn.NextProtoNego',
// from io.netty.handler.codec.marshalling.ChannelBufferByteInput (netty) // from io.netty.handler.codec.marshalling.ChannelBufferByteInput (netty)
'org.jboss.marshalling.ByteInput', 'org.jboss.marshalling.ByteInput',
// from io.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty) // from io.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty)
'org.jboss.marshalling.ByteOutput', 'org.jboss.marshalling.ByteOutput',
// from io.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty) // from io.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty)
'org.jboss.marshalling.Marshaller', 'org.jboss.marshalling.Marshaller',
// from io.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty) // from io.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty)
'org.jboss.marshalling.MarshallerFactory', 'org.jboss.marshalling.MarshallerFactory',
'org.jboss.marshalling.MarshallingConfiguration', 'org.jboss.marshalling.MarshallingConfiguration',
'org.jboss.marshalling.Unmarshaller', 'org.jboss.marshalling.Unmarshaller',
// from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional
'org.slf4j.helpers.FormattingTuple', 'org.slf4j.helpers.FormattingTuple',
'org.slf4j.helpers.MessageFormatter', 'org.slf4j.helpers.MessageFormatter',
'org.slf4j.Logger', 'org.slf4j.Logger',
'org.slf4j.LoggerFactory', 'org.slf4j.LoggerFactory',
'org.slf4j.spi.LocationAwareLogger', 'org.slf4j.spi.LocationAwareLogger',
'com.google.protobuf.ExtensionRegistryLite', 'com.google.protobuf.ExtensionRegistryLite',
'com.google.protobuf.MessageLiteOrBuilder', 'com.google.protobuf.MessageLiteOrBuilder',
'com.google.protobuf.nano.CodedOutputByteBufferNano', 'com.google.protobuf.nano.CodedOutputByteBufferNano',
'com.google.protobuf.nano.MessageNano', 'com.google.protobuf.nano.MessageNano',
'com.jcraft.jzlib.Deflater', 'com.jcraft.jzlib.Deflater',
'com.jcraft.jzlib.Inflater', 'com.jcraft.jzlib.Inflater',
'com.jcraft.jzlib.JZlib$WrapperType', 'com.jcraft.jzlib.JZlib$WrapperType',
'com.jcraft.jzlib.JZlib', 'com.jcraft.jzlib.JZlib',
'com.ning.compress.BufferRecycler', 'com.ning.compress.BufferRecycler',
'com.ning.compress.lzf.ChunkDecoder', 'com.ning.compress.lzf.ChunkDecoder',
'com.ning.compress.lzf.ChunkEncoder', 'com.ning.compress.lzf.ChunkEncoder',
'com.ning.compress.lzf.LZFEncoder', 'com.ning.compress.lzf.LZFEncoder',
'com.ning.compress.lzf.util.ChunkDecoderFactory', 'com.ning.compress.lzf.util.ChunkDecoderFactory',
'com.ning.compress.lzf.util.ChunkEncoderFactory', 'com.ning.compress.lzf.util.ChunkEncoderFactory',
'lzma.sdk.lzma.Encoder', 'lzma.sdk.lzma.Encoder',
'net.jpountz.lz4.LZ4Compressor', 'net.jpountz.lz4.LZ4Compressor',
'net.jpountz.lz4.LZ4Factory', 'net.jpountz.lz4.LZ4Factory',
'net.jpountz.lz4.LZ4FastDecompressor', 'net.jpountz.lz4.LZ4FastDecompressor',
'net.jpountz.xxhash.XXHash32', 'net.jpountz.xxhash.XXHash32',
'net.jpountz.xxhash.XXHashFactory', 'net.jpountz.xxhash.XXHashFactory',
'io.netty.internal.tcnative.CertificateCallback', 'io.netty.internal.tcnative.CertificateCallback',
'io.netty.internal.tcnative.CertificateVerifier', 'io.netty.internal.tcnative.CertificateVerifier',
'io.netty.internal.tcnative.SessionTicketKey', 'io.netty.internal.tcnative.SessionTicketKey',
'io.netty.internal.tcnative.SniHostNameMatcher', 'io.netty.internal.tcnative.SniHostNameMatcher',
'io.netty.internal.tcnative.SSL', 'io.netty.internal.tcnative.SSL',
'org.eclipse.jetty.alpn.ALPN$ClientProvider', 'org.eclipse.jetty.alpn.ALPN$ClientProvider',
'org.eclipse.jetty.alpn.ALPN$ServerProvider', 'org.eclipse.jetty.alpn.ALPN$ServerProvider',
'org.eclipse.jetty.alpn.ALPN', 'org.eclipse.jetty.alpn.ALPN',
'org.conscrypt.AllocatedBuffer', 'org.conscrypt.AllocatedBuffer',
'org.conscrypt.BufferAllocator', 'org.conscrypt.BufferAllocator',
'org.conscrypt.Conscrypt', 'org.conscrypt.Conscrypt',
'org.conscrypt.HandshakeListener' 'org.conscrypt.HandshakeListener'
) )
ignoreViolations ( ignoreViolations(
'io.netty.util.internal.PlatformDependent0', 'io.netty.util.internal.PlatformDependent0',
'io.netty.util.internal.PlatformDependent0$1', 'io.netty.util.internal.PlatformDependent0$1',
'io.netty.util.internal.PlatformDependent0$2', 'io.netty.util.internal.PlatformDependent0$2',
'io.netty.util.internal.PlatformDependent0$3', 'io.netty.util.internal.PlatformDependent0$3',
'io.netty.util.internal.PlatformDependent0$5', 'io.netty.util.internal.PlatformDependent0$5',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef', 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef', 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields', 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields', 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields', 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode', 'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField', 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField', 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField', 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess', 'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess', 'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess',
'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator' 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator'
) )
} }
rootProject.globalInfo.ready { rootProject.globalInfo.ready {
if (BuildParams.inFipsJvm == false) { if (BuildParams.inFipsJvm == false) {
// BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in // BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in
// a FIPS JVM with BouncyCastleFIPS Provider // a FIPS JVM with BouncyCastleFIPS Provider
thirdPartyAudit.ignoreMissingClasses( thirdPartyAudit.ignoreMissingClasses(
'org.bouncycastle.asn1.x500.X500Name' 'org.bouncycastle.asn1.x500.X500Name'
) )
} }
} }
View file
@@ -34,7 +34,7 @@ dependencyLicenses {
mapping from: /morfologik-.*/, to: 'lucene' mapping from: /morfologik-.*/, to: 'lucene'
} }
thirdPartyAudit.ignoreMissingClasses ( thirdPartyAudit.ignoreMissingClasses(
// we don't use the morfologik-fsa polish stemmer // we don't use the morfologik-fsa polish stemmer
'morfologik.stemming.polish.PolishStemmer' 'morfologik.stemming.polish.PolishStemmer'
) )
View file
@@ -17,8 +17,8 @@
* under the License. * under the License.
*/ */
subprojects { subprojects {
apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.testclusters'
} }
// only configure immediate children of plugins dir // only configure immediate children of plugins dir
@@ -29,7 +29,7 @@ configure(subprojects.findAll { it.parent.path == project.path }) {
esplugin { esplugin {
// for local ES plugins, the name of the plugin is the same as the directory // for local ES plugins, the name of the plugin is the same as the directory
name project.name name project.name
licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
noticeFile rootProject.file('NOTICE.txt') noticeFile rootProject.file('NOTICE.txt')
} }
View file
@@ -26,7 +26,7 @@ esplugin {
} }
versions << [ versions << [
'azure': '0.9.3', 'azure' : '0.9.3',
'jersey': '1.13' 'jersey': '1.13'
] ]
@@ -45,15 +45,15 @@ dependencies {
compile "com.sun.jersey:jersey-client:${versions.jersey}" compile "com.sun.jersey:jersey-client:${versions.jersey}"
compile "com.sun.jersey:jersey-core:${versions.jersey}" compile "com.sun.jersey:jersey-core:${versions.jersey}"
compile "com.sun.jersey:jersey-json:${versions.jersey}" compile "com.sun.jersey:jersey-json:${versions.jersey}"
compile 'org.codehaus.jettison:jettison:1.1' compile 'org.codehaus.jettison:jettison:1.1'
compile 'com.sun.xml.bind:jaxb-impl:2.2.3-1' compile 'com.sun.xml.bind:jaxb-impl:2.2.3-1'
compile 'org.codehaus.jackson:jackson-core-asl:1.9.2' compile 'org.codehaus.jackson:jackson-core-asl:1.9.2'
compile 'org.codehaus.jackson:jackson-mapper-asl:1.9.2' compile 'org.codehaus.jackson:jackson-mapper-asl:1.9.2'
compile 'org.codehaus.jackson:jackson-jaxrs:1.9.2' compile 'org.codehaus.jackson:jackson-jaxrs:1.9.2'
compile 'org.codehaus.jackson:jackson-xc:1.9.2' compile 'org.codehaus.jackson:jackson-xc:1.9.2'
// HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here, // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here,
// and whitelist this hack in JarHell // and whitelist this hack in JarHell
compile 'javax.xml.bind:jaxb-api:2.2.2' compile 'javax.xml.bind:jaxb-api:2.2.2'
} }
@@ -73,14 +73,14 @@ task createKey(type: LoggedExec) {
executable = "${BuildParams.runtimeJavaHome}/bin/keytool" executable = "${BuildParams.runtimeJavaHome}/bin/keytool"
standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8')) standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
args '-genkey', args '-genkey',
'-alias', 'test-node', '-alias', 'test-node',
'-keystore', keystore, '-keystore', keystore,
'-keyalg', 'RSA', '-keyalg', 'RSA',
'-keysize', '2048', '-keysize', '2048',
'-validity', '712', '-validity', '712',
'-dname', 'CN=' + host, '-dname', 'CN=' + host,
'-keypass', 'keypass', '-keypass', 'keypass',
'-storepass', 'keypass' '-storepass', 'keypass'
} }
// add keystore to test classpath: it expects it there // add keystore to test classpath: it expects it there
@@ -101,7 +101,7 @@ dependencyLicenses {
mapping from: /jaxb-.*/, to: 'jaxb' mapping from: /jaxb-.*/, to: 'jaxb'
} }
thirdPartyAudit.ignoreMissingClasses ( thirdPartyAudit.ignoreMissingClasses(
'javax.activation.ActivationDataFlavor', 'javax.activation.ActivationDataFlavor',
'javax.activation.DataContentHandler', 'javax.activation.DataContentHandler',
'javax.activation.DataHandler', 'javax.activation.DataHandler',
@@ -110,36 +110,36 @@ thirdPartyAudit.ignoreMissingClasses (
'javax.activation.FileTypeMap', 'javax.activation.FileTypeMap',
'javax.activation.MimeType', 'javax.activation.MimeType',
'javax.activation.MimeTypeParseException', 'javax.activation.MimeTypeParseException',
'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener', 'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger', 'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy', 'org.apache.log.Hierarchy',
'org.apache.log.Logger', 'org.apache.log.Logger',
'org.eclipse.persistence.descriptors.ClassDescriptor', 'org.eclipse.persistence.descriptors.ClassDescriptor',
'org.eclipse.persistence.internal.oxm.MappingNodeValue', 'org.eclipse.persistence.internal.oxm.MappingNodeValue',
'org.eclipse.persistence.internal.oxm.TreeObjectBuilder', 'org.eclipse.persistence.internal.oxm.TreeObjectBuilder',
'org.eclipse.persistence.internal.oxm.XPathFragment', 'org.eclipse.persistence.internal.oxm.XPathFragment',
'org.eclipse.persistence.internal.oxm.XPathNode', 'org.eclipse.persistence.internal.oxm.XPathNode',
'org.eclipse.persistence.internal.queries.ContainerPolicy', 'org.eclipse.persistence.internal.queries.ContainerPolicy',
'org.eclipse.persistence.jaxb.JAXBContext', 'org.eclipse.persistence.jaxb.JAXBContext',
'org.eclipse.persistence.jaxb.JAXBHelper', 'org.eclipse.persistence.jaxb.JAXBHelper',
'org.eclipse.persistence.mappings.DatabaseMapping', 'org.eclipse.persistence.mappings.DatabaseMapping',
'org.eclipse.persistence.mappings.converters.TypeConversionConverter', 'org.eclipse.persistence.mappings.converters.TypeConversionConverter',
'org.eclipse.persistence.mappings.foundation.AbstractCompositeDirectCollectionMapping', 'org.eclipse.persistence.mappings.foundation.AbstractCompositeDirectCollectionMapping',
'org.eclipse.persistence.oxm.XMLContext', 'org.eclipse.persistence.oxm.XMLContext',
'org.eclipse.persistence.oxm.XMLDescriptor', 'org.eclipse.persistence.oxm.XMLDescriptor',
'org.eclipse.persistence.oxm.XMLField', 'org.eclipse.persistence.oxm.XMLField',
'org.eclipse.persistence.oxm.mappings.XMLCompositeCollectionMapping', 'org.eclipse.persistence.oxm.mappings.XMLCompositeCollectionMapping',
'org.eclipse.persistence.sessions.DatabaseSession', 'org.eclipse.persistence.sessions.DatabaseSession',
'org.jvnet.fastinfoset.VocabularyApplicationData', 'org.jvnet.fastinfoset.VocabularyApplicationData',
'org.jvnet.staxex.Base64Data', 'org.jvnet.staxex.Base64Data',
'org.jvnet.staxex.XMLStreamReaderEx', 'org.jvnet.staxex.XMLStreamReaderEx',
'org.jvnet.staxex.XMLStreamWriterEx', 'org.jvnet.staxex.XMLStreamWriterEx',
'org.osgi.framework.Bundle', 'org.osgi.framework.Bundle',
'org.osgi.framework.BundleActivator', 'org.osgi.framework.BundleActivator',
'org.osgi.framework.BundleContext', 'org.osgi.framework.BundleContext',
'org.osgi.framework.BundleEvent', 'org.osgi.framework.BundleEvent',
'org.osgi.framework.SynchronousBundleListener', 'org.osgi.framework.SynchronousBundleListener',
'com.sun.xml.fastinfoset.stax.StAXDocumentParser', 'com.sun.xml.fastinfoset.stax.StAXDocumentParser',
'com.sun.xml.fastinfoset.stax.StAXDocumentSerializer' 'com.sun.xml.fastinfoset.stax.StAXDocumentSerializer'
) )
View file
@@ -57,11 +57,11 @@ task writeTestJavaPolicy {
} }
final File javaPolicy = file("${tmp}/java.policy") final File javaPolicy = file("${tmp}/java.policy")
javaPolicy.write( javaPolicy.write(
[ [
"grant {", "grant {",
" permission java.util.PropertyPermission \"com.amazonaws.sdk.ec2MetadataServiceEndpointOverride\", \"write\";", " permission java.util.PropertyPermission \"com.amazonaws.sdk.ec2MetadataServiceEndpointOverride\", \"write\";",
"};" "};"
].join("\n")) ].join("\n"))
} }
} }
@@ -69,7 +69,7 @@ test {
dependsOn writeTestJavaPolicy dependsOn writeTestJavaPolicy
// this is needed for insecure plugins, remove if possible! // this is needed for insecure plugins, remove if possible!
systemProperty 'tests.artifact', project.name systemProperty 'tests.artifact', project.name
// this is needed to manipulate com.amazonaws.sdk.ec2MetadataServiceEndpointOverride system property // this is needed to manipulate com.amazonaws.sdk.ec2MetadataServiceEndpointOverride system property
// it would be better to disable the security manager entirely with `systemProperty 'tests.security.manager', 'false'` // it would be better to disable the security manager entirely with `systemProperty 'tests.security.manager', 'false'`
systemProperty 'java.security.policy', "file://${buildDir}/tmp/java.policy" systemProperty 'java.security.policy', "file://${buildDir}/tmp/java.policy"
@@ -80,7 +80,7 @@ check {
dependsOn 'qa:amazon-ec2:check' dependsOn 'qa:amazon-ec2:check'
} }
thirdPartyAudit.ignoreMissingClasses ( thirdPartyAudit.ignoreMissingClasses(
// classes are missing // classes are missing
'com.amazonaws.jmespath.JmesPathEvaluationVisitor', 'com.amazonaws.jmespath.JmesPathEvaluationVisitor',
'com.amazonaws.jmespath.JmesPathExpression', 'com.amazonaws.jmespath.JmesPathExpression',
@@ -102,14 +102,14 @@ thirdPartyAudit.ignoreMissingClasses (
'software.amazon.ion.system.IonSystemBuilder', 'software.amazon.ion.system.IonSystemBuilder',
'software.amazon.ion.system.IonTextWriterBuilder', 'software.amazon.ion.system.IonTextWriterBuilder',
'software.amazon.ion.system.IonWriterBuilder', 'software.amazon.ion.system.IonWriterBuilder',
'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener', 'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger', 'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy', 'org.apache.log.Hierarchy',
'org.apache.log.Logger' 'org.apache.log.Logger'
) )
thirdPartyAudit.ignoreMissingClasses ( thirdPartyAudit.ignoreMissingClasses(
'javax.xml.bind.DatatypeConverter', 'javax.xml.bind.DatatypeConverter',
'javax.xml.bind.JAXBContext' 'javax.xml.bind.JAXBContext'
) )
View file
@@ -29,18 +29,18 @@ apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test' apply plugin: 'elasticsearch.rest-test'
dependencies { dependencies {
testCompile project(path: ':plugins:discovery-ec2', configuration: 'runtime') testCompile project(path: ':plugins:discovery-ec2', configuration: 'runtime')
} }
final int ec2NumberOfNodes = 3 final int ec2NumberOfNodes = 3
Map<String, Object> expansions = [ Map<String, Object> expansions = [
'expected_nodes': ec2NumberOfNodes 'expected_nodes': ec2NumberOfNodes
] ]
processTestResources { processTestResources {
inputs.properties(expansions) inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions) MavenFilteringHack.filter(it, expansions)
} }
// disable default test task, use specialized ones below // disable default test task, use specialized ones below
@@ -58,55 +58,55 @@ integTest.enabled = false
* custom Java security policy to work. * custom Java security policy to work.
*/ */
['KeyStore', 'EnvVariables', 'SystemProperties', 'ContainerCredentials', 'InstanceProfile'].forEach { action -> ['KeyStore', 'EnvVariables', 'SystemProperties', 'ContainerCredentials', 'InstanceProfile'].forEach { action ->
AntFixture fixture = tasks.create(name: "ec2Fixture${action}", type: AntFixture) { AntFixture fixture = tasks.create(name: "ec2Fixture${action}", type: AntFixture) {
dependsOn compileTestJava dependsOn compileTestJava
env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" env 'CLASSPATH', "${-> project.sourceSets.test.runtimeClasspath.asPath}"
executable = "${BuildParams.runtimeJavaHome}/bin/java" executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, "${buildDir}/testclusters/integTest${action}-1/config/unicast_hosts.txt" args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, "${buildDir}/testclusters/integTest${action}-1/config/unicast_hosts.txt"
} }
tasks.create(name: "integTest${action}", type: RestIntegTestTask) { tasks.create(name: "integTest${action}", type: RestIntegTestTask) {
dependsOn fixture, project(':plugins:discovery-ec2').bundlePlugin dependsOn fixture, project(':plugins:discovery-ec2').bundlePlugin
} }
check.dependsOn("integTest${action}") check.dependsOn("integTest${action}")
testClusters."integTest${action}" { testClusters."integTest${action}" {
numberOfNodes = ec2NumberOfNodes numberOfNodes = ec2NumberOfNodes
plugin file(project(':plugins:discovery-ec2').bundlePlugin.archiveFile) plugin file(project(':plugins:discovery-ec2').bundlePlugin.archiveFile)
setting 'discovery.seed_providers', 'ec2' setting 'discovery.seed_providers', 'ec2'
setting 'network.host', '_ec2_' setting 'network.host', '_ec2_'
setting 'discovery.ec2.endpoint', { "http://${-> fixture.addressAndPort}" }, IGNORE_VALUE setting 'discovery.ec2.endpoint', { "http://${-> fixture.addressAndPort}" }, IGNORE_VALUE
systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> fixture.addressAndPort}" }, IGNORE_VALUE systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> fixture.addressAndPort}" }, IGNORE_VALUE
} }
} }
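The `forEach` above stamps out one fixture, one REST test task, and one cluster per credential-provider strategy, and then configures each cluster via dynamic container lookup: `testClusters."integTest${action}"` resolves the cluster by name. Stripped to a skeleton, as a sketch:

['KeyStore', 'EnvVariables'].forEach { action ->
  // one REST integration test task per credential strategy
  tasks.create(name: "integTest${action}", type: RestIntegTestTask)
  // dynamic GString property access resolves the matching cluster by name
  testClusters."integTest${action}" {
    numberOfNodes = 3
  }
}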
// Extra config for KeyStore // Extra config for KeyStore
testClusters.integTestKeyStore { testClusters.integTestKeyStore {
keystore 'discovery.ec2.access_key', 'ec2_integration_test_access_key' keystore 'discovery.ec2.access_key', 'ec2_integration_test_access_key'
keystore 'discovery.ec2.secret_key', 'ec2_integration_test_secret_key' keystore 'discovery.ec2.secret_key', 'ec2_integration_test_secret_key'
} }
// Extra config for EnvVariables // Extra config for EnvVariables
testClusters.integTestEnvVariables { testClusters.integTestEnvVariables {
environment 'AWS_ACCESS_KEY_ID', 'ec2_integration_test_access_key' environment 'AWS_ACCESS_KEY_ID', 'ec2_integration_test_access_key'
environment 'AWS_SECRET_ACCESS_KEY', 'ec2_integration_test_secret_key' environment 'AWS_SECRET_ACCESS_KEY', 'ec2_integration_test_secret_key'
} }
// Extra config for SystemProperties // Extra config for SystemProperties
testClusters.integTestSystemProperties { testClusters.integTestSystemProperties {
systemProperty 'aws.accessKeyId', 'ec2_integration_test_access_key' systemProperty 'aws.accessKeyId', 'ec2_integration_test_access_key'
systemProperty 'aws.secretKey', 'ec2_integration_test_secret_key' systemProperty 'aws.secretKey', 'ec2_integration_test_secret_key'
} }
// Extra config for ContainerCredentials // Extra config for ContainerCredentials
ec2FixtureContainerCredentials.env 'ACTIVATE_CONTAINER_CREDENTIALS', true ec2FixtureContainerCredentials.env 'ACTIVATE_CONTAINER_CREDENTIALS', true
testClusters.integTestContainerCredentials { testClusters.integTestContainerCredentials {
environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI',
{ "http://${-> tasks.findByName("ec2FixtureContainerCredentials").addressAndPort}/ecs_credentials_endpoint" }, IGNORE_VALUE { "http://${-> tasks.findByName("ec2FixtureContainerCredentials").addressAndPort}/ecs_credentials_endpoint" }, IGNORE_VALUE
} }
// Extra config for InstanceProfile // Extra config for InstanceProfile
View file
@@ -32,16 +32,16 @@ check {
test { test {
// this is needed for insecure plugins, remove if possible! // this is needed for insecure plugins, remove if possible!
systemProperty 'tests.artifact', project.name systemProperty 'tests.artifact', project.name
} }
thirdPartyAudit.ignoreMissingClasses ( thirdPartyAudit.ignoreMissingClasses(
// classes are missing // classes are missing
'com.google.common.base.Splitter', 'com.google.common.base.Splitter',
'com.google.common.collect.Lists', 'com.google.common.collect.Lists',
'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener', 'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger', 'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy', 'org.apache.log.Hierarchy',
'org.apache.log.Logger' 'org.apache.log.Logger'
) )
View file
@@ -30,41 +30,41 @@ apply plugin: 'elasticsearch.rest-test'
final int gceNumberOfNodes = 3 final int gceNumberOfNodes = 3
dependencies { dependencies {
testCompile project(path: ':plugins:discovery-gce', configuration: 'runtime') testCompile project(path: ':plugins:discovery-gce', configuration: 'runtime')
} }
/** A task to start the GCEFixture which emulates a GCE service **/ /** A task to start the GCEFixture which emulates a GCE service **/
task gceFixture(type: AntFixture) { task gceFixture(type: AntFixture) {
dependsOn compileTestJava dependsOn compileTestJava
env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" env 'CLASSPATH', "${-> project.sourceSets.test.runtimeClasspath.asPath}"
executable = "${BuildParams.runtimeJavaHome}/bin/java" executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.cloud.gce.GCEFixture', baseDir, "${buildDir}/testclusters/integTest-1/config/unicast_hosts.txt" args 'org.elasticsearch.cloud.gce.GCEFixture', baseDir, "${buildDir}/testclusters/integTest-1/config/unicast_hosts.txt"
} }
Map<String, Object> expansions = [ Map<String, Object> expansions = [
'expected_nodes': gceNumberOfNodes 'expected_nodes': gceNumberOfNodes
] ]
processTestResources { processTestResources {
inputs.properties(expansions) inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions) MavenFilteringHack.filter(it, expansions)
} }
integTest { integTest {
dependsOn gceFixture, project(':plugins:discovery-gce').bundlePlugin dependsOn gceFixture, project(':plugins:discovery-gce').bundlePlugin
} }
testClusters.integTest { testClusters.integTest {
numberOfNodes = gceNumberOfNodes numberOfNodes = gceNumberOfNodes
plugin file(project(':plugins:discovery-gce').bundlePlugin.archiveFile) plugin file(project(':plugins:discovery-gce').bundlePlugin.archiveFile)
// use gce fixture for Auth calls instead of http://metadata.google.internal // use gce fixture for Auth calls instead of http://metadata.google.internal
environment 'GCE_METADATA_HOST', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE environment 'GCE_METADATA_HOST', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
// allows configuring hidden settings (`cloud.gce.host` and `cloud.gce.root_url`) // allows configuring hidden settings (`cloud.gce.host` and `cloud.gce.root_url`)
systemProperty 'es.allow_reroute_gce_settings', 'true' systemProperty 'es.allow_reroute_gce_settings', 'true'
setting 'discovery.seed_providers', 'gce' setting 'discovery.seed_providers', 'gce'
// use gce fixture for metadata server calls instead of http://metadata.google.internal // use gce fixture for metadata server calls instead of http://metadata.google.internal
setting 'cloud.gce.host', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE setting 'cloud.gce.host', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
// use gce fixture for API calls instead of https://www.googleapis.com // use gce fixture for API calls instead of https://www.googleapis.com
setting 'cloud.gce.root_url', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE setting 'cloud.gce.root_url', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
} }
View file
@@ -9,20 +9,20 @@ gradle.projectsEvaluated {
} }
configure(project('painless-whitelist')) { configure(project('painless-whitelist')) {
configurations.all { configurations.all {
resolutionStrategy.dependencySubstitution { resolutionStrategy.dependencySubstitution {
substitute module('org.elasticsearch.plugin:elasticsearch-scripting-painless-spi') with project(':modules:lang-painless:spi') substitute module('org.elasticsearch.plugin:elasticsearch-scripting-painless-spi') with project(':modules:lang-painless:spi')
substitute module('org.elasticsearch.test:logger-usage') with project(':test:logger-usage') substitute module('org.elasticsearch.test:logger-usage') with project(':test:logger-usage')
}
} }
}
} }
configure(project('security-authorization-engine')) { configure(project('security-authorization-engine')) {
configurations.all { configurations.all {
resolutionStrategy.dependencySubstitution { resolutionStrategy.dependencySubstitution {
substitute module('org.elasticsearch.plugin:x-pack-core') with project(':x-pack:plugin:core') substitute module('org.elasticsearch.plugin:x-pack-core') with project(':x-pack:plugin:core')
substitute module('org.elasticsearch.client:elasticsearch-rest-high-level-client') with project(':client:rest-high-level') substitute module('org.elasticsearch.client:elasticsearch-rest-high-level-client') with project(':client:rest-high-level')
substitute module('org.elasticsearch.test:logger-usage') with project(':test:logger-usage') substitute module('org.elasticsearch.test:logger-usage') with project(':test:logger-usage')
}
} }
} }
}
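Both example projects lean on the same idiom: `dependencySubstitution` swaps a published module coordinate for the project in this build, so the examples compile against in-tree code rather than a released artifact. Its minimal form, sketched:

configurations.all {
  resolutionStrategy.dependencySubstitution {
    // resolve the published coordinate from the local project instead
    substitute module('org.elasticsearch.test:logger-usage') with project(':test:logger-usage')
  }
}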
View file
@@ -20,15 +20,15 @@ apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.esplugin'
esplugin { esplugin {
name 'custom-suggester' name 'custom-suggester'
description 'An example plugin showing how to write and register a custom suggester' description 'An example plugin showing how to write and register a custom suggester'
classname 'org.elasticsearch.example.customsuggester.CustomSuggesterPlugin' classname 'org.elasticsearch.example.customsuggester.CustomSuggesterPlugin'
licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
noticeFile rootProject.file('NOTICE.txt') noticeFile rootProject.file('NOTICE.txt')
} }
testClusters.integTest { testClusters.integTest {
numberOfNodes = 2 numberOfNodes = 2
} }
// this plugin has no unit tests, only rest tests // this plugin has no unit tests, only rest tests
View file
@@ -34,7 +34,7 @@ test.enabled = false
task exampleFixture(type: org.elasticsearch.gradle.test.AntFixture) { task exampleFixture(type: org.elasticsearch.gradle.test.AntFixture) {
dependsOn testClasses dependsOn testClasses
env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" env 'CLASSPATH', "${-> project.sourceSets.test.runtimeClasspath.asPath}"
executable = "${BuildParams.runtimeJavaHome}/bin/java" executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.example.resthandler.ExampleFixture', baseDir, 'TEST' args 'org.elasticsearch.example.resthandler.ExampleFixture', baseDir, 'TEST'
} }
@@ -42,7 +42,7 @@ task exampleFixture(type: org.elasticsearch.gradle.test.AntFixture) {
integTest { integTest {
dependsOn exampleFixture dependsOn exampleFixture
runner { runner {
nonInputProperties.systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }" nonInputProperties.systemProperty 'external.address', "${-> exampleFixture.addressAndPort}"
} }
} }
@@ -50,4 +50,4 @@ testingConventions.naming {
IT { IT {
baseClass 'org.elasticsearch.test.ESTestCase' baseClass 'org.elasticsearch.test.ESTestCase'
} }
} }
View file
@@ -25,9 +25,9 @@ esplugin {
} }
versions << [ versions << [
'tika': '1.22', 'tika' : '1.22',
'pdfbox': '2.0.16', 'pdfbox': '2.0.16',
'poi': '4.0.1', 'poi' : '4.0.1',
'mime4j': '0.8.3' 'mime4j': '0.8.3'
] ]
@@ -84,12 +84,12 @@ forbiddenPatterns {
exclude '**/*.vsdx' exclude '**/*.vsdx'
} }
thirdPartyAudit{ thirdPartyAudit {
ignoreMissingClasses() ignoreMissingClasses()
} }
thirdPartyAudit.onlyIf { thirdPartyAudit.onlyIf {
// FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit, // FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit,
// rather than provide a long list of exclusions, disable the check on FIPS. // rather than provide a long list of exclusions, disable the check on FIPS.
BuildParams.inFipsJvm == false BuildParams.inFipsJvm == false
} }
View file
@@ -41,26 +41,26 @@ dependencyLicenses {
} }
thirdPartyAudit { thirdPartyAudit {
ignoreMissingClasses ( ignoreMissingClasses(
// Optional and not enabled by Elasticsearch // Optional and not enabled by Elasticsearch
'org.slf4j.Logger', 'org.slf4j.Logger',
'org.slf4j.LoggerFactory' 'org.slf4j.LoggerFactory'
) )
ignoreViolations ( ignoreViolations(
// uses internal java api: sun.misc.Unsafe // uses internal java api: sun.misc.Unsafe
'com.google.common.cache.Striped64', 'com.google.common.cache.Striped64',
'com.google.common.cache.Striped64$1', 'com.google.common.cache.Striped64$1',
'com.google.common.cache.Striped64$Cell', 'com.google.common.cache.Striped64$Cell',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1' 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1'
) )
} }
check { check {
@ -69,10 +69,10 @@ check {
} }
testClusters { testClusters {
integTest { integTest {
keystore 'azure.client.integration_test.account', 'azure_account' keystore 'azure.client.integration_test.account', 'azure_account'
keystore 'azure.client.integration_test.key', 'azure_key' keystore 'azure.client.integration_test.key', 'azure_key'
} }
} }
String azureAccount = System.getenv("azure_storage_account") String azureAccount = System.getenv("azure_storage_account")

View file

@ -40,49 +40,49 @@ String azureBasePath = System.getenv("azure_storage_base_path")
String azureSasToken = System.getenv("azure_storage_sas_token") String azureSasToken = System.getenv("azure_storage_sas_token")
if (!azureAccount && !azureKey && !azureContainer && !azureBasePath && !azureSasToken) { if (!azureAccount && !azureKey && !azureContainer && !azureBasePath && !azureSasToken) {
azureAccount = 'azure_integration_test_account' azureAccount = 'azure_integration_test_account'
azureKey = 'YXp1cmVfaW50ZWdyYXRpb25fdGVzdF9rZXk=' // The key is "azure_integration_test_key" encoded using base64 azureKey = 'YXp1cmVfaW50ZWdyYXRpb25fdGVzdF9rZXk=' // The key is "azure_integration_test_key" encoded using base64
azureContainer = 'container' azureContainer = 'container'
azureBasePath = '' azureBasePath = ''
azureSasToken = '' azureSasToken = ''
useFixture = true useFixture = true
} }
Map<String, Object> expansions = [ Map<String, Object> expansions = [
'container': azureContainer, 'container': azureContainer,
'base_path': azureBasePath + "_integration_tests" 'base_path': azureBasePath + "_integration_tests"
] ]
processTestResources { processTestResources {
inputs.properties(expansions) inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions) MavenFilteringHack.filter(it, expansions)
} }
integTest { integTest {
dependsOn project(':plugins:repository-azure').bundlePlugin dependsOn project(':plugins:repository-azure').bundlePlugin
} }
testClusters.integTest { testClusters.integTest {
plugin file(project(':plugins:repository-azure').bundlePlugin.archiveFile) plugin file(project(':plugins:repository-azure').bundlePlugin.archiveFile)
keystore 'azure.client.integration_test.account', azureAccount keystore 'azure.client.integration_test.account', azureAccount
if (azureKey != null && azureKey.isEmpty() == false) { if (azureKey != null && azureKey.isEmpty() == false) {
keystore 'azure.client.integration_test.key', azureKey keystore 'azure.client.integration_test.key', azureKey
} }
if (azureSasToken != null && azureSasToken.isEmpty() == false) { if (azureSasToken != null && azureSasToken.isEmpty() == false) {
keystore 'azure.client.integration_test.sas_token', azureSasToken keystore 'azure.client.integration_test.sas_token', azureSasToken
} }
if (useFixture) { if (useFixture) {
def azureAddress = { def azureAddress = {
int ephemeralPort = project(':test:fixtures:azure-fixture').postProcessFixture.ext."test.fixtures.azure-fixture.tcp.8091" int ephemeralPort = project(':test:fixtures:azure-fixture').postProcessFixture.ext."test.fixtures.azure-fixture.tcp.8091"
assert ephemeralPort > 0 assert ephemeralPort > 0
'http://127.0.0.1:' + ephemeralPort 'http://127.0.0.1:' + ephemeralPort
}
// Use a closure on the string to delay evaluation until tests are executed. The endpoint_suffix is used
// in a hacky way to change the protocol and endpoint. We must fix that.
setting 'azure.client.integration_test.endpoint_suffix',
{ "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=${ -> azureAddress() }" }, IGNORE_VALUE
String firstPartOfSeed = BuildParams.testSeed.tokenize(':').get(0)
setting 'thread_pool.repository_azure.max', (Math.abs(Long.parseUnsignedLong(firstPartOfSeed, 16) % 10) + 1).toString(), System.getProperty('ignore.tests.seed') == null ? DEFAULT : IGNORE_VALUE
} }
// Use a closure on the string to delay evaluation until tests are executed. The endpoint_suffix is used
// in a hacky way to change the protocol and endpoint. We must fix that.
setting 'azure.client.integration_test.endpoint_suffix',
{ "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=${-> azureAddress()}" }, IGNORE_VALUE
String firstPartOfSeed = BuildParams.testSeed.tokenize(':').get(0)
setting 'thread_pool.repository_azure.max', (Math.abs(Long.parseUnsignedLong(firstPartOfSeed, 16) % 10) + 1).toString(), System.getProperty('ignore.tests.seed') == null ? DEFAULT : IGNORE_VALUE
}
} }
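Two details in the hunk above are easy to read past. The hard-coded key is documented as the base64 encoding of "azure_integration_test_key", and the thread_pool setting derives a deterministic size between 1 and 10 from the test seed, so reproducing a seed also reproduces the pool size. A standalone Groovy sketch confirming both; the seed value is invented for illustration:

  // Verify the documented encoding of the hard-coded test key.
  assert new String(Base64.decoder.decode('YXp1cmVfaW50ZWdyYXRpb25fdGVzdF9rZXk=')) ==
    'azure_integration_test_key'

  // Map the first token of a seed onto a deterministic value in 1..10.
  String firstPartOfSeed = 'DEADBEEF:CAFE'.tokenize(':').get(0)
  long poolMax = Math.abs(Long.parseUnsignedLong(firstPartOfSeed, 16) % 10) + 1
  assert poolMax == 10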

View file

@ -67,57 +67,57 @@ dependencyLicenses {
} }
thirdPartyAudit { thirdPartyAudit {
ignoreViolations ( ignoreViolations(
// uses internal java api: sun.misc.Unsafe // uses internal java api: sun.misc.Unsafe
'com.google.protobuf.UnsafeUtil', 'com.google.protobuf.UnsafeUtil',
'com.google.protobuf.UnsafeUtil$1', 'com.google.protobuf.UnsafeUtil$1',
'com.google.protobuf.UnsafeUtil$JvmMemoryAccessor', 'com.google.protobuf.UnsafeUtil$JvmMemoryAccessor',
'com.google.protobuf.UnsafeUtil$MemoryAccessor', 'com.google.protobuf.UnsafeUtil$MemoryAccessor',
'com.google.common.cache.Striped64', 'com.google.common.cache.Striped64',
'com.google.common.cache.Striped64$1', 'com.google.common.cache.Striped64$1',
'com.google.common.cache.Striped64$Cell', 'com.google.common.cache.Striped64$Cell',
'com.google.common.hash.Striped64', 'com.google.common.hash.Striped64',
'com.google.common.hash.Striped64$1', 'com.google.common.hash.Striped64$1',
'com.google.common.hash.Striped64$Cell', 'com.google.common.hash.Striped64$Cell',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
) )
ignoreMissingClasses ( ignoreMissingClasses(
'com.google.appengine.api.datastore.Blob', 'com.google.appengine.api.datastore.Blob',
'com.google.appengine.api.datastore.DatastoreService', 'com.google.appengine.api.datastore.DatastoreService',
'com.google.appengine.api.datastore.DatastoreServiceFactory', 'com.google.appengine.api.datastore.DatastoreServiceFactory',
'com.google.appengine.api.datastore.Entity', 'com.google.appengine.api.datastore.Entity',
'com.google.appengine.api.datastore.Key', 'com.google.appengine.api.datastore.Key',
'com.google.appengine.api.datastore.KeyFactory', 'com.google.appengine.api.datastore.KeyFactory',
'com.google.appengine.api.datastore.PreparedQuery', 'com.google.appengine.api.datastore.PreparedQuery',
'com.google.appengine.api.datastore.Query', 'com.google.appengine.api.datastore.Query',
'com.google.appengine.api.memcache.Expiration', 'com.google.appengine.api.memcache.Expiration',
'com.google.appengine.api.memcache.MemcacheService', 'com.google.appengine.api.memcache.MemcacheService',
'com.google.appengine.api.memcache.MemcacheServiceFactory', 'com.google.appengine.api.memcache.MemcacheServiceFactory',
'com.google.appengine.api.urlfetch.FetchOptions$Builder', 'com.google.appengine.api.urlfetch.FetchOptions$Builder',
'com.google.appengine.api.urlfetch.FetchOptions', 'com.google.appengine.api.urlfetch.FetchOptions',
'com.google.appengine.api.urlfetch.HTTPHeader', 'com.google.appengine.api.urlfetch.HTTPHeader',
'com.google.appengine.api.urlfetch.HTTPMethod', 'com.google.appengine.api.urlfetch.HTTPMethod',
'com.google.appengine.api.urlfetch.HTTPRequest', 'com.google.appengine.api.urlfetch.HTTPRequest',
'com.google.appengine.api.urlfetch.HTTPResponse', 'com.google.appengine.api.urlfetch.HTTPResponse',
'com.google.appengine.api.urlfetch.URLFetchService', 'com.google.appengine.api.urlfetch.URLFetchService',
'com.google.appengine.api.urlfetch.URLFetchServiceFactory', 'com.google.appengine.api.urlfetch.URLFetchServiceFactory',
// commons-logging optional dependencies // commons-logging optional dependencies
'org.apache.avalon.framework.logger.Logger', 'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy', 'org.apache.log.Hierarchy',
'org.apache.log.Logger', 'org.apache.log.Logger',
// commons-logging provided dependencies // commons-logging provided dependencies
'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener' 'javax.servlet.ServletContextListener'
) )
} }
check { check {

View file

@ -33,7 +33,7 @@ apply plugin: 'elasticsearch.test.fixtures'
// TODO think about flattening qa:google-cloud-storage project into parent // TODO think about flattening qa:google-cloud-storage project into parent
dependencies { dependencies {
testCompile project(path: ':plugins:repository-gcs') testCompile project(path: ':plugins:repository-gcs')
} }
testFixtures.useFixture(':test:fixtures:gcs-fixture') testFixtures.useFixture(':test:fixtures:gcs-fixture')
@ -45,100 +45,100 @@ String gcsBasePath = System.getenv("google_storage_base_path")
File serviceAccountFile = null File serviceAccountFile = null
if (!gcsServiceAccount && !gcsBucket && !gcsBasePath) { if (!gcsServiceAccount && !gcsBucket && !gcsBasePath) {
serviceAccountFile = new File(project.buildDir, 'generated-resources/service_account_test.json') serviceAccountFile = new File(project.buildDir, 'generated-resources/service_account_test.json')
gcsBucket = 'bucket' gcsBucket = 'bucket'
gcsBasePath = 'integration_test' gcsBasePath = 'integration_test'
useFixture = true useFixture = true
} else if (!gcsServiceAccount || !gcsBucket || !gcsBasePath) { } else if (!gcsServiceAccount || !gcsBucket || !gcsBasePath) {
throw new IllegalArgumentException("not all options specified to run tests against external GCS service are present") throw new IllegalArgumentException("not all options specified to run tests against external GCS service are present")
} else { } else {
serviceAccountFile = new File(gcsServiceAccount) serviceAccountFile = new File(gcsServiceAccount)
} }
def encodedCredentials = { def encodedCredentials = {
Base64.encoder.encodeToString(Files.readAllBytes(serviceAccountFile.toPath())) Base64.encoder.encodeToString(Files.readAllBytes(serviceAccountFile.toPath()))
} }
def fixtureAddress = { fixture -> def fixtureAddress = { fixture ->
assert useFixture : 'closure should not be used without a fixture' assert useFixture: 'closure should not be used without a fixture'
int ephemeralPort = project(':test:fixtures:gcs-fixture').postProcessFixture.ext."test.fixtures.${fixture}.tcp.80" int ephemeralPort = project(':test:fixtures:gcs-fixture').postProcessFixture.ext."test.fixtures.${fixture}.tcp.80"
assert ephemeralPort > 0 assert ephemeralPort > 0
'http://127.0.0.1:' + ephemeralPort 'http://127.0.0.1:' + ephemeralPort
} }
/** A service account file that points to the Google Cloud Storage service emulated by the fixture **/ /** A service account file that points to the Google Cloud Storage service emulated by the fixture **/
task createServiceAccountFile() { task createServiceAccountFile() {
doLast { doLast {
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA") KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA")
keyPairGenerator.initialize(1024) keyPairGenerator.initialize(1024)
KeyPair keyPair = keyPairGenerator.generateKeyPair() KeyPair keyPair = keyPairGenerator.generateKeyPair()
String encodedKey = Base64.getEncoder().encodeToString(keyPair.private.getEncoded()) String encodedKey = Base64.getEncoder().encodeToString(keyPair.private.getEncoded())
serviceAccountFile.parentFile.mkdirs() serviceAccountFile.parentFile.mkdirs()
serviceAccountFile.setText("{\n" + serviceAccountFile.setText("{\n" +
' "type": "service_account",\n' + ' "type": "service_account",\n' +
' "project_id": "integration_test",\n' + ' "project_id": "integration_test",\n' +
' "private_key_id": "' + UUID.randomUUID().toString() + '",\n' + ' "private_key_id": "' + UUID.randomUUID().toString() + '",\n' +
' "private_key": "-----BEGIN PRIVATE KEY-----\\n' + encodedKey + '\\n-----END PRIVATE KEY-----\\n",\n' + ' "private_key": "-----BEGIN PRIVATE KEY-----\\n' + encodedKey + '\\n-----END PRIVATE KEY-----\\n",\n' +
' "client_email": "integration_test@appspot.gserviceaccount.com",\n' + ' "client_email": "integration_test@appspot.gserviceaccount.com",\n' +
' "client_id": "123456789101112130594"\n' + ' "client_id": "123456789101112130594"\n' +
'}', 'UTF-8') '}', 'UTF-8')
} }
} }
task thirdPartyTest (type: Test) { task thirdPartyTest(type: Test) {
if (useFixture) { if (useFixture) {
thirdPartyTest.dependsOn createServiceAccountFile thirdPartyTest.dependsOn createServiceAccountFile
nonInputProperties.systemProperty 'test.google.endpoint', "${ -> fixtureAddress('gcs-fixture-third-party') }" nonInputProperties.systemProperty 'test.google.endpoint', "${-> fixtureAddress('gcs-fixture-third-party')}"
nonInputProperties.systemProperty 'test.google.tokenURI', "${ -> fixtureAddress('gcs-fixture-third-party') }/o/oauth2/token" nonInputProperties.systemProperty 'test.google.tokenURI', "${-> fixtureAddress('gcs-fixture-third-party')}/o/oauth2/token"
gradle.taskGraph.whenReady { gradle.taskGraph.whenReady {
if (it.hasTask(gcsThirdPartyTests)) { if (it.hasTask(gcsThirdPartyTests)) {
throw new IllegalStateException("Tried to run third party tests but not all of the necessary environment variables " + throw new IllegalStateException("Tried to run third party tests but not all of the necessary environment variables " +
"'google_storage_service_account', 'google_storage_bucket', 'google_storage_base_path' are set.") "'google_storage_service_account', 'google_storage_bucket', 'google_storage_base_path' are set.")
} }
}
} }
}
include '**/GoogleCloudStorageThirdPartyTests.class' include '**/GoogleCloudStorageThirdPartyTests.class'
systemProperty 'tests.security.manager', false systemProperty 'tests.security.manager', false
systemProperty 'test.google.bucket', gcsBucket systemProperty 'test.google.bucket', gcsBucket
systemProperty 'test.google.base', gcsBasePath + "_third_party_tests_" + BuildParams.testSeed systemProperty 'test.google.base', gcsBasePath + "_third_party_tests_" + BuildParams.testSeed
nonInputProperties.systemProperty 'test.google.account', "${ -> encodedCredentials.call() }" nonInputProperties.systemProperty 'test.google.account', "${-> encodedCredentials.call()}"
} }
task gcsThirdPartyTests { task gcsThirdPartyTests {
dependsOn check dependsOn check
} }
integTest.mustRunAfter(thirdPartyTest) integTest.mustRunAfter(thirdPartyTest)
check.dependsOn thirdPartyTest check.dependsOn thirdPartyTest
Map<String, Object> expansions = [ Map<String, Object> expansions = [
'bucket': gcsBucket, 'bucket' : gcsBucket,
'base_path': gcsBasePath + "_integration_tests" 'base_path': gcsBasePath + "_integration_tests"
] ]
processTestResources { processTestResources {
inputs.properties(expansions) inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions) MavenFilteringHack.filter(it, expansions)
} }
integTest { integTest {
dependsOn project(':plugins:repository-gcs').bundlePlugin dependsOn project(':plugins:repository-gcs').bundlePlugin
} }
testClusters.integTest { testClusters.integTest {
plugin file(project(':plugins:repository-gcs').bundlePlugin.archiveFile) plugin file(project(':plugins:repository-gcs').bundlePlugin.archiveFile)
keystore 'gcs.client.integration_test.credentials_file', serviceAccountFile, IGNORE_VALUE keystore 'gcs.client.integration_test.credentials_file', serviceAccountFile, IGNORE_VALUE
if (useFixture) { if (useFixture) {
tasks.integTest.dependsOn createServiceAccountFile tasks.integTest.dependsOn createServiceAccountFile
/* Use a closure on the string to delay evaluation until tests are executed */ /* Use a closure on the string to delay evaluation until tests are executed */
setting 'gcs.client.integration_test.endpoint', { "${ -> fixtureAddress('gcs-fixture') }" }, IGNORE_VALUE setting 'gcs.client.integration_test.endpoint', { "${-> fixtureAddress('gcs-fixture')}" }, IGNORE_VALUE
setting 'gcs.client.integration_test.token_uri', { "${ -> fixtureAddress('gcs-fixture') }/o/oauth2/token" }, IGNORE_VALUE setting 'gcs.client.integration_test.token_uri', { "${-> fixtureAddress('gcs-fixture')}/o/oauth2/token" }, IGNORE_VALUE
} else { } else {
println "Using an external service to test the repository-gcs plugin" println "Using an external service to test the repository-gcs plugin"
} }
} }
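The createServiceAccountFile task above assembles the fake service-account JSON by string concatenation. For readers who find the escaped form hard to scan, here is the same document built with Groovy's groovy.json.JsonOutput; this is a sketch for reference only (the build keeps the concatenation, and the private key below is a placeholder):

  import groovy.json.JsonOutput

  def json = JsonOutput.prettyPrint(JsonOutput.toJson([
    type          : 'service_account',
    project_id    : 'integration_test',
    private_key_id: UUID.randomUUID().toString(),
    private_key   : '-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n',
    client_email  : 'integration_test@appspot.gserviceaccount.com',
    client_id     : '123456789101112130594'
  ]))
  println json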

View file

@ -70,17 +70,17 @@ dependencies {
// Set the keytab files in the classpath so that we can access them from test code without the security manager // Set the keytab files in the classpath so that we can access them from test code without the security manager
// freaking out. // freaking out.
if (isEclipse == false) { if (isEclipse == false) {
testRuntime files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab").parent) testRuntime files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab").parent)
} }
} }
normalization { normalization {
runtimeClasspath { runtimeClasspath {
// ignore generated keytab files for the purposes of build avoidance // ignore generated keytab files for the purposes of build avoidance
ignore '*.keytab' ignore '*.keytab'
// ignore fixture ports file which is on the classpath primarily to pacify the security manager // ignore fixture ports file which is on the classpath primarily to pacify the security manager
ignore '*HdfsFixture/**' ignore '*HdfsFixture/**'
} }
} }
dependencyLicenses { dependencyLicenses {
@ -95,7 +95,7 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture',
project.tasks.create(fixtureName, org.elasticsearch.gradle.test.AntFixture) { project.tasks.create(fixtureName, org.elasticsearch.gradle.test.AntFixture) {
dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
executable = "${BuildParams.runtimeJavaHome}/bin/java" executable = "${BuildParams.runtimeJavaHome}/bin/java"
env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }" env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
maxWaitInSeconds 60 maxWaitInSeconds 60
onlyIf { project(':test:fixtures:krb5kdc-fixture').buildFixture.enabled && BuildParams.inFipsJvm == false } onlyIf { project(':test:fixtures:krb5kdc-fixture').buildFixture.enabled && BuildParams.inFipsJvm == false }
waitCondition = { fixture, ant -> waitCondition = { fixture, ant ->
@ -122,7 +122,7 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture',
if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) { if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}") miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
miniHDFSArgs.add( miniHDFSArgs.add(
project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab") project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
) )
} }
@ -154,15 +154,15 @@ for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSec
if (integTestTaskName.contains("Ha")) { if (integTestTaskName.contains("Ha")) {
if (integTestTaskName.contains("Secure")) { if (integTestTaskName.contains("Secure")) {
Path path = buildDir.toPath() Path path = buildDir.toPath()
.resolve("fixtures") .resolve("fixtures")
.resolve("secureHaHdfsFixture") .resolve("secureHaHdfsFixture")
.resolve("ports") .resolve("ports")
nonInputProperties.systemProperty "test.hdfs-fixture.ports", path nonInputProperties.systemProperty "test.hdfs-fixture.ports", path
} else { } else {
Path path = buildDir.toPath() Path path = buildDir.toPath()
.resolve("fixtures") .resolve("fixtures")
.resolve("haHdfsFixture") .resolve("haHdfsFixture")
.resolve("ports") .resolve("ports")
nonInputProperties.systemProperty "test.hdfs-fixture.ports", path nonInputProperties.systemProperty "test.hdfs-fixture.ports", path
} }
classpath += files("$buildDir/fixtures") classpath += files("$buildDir/fixtures")
@ -170,13 +170,13 @@ for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSec
if (integTestTaskName.contains("Secure")) { if (integTestTaskName.contains("Secure")) {
if (disabledIntegTestTaskNames.contains(integTestTaskName) == false) { if (disabledIntegTestTaskNames.contains(integTestTaskName) == false) {
nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}" nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}" nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
jvmArgs "-Djava.security.krb5.conf=${krb5conf}" jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
nonInputProperties.systemProperty ( nonInputProperties.systemProperty(
"test.krb5.keytab.hdfs", "test.krb5.keytab.hdfs",
project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab") project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
) )
} }
} }
} }
@ -187,8 +187,8 @@ for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSec
if (integTestTaskName.contains("Secure")) { if (integTestTaskName.contains("Secure")) {
systemProperty "java.security.krb5.conf", krb5conf systemProperty "java.security.krb5.conf", krb5conf
extraConfigFile( extraConfigFile(
"repository-hdfs/krb5.keytab", "repository-hdfs/krb5.keytab",
file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE
) )
} }
} }
@ -202,9 +202,9 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
if (nativePath != null) { if (nativePath != null) {
Path path = Paths.get(nativePath); Path path = Paths.get(nativePath);
if (Files.isDirectory(path) && if (Files.isDirectory(path) &&
Files.exists(path.resolve("bin").resolve("winutils.exe")) && Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
Files.exists(path.resolve("bin").resolve("hadoop.dll")) && Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
Files.exists(path.resolve("bin").resolve("hdfs.dll"))) { Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
fixtureSupported = true fixtureSupported = true
} else { } else {
throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin"); throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin");
@ -274,27 +274,27 @@ integTestSecureHa.runner {
} }
thirdPartyAudit { thirdPartyAudit {
ignoreMissingClasses() ignoreMissingClasses()
ignoreViolations ( ignoreViolations(
// internal java api: sun.net.dns.ResolverConfiguration // internal java api: sun.net.dns.ResolverConfiguration
// internal java api: sun.net.util.IPAddressUtil // internal java api: sun.net.util.IPAddressUtil
'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver', 'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',
// internal java api: sun.misc.Unsafe // internal java api: sun.misc.Unsafe
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer', 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1', 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
'org.apache.hadoop.io.nativeio.NativeIO', 'org.apache.hadoop.io.nativeio.NativeIO',
'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm', 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot', 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',
// internal java api: sun.nio.ch.DirectBuffer // internal java api: sun.nio.ch.DirectBuffer
// internal java api: sun.misc.Cleaner // internal java api: sun.misc.Cleaner
'org.apache.hadoop.io.nativeio.NativeIO$POSIX', 'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
'org.apache.hadoop.crypto.CryptoStreamUtils', 'org.apache.hadoop.crypto.CryptoStreamUtils',
// internal java api: sun.misc.SignalHandler // internal java api: sun.misc.SignalHandler
'org.apache.hadoop.util.SignalLogger$Handler', 'org.apache.hadoop.util.SignalLogger$Handler',
) )
} }

View file

@ -49,7 +49,7 @@ dependencies {
compile "joda-time:joda-time:${versions.joda}" compile "joda-time:joda-time:${versions.joda}"
// HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here, // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here,
// and whitelist this hack in JarHell // and whitelist this hack in JarHell
compile 'javax.xml.bind:jaxb-api:2.2.2' compile 'javax.xml.bind:jaxb-api:2.2.2'
} }
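The HACK comment above refers to JEP 320: Java 9 stopped resolving the java.xml.bind module by default and Java 11 removed it entirely, so the API must come from the classpath. A small Groovy sketch of the failure mode the extra dependency avoids, assuming a Java 9+ runtime:

  // Without jaxb-api on the classpath this lookup fails on Java 9+;
  // with the dependency above it succeeds.
  try {
    Class.forName('javax.xml.bind.DatatypeConverter')
    println 'javax.xml.bind is available'
  } catch (ClassNotFoundException e) {
    println 'javax.xml.bind is missing; pull in javax.xml.bind:jaxb-api'
  }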
@ -149,9 +149,9 @@ if (useFixture) {
doLast { doLast {
minioDockerfile.parentFile.mkdirs() minioDockerfile.parentFile.mkdirs()
minioDockerfile.text = "FROM minio/minio:RELEASE.2019-01-23T23-18-58Z\n" + minioDockerfile.text = "FROM minio/minio:RELEASE.2019-01-23T23-18-58Z\n" +
"RUN mkdir -p /minio/data/${s3PermanentBucket}\n" + "RUN mkdir -p /minio/data/${s3PermanentBucket}\n" +
"ENV MINIO_ACCESS_KEY ${s3PermanentAccessKey}\n" + "ENV MINIO_ACCESS_KEY ${s3PermanentAccessKey}\n" +
"ENV MINIO_SECRET_KEY ${s3PermanentSecretKey}" "ENV MINIO_SECRET_KEY ${s3PermanentSecretKey}"
} }
} }
preProcessFixture { preProcessFixture {
@ -173,7 +173,7 @@ if (useFixture) {
thirdPartyTest { thirdPartyTest {
dependsOn tasks.bundlePlugin, tasks.postProcessFixture dependsOn tasks.bundlePlugin, tasks.postProcessFixture
nonInputProperties.systemProperty 'test.s3.endpoint', "${ -> minioAddress.call() }" nonInputProperties.systemProperty 'test.s3.endpoint', "${-> minioAddress.call()}"
} }
task integTestMinio(type: RestIntegTestTask) { task integTestMinio(type: RestIntegTestTask) {
@ -182,9 +182,9 @@ if (useFixture) {
runner { runner {
// Minio only supports a single access key, see https://github.com/minio/minio/pull/5968 // Minio only supports a single access key, see https://github.com/minio/minio/pull/5968
systemProperty 'tests.rest.blacklist', [ systemProperty 'tests.rest.blacklist', [
'repository_s3/30_repository_temporary_credentials/*', 'repository_s3/30_repository_temporary_credentials/*',
'repository_s3/40_repository_ec2_credentials/*', 'repository_s3/40_repository_ec2_credentials/*',
'repository_s3/50_repository_ecs_credentials/*' 'repository_s3/50_repository_ecs_credentials/*'
].join(",") ].join(",")
} }
} }
@ -203,11 +203,11 @@ if (useFixture) {
} else { } else {
integTest.runner { integTest.runner {
systemProperty 'tests.rest.blacklist', systemProperty 'tests.rest.blacklist',
[ [
'repository_s3/30_repository_temporary_credentials/*', 'repository_s3/30_repository_temporary_credentials/*',
'repository_s3/40_repository_ec2_credentials/*', 'repository_s3/40_repository_ec2_credentials/*',
'repository_s3/50_repository_ecs_credentials/*' 'repository_s3/50_repository_ecs_credentials/*'
].join(",") ].join(",")
} }
} }
@ -219,15 +219,15 @@ File s3FixtureFile = new File(parentFixtures, 's3Fixture.properties')
task s3FixtureProperties { task s3FixtureProperties {
outputs.file(s3FixtureFile) outputs.file(s3FixtureFile)
def s3FixtureOptions = [ def s3FixtureOptions = [
"tests.seed" : BuildParams.testSeed, "tests.seed" : BuildParams.testSeed,
"s3Fixture.permanent_bucket_name" : s3PermanentBucket, "s3Fixture.permanent_bucket_name" : s3PermanentBucket,
"s3Fixture.permanent_key" : s3PermanentAccessKey, "s3Fixture.permanent_key" : s3PermanentAccessKey,
"s3Fixture.temporary_bucket_name" : s3TemporaryBucket, "s3Fixture.temporary_bucket_name" : s3TemporaryBucket,
"s3Fixture.temporary_key" : s3TemporaryAccessKey, "s3Fixture.temporary_key" : s3TemporaryAccessKey,
"s3Fixture.temporary_session_token": s3TemporarySessionToken, "s3Fixture.temporary_session_token": s3TemporarySessionToken,
"s3Fixture.ec2_bucket_name" : s3EC2Bucket, "s3Fixture.ec2_bucket_name" : s3EC2Bucket,
"s3Fixture.ecs_bucket_name" : s3ECSBucket, "s3Fixture.ecs_bucket_name" : s3ECSBucket,
"s3Fixture.disableChunkedEncoding" : s3DisableChunkedEncoding "s3Fixture.disableChunkedEncoding" : s3DisableChunkedEncoding
] ]
doLast { doLast {
@ -241,22 +241,22 @@ task s3Fixture(type: AntFixture) {
dependsOn s3FixtureProperties dependsOn s3FixtureProperties
inputs.file(s3FixtureFile) inputs.file(s3FixtureFile)
env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" env 'CLASSPATH', "${-> project.sourceSets.test.runtimeClasspath.asPath}"
executable = "${BuildParams.runtimeJavaHome}/bin/java" executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3FixtureFile.getAbsolutePath() args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3FixtureFile.getAbsolutePath()
} }
processTestResources { processTestResources {
Map<String, Object> expansions = [ Map<String, Object> expansions = [
'permanent_bucket': s3PermanentBucket, 'permanent_bucket' : s3PermanentBucket,
'permanent_base_path': s3PermanentBasePath + "_integration_tests", 'permanent_base_path' : s3PermanentBasePath + "_integration_tests",
'temporary_bucket': s3TemporaryBucket, 'temporary_bucket' : s3TemporaryBucket,
'temporary_base_path': s3TemporaryBasePath + "_integration_tests", 'temporary_base_path' : s3TemporaryBasePath + "_integration_tests",
'ec2_bucket': s3EC2Bucket, 'ec2_bucket' : s3EC2Bucket,
'ec2_base_path': s3EC2BasePath, 'ec2_base_path' : s3EC2BasePath,
'ecs_bucket': s3ECSBucket, 'ecs_bucket' : s3ECSBucket,
'ecs_base_path': s3ECSBasePath, 'ecs_base_path' : s3ECSBasePath,
'disable_chunked_encoding': s3DisableChunkedEncoding, 'disable_chunked_encoding': s3DisableChunkedEncoding,
] ]
inputs.properties(expansions) inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions) MavenFilteringHack.filter(it, expansions)
@ -296,10 +296,10 @@ if (useFixture) {
dependsOn(project.s3Fixture, 'bundlePlugin') dependsOn(project.s3Fixture, 'bundlePlugin')
runner { runner {
systemProperty 'tests.rest.blacklist', [ systemProperty 'tests.rest.blacklist', [
'repository_s3/10_basic/*', 'repository_s3/10_basic/*',
'repository_s3/20_repository_permanent_credentials/*', 'repository_s3/20_repository_permanent_credentials/*',
'repository_s3/30_repository_temporary_credentials/*', 'repository_s3/30_repository_temporary_credentials/*',
'repository_s3/40_repository_ec2_credentials/*' 'repository_s3/40_repository_ec2_credentials/*'
].join(",") ].join(",")
} }
} }
@ -319,12 +319,12 @@ if (useFixture) {
} }
} }
thirdPartyAudit.ignoreMissingClasses ( thirdPartyAudit.ignoreMissingClasses(
// classes are missing // classes are missing
'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener', 'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger', 'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy', 'org.apache.log.Hierarchy',
'org.apache.log.Logger', 'org.apache.log.Logger',
'software.amazon.ion.IonReader', 'software.amazon.ion.IonReader',
'software.amazon.ion.IonSystem', 'software.amazon.ion.IonSystem',
@ -335,7 +335,7 @@ thirdPartyAudit.ignoreMissingClasses (
'software.amazon.ion.system.IonSystemBuilder', 'software.amazon.ion.system.IonSystemBuilder',
'software.amazon.ion.system.IonTextWriterBuilder', 'software.amazon.ion.system.IonTextWriterBuilder',
'software.amazon.ion.system.IonWriterBuilder', 'software.amazon.ion.system.IonWriterBuilder',
// We don't use the kms dependency // We don't use the kms dependency
'com.amazonaws.services.kms.AWSKMS', 'com.amazonaws.services.kms.AWSKMS',
'com.amazonaws.services.kms.AWSKMSClient', 'com.amazonaws.services.kms.AWSKMSClient',
'com.amazonaws.services.kms.model.DecryptRequest', 'com.amazonaws.services.kms.model.DecryptRequest',

View file

@ -21,145 +21,145 @@ import org.elasticsearch.gradle.info.BuildParams
apply plugin: "nebula.maven-scm" apply plugin: "nebula.maven-scm"
esplugin { esplugin {
description 'The nio transport.' description 'The nio transport.'
classname 'org.elasticsearch.transport.nio.NioTransportPlugin' classname 'org.elasticsearch.transport.nio.NioTransportPlugin'
} }
dependencies { dependencies {
compile project(':libs:elasticsearch-nio') compile project(':libs:elasticsearch-nio')
// network stack // network stack
compile "io.netty:netty-buffer:${versions.netty}" compile "io.netty:netty-buffer:${versions.netty}"
compile "io.netty:netty-codec:${versions.netty}" compile "io.netty:netty-codec:${versions.netty}"
compile "io.netty:netty-codec-http:${versions.netty}" compile "io.netty:netty-codec-http:${versions.netty}"
compile "io.netty:netty-common:${versions.netty}" compile "io.netty:netty-common:${versions.netty}"
compile "io.netty:netty-handler:${versions.netty}" compile "io.netty:netty-handler:${versions.netty}"
compile "io.netty:netty-resolver:${versions.netty}" compile "io.netty:netty-resolver:${versions.netty}"
compile "io.netty:netty-transport:${versions.netty}" compile "io.netty:netty-transport:${versions.netty}"
} }
dependencyLicenses { dependencyLicenses {
mapping from: /netty-.*/, to: 'netty' mapping from: /netty-.*/, to: 'netty'
} }
thirdPartyAudit { thirdPartyAudit {
ignoreMissingClasses ( ignoreMissingClasses(
// from io.netty.handler.codec.protobuf.ProtobufDecoder (netty) // from io.netty.handler.codec.protobuf.ProtobufDecoder (netty)
'com.google.protobuf.ExtensionRegistry', 'com.google.protobuf.ExtensionRegistry',
'com.google.protobuf.MessageLite$Builder', 'com.google.protobuf.MessageLite$Builder',
'com.google.protobuf.MessageLite', 'com.google.protobuf.MessageLite',
'com.google.protobuf.Parser', 'com.google.protobuf.Parser',
// from io.netty.logging.CommonsLoggerFactory (netty) // from io.netty.logging.CommonsLoggerFactory (netty)
'org.apache.commons.logging.Log', 'org.apache.commons.logging.Log',
'org.apache.commons.logging.LogFactory', 'org.apache.commons.logging.LogFactory',
// from Log4j (deliberate, Netty will fallback to Log4j 2) // from Log4j (deliberate, Netty will fallback to Log4j 2)
'org.apache.log4j.Level', 'org.apache.log4j.Level',
'org.apache.log4j.Logger', 'org.apache.log4j.Logger',
// from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) // from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty)
'org.bouncycastle.cert.X509v3CertificateBuilder', 'org.bouncycastle.cert.X509v3CertificateBuilder',
'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter',
'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder', 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder',
'org.bouncycastle.jce.provider.BouncyCastleProvider', 'org.bouncycastle.jce.provider.BouncyCastleProvider',
'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder',
// from io.netty.handler.ssl.JettyNpnSslEngine (netty) // from io.netty.handler.ssl.JettyNpnSslEngine (netty)
'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider',
'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider',
'org.eclipse.jetty.npn.NextProtoNego', 'org.eclipse.jetty.npn.NextProtoNego',
// from io.netty.handler.codec.marshalling.ChannelBufferByteInput (netty) // from io.netty.handler.codec.marshalling.ChannelBufferByteInput (netty)
'org.jboss.marshalling.ByteInput', 'org.jboss.marshalling.ByteInput',
// from io.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty) // from io.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty)
'org.jboss.marshalling.ByteOutput', 'org.jboss.marshalling.ByteOutput',
// from io.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty) // from io.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty)
'org.jboss.marshalling.Marshaller', 'org.jboss.marshalling.Marshaller',
// from io.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty) // from io.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty)
'org.jboss.marshalling.MarshallerFactory', 'org.jboss.marshalling.MarshallerFactory',
'org.jboss.marshalling.MarshallingConfiguration', 'org.jboss.marshalling.MarshallingConfiguration',
'org.jboss.marshalling.Unmarshaller', 'org.jboss.marshalling.Unmarshaller',
// from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional
'org.slf4j.helpers.FormattingTuple', 'org.slf4j.helpers.FormattingTuple',
'org.slf4j.helpers.MessageFormatter', 'org.slf4j.helpers.MessageFormatter',
'org.slf4j.Logger', 'org.slf4j.Logger',
'org.slf4j.LoggerFactory', 'org.slf4j.LoggerFactory',
'org.slf4j.spi.LocationAwareLogger', 'org.slf4j.spi.LocationAwareLogger',
'com.google.protobuf.ExtensionRegistryLite', 'com.google.protobuf.ExtensionRegistryLite',
'com.google.protobuf.MessageLiteOrBuilder', 'com.google.protobuf.MessageLiteOrBuilder',
'com.google.protobuf.nano.CodedOutputByteBufferNano', 'com.google.protobuf.nano.CodedOutputByteBufferNano',
'com.google.protobuf.nano.MessageNano', 'com.google.protobuf.nano.MessageNano',
'com.jcraft.jzlib.Deflater', 'com.jcraft.jzlib.Deflater',
'com.jcraft.jzlib.Inflater', 'com.jcraft.jzlib.Inflater',
'com.jcraft.jzlib.JZlib$WrapperType', 'com.jcraft.jzlib.JZlib$WrapperType',
'com.jcraft.jzlib.JZlib', 'com.jcraft.jzlib.JZlib',
'com.ning.compress.BufferRecycler', 'com.ning.compress.BufferRecycler',
'com.ning.compress.lzf.ChunkDecoder', 'com.ning.compress.lzf.ChunkDecoder',
'com.ning.compress.lzf.ChunkEncoder', 'com.ning.compress.lzf.ChunkEncoder',
'com.ning.compress.lzf.LZFEncoder', 'com.ning.compress.lzf.LZFEncoder',
'com.ning.compress.lzf.util.ChunkDecoderFactory', 'com.ning.compress.lzf.util.ChunkDecoderFactory',
'com.ning.compress.lzf.util.ChunkEncoderFactory', 'com.ning.compress.lzf.util.ChunkEncoderFactory',
'lzma.sdk.lzma.Encoder', 'lzma.sdk.lzma.Encoder',
'net.jpountz.lz4.LZ4Compressor', 'net.jpountz.lz4.LZ4Compressor',
'net.jpountz.lz4.LZ4Factory', 'net.jpountz.lz4.LZ4Factory',
'net.jpountz.lz4.LZ4FastDecompressor', 'net.jpountz.lz4.LZ4FastDecompressor',
'net.jpountz.xxhash.XXHash32', 'net.jpountz.xxhash.XXHash32',
'net.jpountz.xxhash.XXHashFactory', 'net.jpountz.xxhash.XXHashFactory',
'org.eclipse.jetty.alpn.ALPN$ClientProvider', 'org.eclipse.jetty.alpn.ALPN$ClientProvider',
'org.eclipse.jetty.alpn.ALPN$ServerProvider', 'org.eclipse.jetty.alpn.ALPN$ServerProvider',
'org.eclipse.jetty.alpn.ALPN', 'org.eclipse.jetty.alpn.ALPN',
'org.conscrypt.AllocatedBuffer', 'org.conscrypt.AllocatedBuffer',
'org.conscrypt.BufferAllocator', 'org.conscrypt.BufferAllocator',
'org.conscrypt.Conscrypt', 'org.conscrypt.Conscrypt',
'org.conscrypt.HandshakeListener', 'org.conscrypt.HandshakeListener',
// from io.netty.handler.ssl.OpenSslEngine (netty) // from io.netty.handler.ssl.OpenSslEngine (netty)
'io.netty.internal.tcnative.Buffer', 'io.netty.internal.tcnative.Buffer',
'io.netty.internal.tcnative.Library', 'io.netty.internal.tcnative.Library',
'io.netty.internal.tcnative.SSL', 'io.netty.internal.tcnative.SSL',
'io.netty.internal.tcnative.SSLContext', 'io.netty.internal.tcnative.SSLContext',
'io.netty.internal.tcnative.SSLPrivateKeyMethod', 'io.netty.internal.tcnative.SSLPrivateKeyMethod',
'io.netty.internal.tcnative.CertificateCallback', 'io.netty.internal.tcnative.CertificateCallback',
'io.netty.internal.tcnative.CertificateVerifier', 'io.netty.internal.tcnative.CertificateVerifier',
'io.netty.internal.tcnative.SessionTicketKey', 'io.netty.internal.tcnative.SessionTicketKey',
'io.netty.internal.tcnative.SniHostNameMatcher', 'io.netty.internal.tcnative.SniHostNameMatcher',
) )
ignoreViolations ( ignoreViolations(
'io.netty.util.internal.PlatformDependent0', 'io.netty.util.internal.PlatformDependent0',
'io.netty.util.internal.PlatformDependent0$1', 'io.netty.util.internal.PlatformDependent0$1',
'io.netty.util.internal.PlatformDependent0$2', 'io.netty.util.internal.PlatformDependent0$2',
'io.netty.util.internal.PlatformDependent0$3', 'io.netty.util.internal.PlatformDependent0$3',
'io.netty.util.internal.PlatformDependent0$5', 'io.netty.util.internal.PlatformDependent0$5',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef', 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef', 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields', 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields', 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields', 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode', 'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField', 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField', 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField', 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess', 'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess', 'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess',
'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator' 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator'
) )
} }
rootProject.globalInfo.ready { rootProject.globalInfo.ready {
if (BuildParams.inFipsJvm == false) { if (BuildParams.inFipsJvm == false) {
// BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in // BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in
// a FIPS JVM with BouncyCastleFIPS Provider // a FIPS JVM with BouncyCastleFIPS Provider
thirdPartyAudit.ignoreMissingClasses( thirdPartyAudit.ignoreMissingClasses(
'org.bouncycastle.asn1.x500.X500Name' 'org.bouncycastle.asn1.x500.X500Name'
) )
} }
} }

View file

@ -1,4 +1,3 @@
import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.testclusters.TestClustersPlugin import org.elasticsearch.gradle.testclusters.TestClustersPlugin

View file

@ -22,5 +22,5 @@ apply plugin: 'elasticsearch.rest-test'
apply plugin: 'elasticsearch.test-with-dependencies' apply plugin: 'elasticsearch.test-with-dependencies'
dependencies { dependencies {
testCompile project(":client:rest-high-level") testCompile project(":client:rest-high-level")
} }

View file

@ -23,19 +23,19 @@ apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.esplugin' apply plugin: 'elasticsearch.esplugin'
esplugin { esplugin {
description 'Die with dignity plugin' description 'Die with dignity plugin'
classname 'org.elasticsearch.DieWithDignityPlugin' classname 'org.elasticsearch.DieWithDignityPlugin'
} }
integTest.runner { integTest.runner {
systemProperty 'tests.security.manager', 'false' systemProperty 'tests.security.manager', 'false'
systemProperty 'tests.system_call_filter', 'false' systemProperty 'tests.system_call_filter', 'false'
nonInputProperties.systemProperty 'log', "${-> testClusters.integTest.singleNode().getServerLog()}" nonInputProperties.systemProperty 'log', "${-> testClusters.integTest.singleNode().getServerLog()}"
systemProperty 'runtime.java.home', BuildParams.runtimeJavaHome systemProperty 'runtime.java.home', BuildParams.runtimeJavaHome
} }
testClusters.integTest { testClusters.integTest {
systemProperty "die.with.dignity.test", "whatever" systemProperty "die.with.dignity.test", "whatever"
} }
test.enabled = false test.enabled = false

View file

@ -37,16 +37,16 @@ test {
} }
thirdPartyAudit { thirdPartyAudit {
ignoreMissingClasses ( ignoreMissingClasses(
'com.ibm.icu.lang.UCharacter' 'com.ibm.icu.lang.UCharacter'
) )
ignoreViolations ( ignoreViolations(
// uses internal java api: sun.misc.Unsafe // uses internal java api: sun.misc.Unsafe
'com.google.common.cache.Striped64', 'com.google.common.cache.Striped64',
'com.google.common.cache.Striped64$1', 'com.google.common.cache.Striped64$1',
'com.google.common.cache.Striped64$Cell', 'com.google.common.cache.Striped64$Cell',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1' 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1'
) )
} }

View file

@ -35,7 +35,7 @@ for (Version bwcVersion : bwcVersions.indexCompatible) {
testClusters { testClusters {
"${baseName}" { "${baseName}" {
versions = [ bwcVersion.toString(), project.version ] versions = [bwcVersion.toString(), project.version]
numberOfNodes = 2 numberOfNodes = 2
// some tests rely on the translog not being flushed // some tests rely on the translog not being flushed
setting 'indices.memory.shard_inactive_time', '20m' setting 'indices.memory.shard_inactive_time', '20m'
@ -67,15 +67,15 @@ for (Version bwcVersion : bwcVersions.indexCompatible) {
tasks.matching { it.name.startsWith(baseName) && it.name.endsWith("ClusterTest") }.configureEach { tasks.matching { it.name.startsWith(baseName) && it.name.endsWith("ClusterTest") }.configureEach {
it.systemProperty 'tests.old_cluster_version', bwcVersion.toString().minus("-SNAPSHOT") it.systemProperty 'tests.old_cluster_version', bwcVersion.toString().minus("-SNAPSHOT")
it.systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}" it.systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }") it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }") it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
} }
if (project.bwc_tests_enabled) { if (project.bwc_tests_enabled) {
bwcTest.dependsOn( bwcTest.dependsOn(
tasks.register("${baseName}#bwcTest") { tasks.register("${baseName}#bwcTest") {
dependsOn tasks.named("${baseName}#upgradedClusterTest") dependsOn tasks.named("${baseName}#upgradedClusterTest")
} }
) )
} }
} }
@ -103,4 +103,4 @@ artifacts {
testArtifacts testJar testArtifacts testJar
} }
test.enabled = false test.enabled = false

View file

@ -1,4 +1,4 @@
/* /*
* Licensed to Elasticsearch under one or more contributor * Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with * license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright * this work for additional information regarding copyright
@ -22,19 +22,19 @@ apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test' apply plugin: 'elasticsearch.rest-test'
apply plugin: 'elasticsearch.standalone-test' apply plugin: 'elasticsearch.standalone-test'
testClusters.integTest { testClusters.integTest {
/** /**
* Provide a custom log4j configuration where layout is an old style pattern and confirm that Elasticsearch * Provide a custom log4j configuration where layout is an old style pattern and confirm that Elasticsearch
* can successfully startup. * can successfully startup.
*/ */
extraConfigFile 'log4j2.properties', file('custom-log4j2.properties') extraConfigFile 'log4j2.properties', file('custom-log4j2.properties')
} }
integTest.runner { integTest.runner {
nonInputProperties.systemProperty 'tests.logfile', nonInputProperties.systemProperty 'tests.logfile',
"${ -> testClusters.integTest.singleNode().getServerLog().absolutePath.replaceAll(".json", ".log")}" "${-> testClusters.integTest.singleNode().getServerLog().absolutePath.replaceAll(".json", ".log")}"
} }
test { test {
systemProperty 'tests.security.manager', 'false' systemProperty 'tests.security.manager', 'false'
} }
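One caveat in the hunk above: the first argument to replaceAll is a regular expression, so the unescaped dot matches any character. For the fixed ".json" log names involved here the result is still correct, but quoting the pattern is the defensive form, as this Groovy sketch shows:

  assert 'es_server.json'.replaceAll('.json', '.log') == 'es_server.log'
  // With an unquoted pattern, '.json' also matches '_json':
  assert 'x_json.json'.replaceAll('.json', '.log') == 'x.log.log'
  // Pattern.quote makes the match literal:
  assert 'x_json.json'.replaceAll(java.util.regex.Pattern.quote('.json'), '.log') == 'x_json.log'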

View file

@ -39,14 +39,14 @@ dependencies {
} }
processTestResources { processTestResources {
from ({ zipTree(configurations.restSpec.singleFile) }) from({ zipTree(configurations.restSpec.singleFile) })
dependsOn configurations.restSpec dependsOn configurations.restSpec
} }
for (Version bwcVersion : bwcVersions.wireCompatible) { for (Version bwcVersion : bwcVersions.wireCompatible) {
if (bwcVersion == VersionProperties.getElasticsearchVersion()) { if (bwcVersion == VersionProperties.getElasticsearchVersion()) {
// Not really a mixed cluster // Not really a mixed cluster
continue ; continue;
} }
String baseName = "v${bwcVersion}" String baseName = "v${bwcVersion}"
@ -55,7 +55,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
the nodes has a different minor. */ the nodes has a different minor. */
testClusters { testClusters {
"${baseName}" { "${baseName}" {
versions = [ bwcVersion.toString(), project.version ] versions = [bwcVersion.toString(), project.version]
numberOfNodes = 4 numberOfNodes = 4
setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}" setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
@ -69,16 +69,16 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
doFirst { doFirst {
project.delete("${buildDir}/cluster/shared/repo/${baseName}") project.delete("${buildDir}/cluster/shared/repo/${baseName}")
// Getting the endpoints causes a wait for the cluster // Getting the endpoints causes a wait for the cluster
println "Test cluster endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",") }" println "Test cluster endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",")}"
println "Upgrading one node to create a mixed cluster" println "Upgrading one node to create a mixed cluster"
testClusters."${baseName}".nextNodeToNextVersion() testClusters."${baseName}".nextNodeToNextVersion()
// Getting the endpoints causes a wait for the cluster // Getting the endpoints causes a wait for the cluster
println "Upgrade complete, endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",") }" println "Upgrade complete, endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",")}"
println "Upgrading another node to create a mixed cluster" println "Upgrading another node to create a mixed cluster"
testClusters."${baseName}".nextNodeToNextVersion() testClusters."${baseName}".nextNodeToNextVersion()
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }") nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }") nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
} }
systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}" systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
onlyIf { project.bwc_tests_enabled } onlyIf { project.bwc_tests_enabled }

View file

@ -48,7 +48,7 @@ task mixedClusterTest(type: RestIntegTestTask) {
testClusters.mixedClusterTest { testClusters.mixedClusterTest {
setting 'cluster.remote.my_remote_cluster.seeds', setting 'cluster.remote.my_remote_cluster.seeds',
{ "\"${testClusters.'remote-cluster'.getAllTransportPortURI().get(0)}\"" } { "\"${testClusters.'remote-cluster'.getAllTransportPortURI().get(0)}\"" }
setting 'cluster.remote.connections_per_cluster', '1' setting 'cluster.remote.connections_per_cluster', '1'
setting 'cluster.remote.connect', 'true' setting 'cluster.remote.connect', 'true'
} }

View file

@ -51,7 +51,7 @@ testingConventions.enabled = false
tasks.dependencyLicenses.enabled = false tasks.dependencyLicenses.enabled = false
tasks.dependenciesInfo.enabled = false tasks.dependenciesInfo.enabled = false
tasks.thirdPartyAudit.ignoreMissingClasses () tasks.thirdPartyAudit.ignoreMissingClasses()
tasks.register('destructivePackagingTest') { tasks.register('destructivePackagingTest') {
dependsOn 'destructiveDistroTest', 'destructiveBatsTest.oss', 'destructiveBatsTest.default' dependsOn 'destructiveDistroTest', 'destructiveBatsTest.oss', 'destructiveBatsTest.default'

View file

@@ -40,7 +40,7 @@ dependencies {
}
processTestResources {
-from ({ zipTree(configurations.restSpec.singleFile) }) {
+from({ zipTree(configurations.restSpec.singleFile) }) {
include 'rest-api-spec/api/**'
}
dependsOn configurations.restSpec
@@ -64,7 +64,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
testClusters {
"${baseName}" {
-versions = [ bwcVersion.toString(), project.version ]
+versions = [bwcVersion.toString(), project.version]
numberOfNodes = 3
setting 'repositories.url.allowed_urls', 'http://snapshot.test*'
@@ -82,8 +82,8 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
project.delete("${buildDir}/cluster/shared/repo/${baseName}")
}
systemProperty 'tests.rest.suite', 'old_cluster'
-nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
+nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
-nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
+nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
tasks.register("${baseName}#oneThirdUpgradedTest", RestTestRunnerTask) {
@@ -94,8 +94,8 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
}
systemProperty 'tests.rest.suite', 'mixed_cluster'
systemProperty 'tests.first_round', 'true'
-nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
+nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
-nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
+nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
tasks.register("${baseName}#twoThirdsUpgradedTest", RestTestRunnerTask) {
@@ -106,8 +106,8 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
}
systemProperty 'tests.rest.suite', 'mixed_cluster'
systemProperty 'tests.first_round', 'false'
-nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
+nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
-nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
+nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
tasks.register("${baseName}#upgradedClusterTest", RestTestRunnerTask) {
@@ -117,15 +117,15 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
}
useCluster testClusters."${baseName}"
systemProperty 'tests.rest.suite', 'upgraded_cluster'
-nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
+nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
-nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
+nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
if (project.bwc_tests_enabled) {
bwcTest.dependsOn(
tasks.register("${baseName}#bwcTest") {
dependsOn tasks.named("${baseName}#upgradedClusterTest")
}
)
}
}
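The tasks above form a staged rolling upgrade: old_cluster, two mixed_cluster rounds, then upgraded_cluster, with one node upgraded via nextNodeToNextVersion() before each intermediate stage. A condensed sketch of a single stage; the explicit dependsOn on the previous stage is an inference from the task names, not shown in these hunks:

// Condensed sketch of one upgrade stage (names mirror the diff; the
// dependsOn wiring is inferred, not visible in this excerpt).
tasks.register("${baseName}#oneThirdUpgradedTest", RestTestRunnerTask) {
  dependsOn tasks.named("${baseName}#oldClusterTest")
  useCluster testClusters."${baseName}"
  doFirst {
    // upgrade exactly one node before this round of mixed-cluster tests
    testClusters."${baseName}".nextNodeToNextVersion()
  }
}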


@@ -23,15 +23,15 @@ apply plugin: 'elasticsearch.rest-test'
apply plugin: 'elasticsearch.test-with-dependencies'
dependencies {
testCompile "com.fasterxml.jackson.core:jackson-databind:2.8.11"
testCompile project(path: ':modules:transport-netty4', configuration: 'runtime') // for http
testCompile project(path: ':plugins:transport-nio', configuration: 'runtime') // for http
}
integTest.runner {
/*
 * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
 * other if we allow them to set the number of available processors as it's set-once in Netty.
 */
systemProperty 'es.set.netty.runtime.available.processors', 'false'
}
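Netty caches the available-processor count the first time it is set, so two suites sharing a JVM would conflict; the property above disables Elasticsearch's attempt to set it. A sketch applying the same guard to every test task in a project (the configureEach form is an illustration, not from this diff):

// Sketch: apply the Netty processor guard to all Test tasks at once.
tasks.withType(Test).configureEach {
  systemProperty 'es.set.netty.runtime.available.processors', 'false'
}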


@@ -22,9 +22,9 @@ apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
dependencies {
testCompile project(path: ':modules:ingest-common', configuration: 'runtime')
}
testClusters.integTest {
setting 'node.ingest', 'false'
}


@@ -22,17 +22,17 @@ apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
dependencies {
testCompile project(path: ':modules:ingest-common', configuration: 'runtime')
testCompile project(path: ':modules:ingest-geoip', configuration: 'runtime')
testCompile project(path: ':modules:lang-mustache', configuration: 'runtime')
testCompile project(path: ':modules:lang-painless', configuration: 'runtime')
testCompile project(path: ':modules:reindex', configuration: 'runtime')
}
testingConventions {
naming {
IT {
baseClass 'org.elasticsearch.ingest.AbstractScriptTestCase'
}
}
}
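testingConventions maps a test-class name suffix to a required base class and fails the build when a matching class does not extend it. A sketch of the DSL with a second, hypothetical suffix block to show that several can sit side by side:

// Sketch of the convention DSL; the Tests block is hypothetical and only
// illustrates declaring multiple suffixes.
testingConventions {
  naming {
    IT {
      baseClass 'org.elasticsearch.ingest.AbstractScriptTestCase'
    }
    Tests {
      baseClass 'org.apache.lucene.util.LuceneTestCase'
    }
  }
}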


@@ -38,9 +38,9 @@ integTest.runner {
}
if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'oss'))) {
systemProperty 'tests.rest.blacklist', [
'cat.templates/10_basic/No templates',
'cat.templates/10_basic/Sort templates',
'cat.templates/10_basic/Multiple template',
].join(',')
}
}
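The blacklist is applied only for the default distribution and is passed as a single comma-joined system property. A minimal sketch of the same pattern with illustrative test names:

// Sketch: conditionally skip named YAML test cases for one distribution;
// the suite and case names here are illustrative.
if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'oss'))) {
  integTest.runner {
    systemProperty 'tests.rest.blacklist', [
      'some.suite/10_basic/First case',
      'some.suite/10_basic/Second case',
    ].join(',')
  }
}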


@@ -29,5 +29,5 @@ testClusters.integTest {
integTest.runner {
nonInputProperties.systemProperty 'tests.logfile',
-"${ -> testClusters.integTest.singleNode().getServerLog() }"
+"${-> testClusters.integTest.singleNode().getServerLog()}"
}


@@ -26,38 +26,38 @@ apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.standalone-test'
tasks.register("bwcTest") {
description = 'Runs backwards compatibility tests.'
group = 'verification'
}
for (Version bwcVersion : bwcVersions.indexCompatible) {
String baseName = "v${bwcVersion}"
testClusters {
"${baseName}" {
version = bwcVersion.toString()
setting 'http.content_type.required', 'true'
javaHome = BuildParams.runtimeJavaHome
}
}
tasks.register("${baseName}#integTest", RestTestRunnerTask) {
useCluster testClusters."${baseName}"
-nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
+nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
-nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
+nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
tasks.register("${baseName}#bwcTest") {
dependsOn "${baseName}#integTest"
}
bwcTest.dependsOn("${baseName}#bwcTest")
}
task bwcTestSnapshots {
if (project.bwc_tests_enabled) {
for (version in bwcVersions.unreleasedIndexCompatible) {
dependsOn "v${version}#bwcTest"
}
}
}
@@ -86,4 +86,4 @@ task verifyDocsLuceneVersion {
check.dependsOn bwcTestSnapshots, verifyDocsLuceneVersion
test.enabled = false
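Each index-compatible version gets a generated v<version>#integTest / v<version>#bwcTest pair, and bwcTestSnapshots aggregates the unreleased ones. The naming scheme makes a self-contained sketch straightforward; the version values here are illustrative:

// Runnable sketch of the per-version task generation used above.
def versions = ['7.0.0', '7.1.0'] // illustrative
def aggregate = tasks.register('allBwcTests')
versions.each { v ->
  def stage = tasks.register("v${v}#bwcTest") {
    doLast { println "would run bwc tests against ${v}" }
  }
  aggregate.configure { dependsOn stage }
}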


@@ -37,183 +37,183 @@ final String wildflyInstall = "${buildDir}/wildfly/wildfly-${wildflyVersion}"
int managementPort
repositories {
// the Wildfly distribution is not available via a repository, so we fake an Ivy repository on top of the download site
ivy {
name "wildfly"
url "https://download.jboss.org"
metadataSources {
artifact()
}
patternLayout {
artifact 'wildfly/[revision]/[module]-[revision].[ext]'
}
}
}
configurations {
wildfly
}
dependencies {
providedCompile 'javax.enterprise:cdi-api:1.2'
providedCompile 'org.jboss.spec.javax.annotation:jboss-annotations-api_1.2_spec:1.0.0.Final'
providedCompile 'org.jboss.spec.javax.ws.rs:jboss-jaxrs-api_2.0_spec:1.0.0.Final'
-compile ('org.jboss.resteasy:resteasy-jackson2-provider:3.0.19.Final') {
+compile('org.jboss.resteasy:resteasy-jackson2-provider:3.0.19.Final') {
exclude module: 'jackson-annotations'
exclude module: 'jackson-core'
exclude module: 'jackson-databind'
exclude module: 'jackson-jaxrs-json-provider'
}
compile "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
compile "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
compile "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:${versions.jackson}"
compile "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:${versions.jackson}"
compile "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}"
compile "org.apache.logging.log4j:log4j-api:${versions.log4j}"
compile "org.apache.logging.log4j:log4j-core:${versions.log4j}"
compile project(':client:rest-high-level')
wildfly "org.jboss:wildfly:${wildflyVersion}@zip"
testCompile project(':test:framework')
}
task unzipWildfly(type: Sync) {
into wildflyDir
from { zipTree(configurations.wildfly.singleFile) }
}
task deploy(type: Copy) {
dependsOn unzipWildfly, war
from war
into "${wildflyInstall}/standalone/deployments"
}
task writeElasticsearchProperties(type: DefaultTestClustersTask) {
onlyIf { !Os.isFamily(Os.FAMILY_WINDOWS) }
useCluster testClusters.integTest
dependsOn deploy
doLast {
final File elasticsearchProperties = file("${wildflyInstall}/standalone/configuration/elasticsearch.properties")
elasticsearchProperties.write(
[
"http.uri=${-> testClusters.integTest.getAllHttpSocketURI().get(0)}"
].join("\n"))
}
}
// the default configuration ships with IPv6 disabled but our cluster could be bound to IPv6 if the host supports it
task enableIPv6 {
dependsOn unzipWildfly
doLast {
final File standaloneConf = file("${wildflyInstall}/bin/standalone.conf")
final List<String> lines =
Files.readAllLines(standaloneConf.toPath())
.collect { line -> line.replace("-Djava.net.preferIPv4Stack=true", "-Djava.net.preferIPv4Stack=false") }
standaloneConf.write(lines.join("\n"))
}
}
task startWildfly {
dependsOn enableIPv6, writeElasticsearchProperties
doLast {
// we skip these tests on Windows so we do not need to worry about compatibility here
final ProcessBuilder wildfly = new ProcessBuilder(
"${wildflyInstall}/bin/standalone.sh",
"-Djboss.http.port=0",
"-Djboss.https.port=0",
"-Djboss.management.http.port=0")
final Process process = wildfly.start()
new BufferedReader(new InputStreamReader(process.getInputStream())).withReader { br ->
String line
int httpPort = 0
while ((line = br.readLine()) != null) {
logger.info(line)
if (line.matches('.*Undertow HTTP listener default listening on .*:\\d+$')) {
assert httpPort == 0
final int index = line.lastIndexOf(":")
assert index >= 0
httpPort = Integer.parseInt(line.substring(index + 1))
// set this system property so the test runner knows the port Wildfly is listening for HTTP requests on
integTestRunner.systemProperty("tests.jboss.root", "http://localhost:$httpPort/wildfly-$version/transport")
} else if (line.matches('.*Http management interface listening on http://.*:\\d+/management$')) {
assert managementPort == 0
final int colonIndex = line.lastIndexOf(":")
assert colonIndex >= 0
final int slashIndex = line.lastIndexOf("/")
assert slashIndex >= 0
managementPort = Integer.parseInt(line.substring(colonIndex + 1, slashIndex))
/*
 * As soon as we know the management port, we fork a process that will ensure the Wildfly process is killed if we
 * teardown abnormally. We skip these tests on Windows so we do not need to worry about CLI compatibility here.
 */
final File script = new File(project.buildDir, "wildfly/wildfly.killer.sh")
script.setText(
["function shutdown {",
" ${wildflyInstall}/bin/jboss-cli.sh --controller=localhost:${-> managementPort} --connect command=shutdown",
"}",
"trap shutdown EXIT",
// will wait indefinitely for input, but we never pass input, and the pipe is only closed when the build dies
"read line\n"].join('\n'), 'UTF-8')
final ProcessBuilder killer = new ProcessBuilder("bash", script.absolutePath)
killer.start()
} else if (line.matches(".*WildFly Full \\d+\\.\\d+\\.\\d+\\.Final \\(WildFly Core \\d+\\.\\d+\\.\\d+\\.Final\\) started.*")) {
break
}
}
assert httpPort > 0
assert managementPort > 0
}
}
}
task configureClient(type: LoggedExec) {
dependsOn startWildfly
// we skip these tests on Windows so we do not need to worry about compatibility here
commandLine "${wildflyInstall}/bin/jboss-cli.sh",
"--controller=localhost:${-> managementPort}",
"--connect",
"--command=/system-property=elasticsearch.properties:add(value=\${jboss.server.config.dir}/elasticsearch.properties)"
}
task stopWildfly(type: LoggedExec) {
// we skip these tests on Windows so we do not need to worry about CLI compatibility here
commandLine "${wildflyInstall}/bin/jboss-cli.sh", "--controller=localhost:${-> managementPort}", "--connect", "command=shutdown"
}
if (!Os.isFamily(Os.FAMILY_WINDOWS)) {
integTestRunner.dependsOn(configureClient)
final TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() {
@Override
void afterExecute(final Task task, final TaskState state) {
if (state.failure != null) {
final File logFile = new File(wildflyInstall, "standalone/log/server.log")
println("\nWildfly server log (from ${logFile}):")
println('-----------------------------------------')
final Stream<String> stream = Files.lines(logFile.toPath(), StandardCharsets.UTF_8)
try {
for (String line : stream) {
println(line)
}
} finally {
stream.close()
}
println('=========================================')
}
}
}
integTestRunner.doFirst {
project.gradle.addListener(logDumpListener)
}
integTestRunner.doLast {
project.gradle.removeListener(logDumpListener)
}
integTestRunner.finalizedBy(stopWildfly)
} else {
integTest.enabled = false
testingConventions.enabled = false
}
check.dependsOn(integTest)
@@ -227,11 +227,11 @@ thirdPartyAudit.enabled = false
testingConventions {
naming.clear()
// We only have one "special" integration test here to connect to wildfly
naming {
IT {
baseClass 'org.apache.lucene.util.LuceneTestCase'
}
}
}
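startWildfly boots the server on ephemeral ports (all set to 0) and scrapes its stdout to learn which ports were actually bound. The parsing reduces to plain string work; a self-contained sketch against a mimicked Undertow log line:

// Reduced, runnable sketch of the port scraping in startWildfly; the sample
// line mimics the Undertow message the build matches against.
String line = 'INFO Undertow HTTP listener default listening on 127.0.0.1:8080'
int httpPort = 0
if (line.matches('.*Undertow HTTP listener default listening on .*:\\d+$')) {
  final int index = line.lastIndexOf(":")
  httpPort = Integer.parseInt(line.substring(index + 1))
}
assert httpPort == 8080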


@@ -43,11 +43,11 @@ if (!isEclipse && !isIdea) {
}
}
}
configurations {
java12Compile.extendsFrom(compile)
}
dependencies {
java12Compile sourceSets.main.output
}
@@ -206,7 +206,7 @@ processResources {
dependsOn generateModulesList, generatePluginsList
}
-thirdPartyAudit.ignoreMissingClasses (
+thirdPartyAudit.ignoreMissingClasses(
// from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml)
'com.fasterxml.jackson.databind.ObjectMapper',
@@ -322,9 +322,9 @@ dependencyLicenses {
mapping from: /lucene-.*/, to: 'lucene'
dependencies = project.configurations.runtime.fileCollection {
it.group.startsWith('org.elasticsearch') == false ||
// keep the following org.elasticsearch jars in
(it.name == 'jna' ||
it.name == 'securesm')
}
}
@@ -333,13 +333,13 @@ task integTest(type: Test) {
description = 'Multi-node tests'
mustRunAfter test
include '**/*IT.class'
}
check.dependsOn integTest
task internalClusterTest {
dependsOn integTest
}
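The dependencyLicenses filter keeps every third-party jar plus the two org.elasticsearch jars (jna, securesm) that still need license tracking. The same logic as a standalone predicate, runnable as plain Groovy with maps standing in for resolved dependencies:

// The filter logic above as a standalone predicate.
def keepForLicenseCheck = { dep ->
  dep.group.startsWith('org.elasticsearch') == false ||
    (dep.name == 'jna' || dep.name == 'securesm')
}
assert keepForLicenseCheck(group: 'org.apache.lucene', name: 'lucene-core')
assert keepForLicenseCheck(group: 'org.elasticsearch', name: 'jna')
assert !keepForLicenseCheck(group: 'org.elasticsearch', name: 'server')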


@@ -64,11 +64,11 @@ List projects = [
]
/**
 * Iterates over sub directories, looking for build.gradle, and adds a project if found
 * for that dir with the given path prefix. Note that this requires each level
 * of the dir hierarchy to have a build.gradle. Otherwise we would have to iterate
 * all files/directories in the source tree to find all projects.
 */
void addSubProjects(String path, File dir) {
if (dir.isDirectory() == false) return;
if (dir.name == 'buildSrc') return;
@@ -78,12 +78,12 @@ void addSubProjects(String path, File dir) {
final String projectName = "${path}:${dir.name}"
include projectName
if (path.isEmpty() || path.startsWith(':example-plugins')) {
project(projectName).projectDir = dir
}
for (File subdir : dir.listFiles()) {
addSubProjects(projectName, subdir)
}
}
// include example plugins first, so adding plugin dirs below won't muck with :example-plugins
@@ -110,7 +110,7 @@ project(':build-tools').projectDir = new File(rootProject.projectDir, 'buildSrc'
project(':build-tools:reaper').projectDir = new File(rootProject.projectDir, 'buildSrc/reaper')
project(":libs").children.each { libsProject ->
libsProject.name = "elasticsearch-${libsProject.name}"
}
// look for extra plugins for elasticsearch
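addSubProjects walks the directory tree and includes a project for every directory that carries a build.gradle at each level. A self-contained sketch of that recursion with the settings-DSL call stubbed out:

// Runnable sketch of the recursive discovery; println stands in for the
// settings-script include() call.
void walkProjects(String path, File dir) {
  if (dir.isDirectory() == false) return
  if (dir.name == 'buildSrc') return
  if (new File(dir, 'build.gradle').exists() == false) return
  String projectName = "${path}:${dir.name}"
  println "would include ${projectName}"
  for (File subdir : dir.listFiles()) {
    walkProjects(projectName, subdir)
  }
}
new File('.').listFiles().each { walkProjects('', it) }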


@@ -23,17 +23,17 @@ description = 'Fixture for Azure external service'
test.enabled = false
dependencies {
compile project(':server')
}
preProcessFixture {
dependsOn jar
doLast {
file("${testFixturesDir}/shared").mkdirs()
project.copy {
from jar
from configurations.runtimeClasspath
into "${testFixturesDir}/shared"
}
}
}
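preProcessFixture stages the fixture jar and its runtime classpath into a directory shared with the external fixture. The same staging pattern as a stand-alone task; the target path is illustrative, and the sketch assumes a project with a runtimeClasspath configuration (e.g. via the java plugin):

// Sketch of the staging pattern; the target path is illustrative.
task stageFixture {
  doLast {
    def target = file("${buildDir}/fixture-shared")
    target.mkdirs()
    project.copy {
      from configurations.runtimeClasspath
      into target
    }
  }
}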


@@ -1,4 +1,3 @@
subprojects {
// fixtures are mostly external and by default we don't want to check forbidden apis
forbiddenApisMain.enabled = false

Some files were not shown because too many files have changed in this diff.