Mirror of https://github.com/elastic/elasticsearch.git, synced 2025-04-23 14:47:31 -04:00
* Update Gradle wrapper to 8.13 (#122421)
* Fix Gradle deprecation warning: declaring an is- property with a Boolean type has been deprecated.
* Make use of the new layout.settingsFolder API to address some cross-project references
* Fix buildParams snapshot check for multi-project builds
(cherry picked from commit e19b2264af)
# Conflicts:
# build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java
# build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java
# docs/build.gradle
# qa/lucene-index-compatibility/build.gradle
# x-pack/qa/multi-project/core-rest-tests-with-multiple-projects/build.gradle
# x-pack/qa/multi-project/xpack-rest-tests-with-multiple-projects/build.gradle
* More fixes
650 lines · 23 KiB · Groovy

/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import org.apache.tools.ant.filters.FixCrLfFilter
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.internal.ConcatFilesTask
import org.elasticsearch.gradle.internal.DependenciesInfoPlugin
import org.elasticsearch.gradle.internal.NoticeTask
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin

import java.nio.file.Files
import java.nio.file.Path

plugins {
  id 'base'
  id 'elasticsearch.distro'
}

configurations {
  log4jConfig
  dependencyInfos {
    attributes {
      attribute(Category.CATEGORY_ATTRIBUTE, project.getObjects().named(Category.class, Category.DOCUMENTATION))
    }
  }
  featuresMetadata {
    attributes {
      attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
    }
  }
}

dependencies {
  featuresMetadata project(':server')
}

def thisProj = project
rootProject.allprojects { proj ->
  proj.plugins.withType(DependenciesInfoPlugin) {
    thisProj.dependencies.add("dependencyInfos", project.dependencies.project(path: proj.path))
  }
}
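
// Every project that applies DependenciesInfoPlugin is wired into the 'dependencyInfos' configuration above;
// the generateDependenciesReport task registered below concatenates those per-project dependency reports.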

/*****************************************************************************
 *                     Third party dependencies report                       *
 *****************************************************************************/

// Concatenates the dependencies CSV files into a single file
tasks.register("generateDependenciesReport", ConcatFilesTask) {
  files = configurations.dependencyInfos
  headerLine = "name,version,url,license,sourceURL"
  target = new File(providers.systemProperty('csv')
    .orElse("${project.buildDir}/reports/dependencies/es-dependencies.csv")
    .get()
  )
  // explicitly add our dependency on the JDK
  String jdkVersion = VersionProperties.versions.get('bundled_jdk').split('@')[0]
  String jdkMajorVersion = jdkVersion.split('[+.]')[0]
  String sourceUrl = "https://github.com/openjdk/jdk${jdkMajorVersion}u/archive/refs/tags/jdk-${jdkVersion}.tar.gz"
  additionalLines << "OpenJDK,${jdkVersion},https://openjdk.java.net/,GPL-2.0-with-classpath-exception,${sourceUrl}".toString()

  // Explicitly add the dependency on the RHEL UBI Docker base image
  String[] rhelUbiFields = [
    'Red Hat Universal Base Image minimal',
    '8',
    'https://catalog.redhat.com/software/containers/ubi8/ubi-minimal/5c359a62bed8bd75a2c3fba8',
    'Custom;https://www.redhat.com/licenses/EULA_Red_Hat_Universal_Base_Image_English_20190422.pdf',
    'https://oss-dependencies.elastic.co/red-hat-universal-base-image-minimal/8/ubi-minimal-8-source.tar.gz'
  ]
  additionalLines << rhelUbiFields.join(',')
}
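
// Example invocation (illustrative; assumes this script is the ':distribution' project): the 'csv' system
// property read above controls where the report is written, e.g.
//   ./gradlew :distribution:generateDependenciesReport -Dcsv=/tmp/es-dependencies.csv
// Omitting -Dcsv writes the report to build/reports/dependencies/es-dependencies.csv.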

/*****************************************************************************
 *                                Notice file                                *
 *****************************************************************************/

// integ test zip only uses server, so a different notice file is needed there
def buildServerNoticeTaskProvider = tasks.register("buildServerNotice", NoticeTask)

// other distributions include notices from modules as well, which are added below later
def buildDefaultNoticeTaskProvider = tasks.register("buildDefaultNotice", NoticeTask) {
  licensesDir new File(project(':distribution').projectDir, 'licenses')
}

// The :server and :libs projects belong to all distributions
tasks.withType(NoticeTask).configureEach {
  licensesDir project(':server').file('licenses')
  source project(':server').file('src/main/java')
  project(':libs').subprojects.each { Project lib ->
    licensesDir lib.file('licenses')
    source lib.file('src/main/java')
  }
}

/*****************************************************************************
 *                                  Modules                                  *
 *****************************************************************************/
String defaultOutputs = 'build/outputs/default'
String systemdOutputs = 'build/outputs/systemd'
String integTestOutputs = 'build/outputs/integ-test-only'
String externalTestOutputs = 'build/outputs/external-test'

def processDefaultOutputsTaskProvider = tasks.register("processDefaultOutputs", Sync) {
  into defaultOutputs
}

def processSystemdOutputsTaskProvider = tasks.register("processSystemdOutputs", Sync) {
  into systemdOutputs
}

def processExternalTestOutputsTaskProvider = tasks.register("processExternalTestOutputs", Sync) {
  into externalTestOutputs
}

// Integ tests work over the rest http layer, so we need a transport included with the integ test zip.
// All transport modules are included so that they may be randomized for testing
def processIntegTestOutputsTaskProvider = tasks.register("processIntegTestOutputs", Sync) {
  into integTestOutputs
}

def integTestConfigFiles = fileTree("${integTestOutputs}/config") {
  builtBy processIntegTestOutputsTaskProvider
}

def integTestBinFiles = fileTree("${integTestOutputs}/bin") {
  builtBy processIntegTestOutputsTaskProvider
}

def defaultModulesFiles = fileTree("${defaultOutputs}/modules") {
  builtBy processDefaultOutputsTaskProvider
}

def defaultBinFiles = fileTree("${defaultOutputs}/bin") {
  builtBy processDefaultOutputsTaskProvider
}

def defaultConfigFiles = fileTree("${defaultOutputs}/config") {
  builtBy processDefaultOutputsTaskProvider
}

def systemdModuleFiles = fileTree("${systemdOutputs}/modules") {
  builtBy processSystemdOutputsTaskProvider
}

def buildIntegTestModulesTaskProvider = tasks.register("buildIntegTestModules") {
  dependsOn processIntegTestOutputsTaskProvider
  outputs.dir "${integTestOutputs}/modules"
}

def buildExternalTestModulesTaskProvider = tasks.register("buildExternalTestModules") {
  dependsOn "processExternalTestOutputs"
  outputs.dir "${externalTestOutputs}/modules"
}
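
// The Sync tasks above are only registered here; they are populated by the distro.copyModule(...) calls in the
// module loops further down, and the fileTrees let later copySpecs consume those staged outputs lazily.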

def buildDefaultLog4jConfigTaskProvider = tasks.register("buildDefaultLog4jConfig") {
  mustRunAfter('processDefaultOutputs')

  def outputFile = file("${defaultOutputs}/log4j2.properties")
  def inputFiles = fileTree('src/config').matching { include 'log4j2.properties' }
  project(':modules').subprojects.each {
    inputFiles = inputFiles + it.fileTree('src/main/config').matching { include 'log4j2.properties' }
  }
  project(':x-pack:plugin').subprojects.each {
    inputFiles = inputFiles + it.fileTree('src/main/config').matching { include 'log4j2.properties' }
  }

  inputs.files(inputFiles)
  outputs.file outputFile

  doLast {
    outputFile.setText('', 'UTF-8')
    inputFiles.files.eachWithIndex(
      { f, i ->
        if (i != 0) {
          outputFile.append('\n\n', 'UTF-8')
        }
        outputFile.append(f.text, 'UTF-8')
      }
    )
  }
}
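
// The merged log4j2.properties produced here becomes the default distribution's logging configuration and is
// also exposed to other projects through the 'log4jConfig' artifact registered at the bottom of this script.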

ext.restTestExpansions = [
  'expected.modules.count': 0
]
// we register the module processing tasks above but fill them here so we can do a single
// loop over modules to also set up cross-task dependencies and increment our modules counter
project.rootProject.subprojects.findAll { it.parent.path == ':modules' }.each { Project module ->
  if (module.name == 'systemd') {
    // the systemd module is only included in the package distributions
    return
  }
  File licenses = new File(module.projectDir, 'licenses')
  if (licenses.exists()) {
    buildDefaultNoticeTaskProvider.configure {
      licensesDir licenses
      source module.file('src/main/java')
    }
  }

  distro.copyModule(processDefaultOutputsTaskProvider, module)
  dependencies.add('featuresMetadata', module)
  if (module.name.startsWith('transport-') || (buildParams.snapshotBuild == false && module.name == 'apm')) {
    distro.copyModule(processIntegTestOutputsTaskProvider, module)
  }

  restTestExpansions['expected.modules.count'] += 1
}

// use licenses from each of the bundled xpack plugins
Project xpack = project(':x-pack:plugin')
xpack.subprojects.findAll { it.parent == xpack }.each { Project xpackModule ->
  File licenses = new File(xpackModule.projectDir, 'licenses')
  if (licenses.exists()) {
    buildDefaultNoticeTaskProvider.configure {
      licensesDir licenses
      source xpackModule.file('src/main/java')
    }
  }
  distro.copyModule(processDefaultOutputsTaskProvider, xpackModule)
  dependencies.add('featuresMetadata', xpackModule)
  if (xpackModule.name.equals('core') || xpackModule.name.equals('security')) {
    distro.copyModule(processIntegTestOutputsTaskProvider, xpackModule)
  }
}

distro.copyModule(processSystemdOutputsTaskProvider, project(':modules:systemd'))

project(':test:external-modules').subprojects.each { Project testModule ->
  distro.copyModule(processExternalTestOutputsTaskProvider, testModule)
}

configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {

  apply plugin: 'elasticsearch.jdk-download'
  apply plugin: 'elasticsearch.repositories'

  // Setup all required JDKs
  project.jdks {
    ['darwin', 'windows', 'linux'].each { platform ->
      (platform == 'linux' || platform == 'darwin' ? ['x64', 'aarch64'] : ['x64']).each { architecture ->
        "bundled_${platform}_${architecture}" {
          it.platform = platform
          it.version = VersionProperties.bundledJdkVersion
          it.vendor = VersionProperties.bundledJdkVendor
          it.architecture = architecture
        }
      }
    }
  }
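
  // The loop above registers five bundled JDK downloads per archives/packages subproject: bundled_linux_x64,
  // bundled_linux_aarch64, bundled_darwin_x64, bundled_darwin_aarch64 and bundled_windows_x64. The jdkFiles
  // copySpec defined further down resolves them by that "bundled_<os>_<architecture>" name.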

  // TODO: the map needs to be an input of the tasks, so that when it changes, the task will re-run...
  /*****************************************************************************
   *             Properties to expand when copying packaging files             *
   *****************************************************************************/
  configurations {
    ['libs', 'libsVersionChecker', 'libsCliLauncher', 'libsServerCli', 'libsWindowsServiceCli', 'libsPluginCli', 'libsKeystoreCli', 'libsSecurityCli', 'libsGeoIpCli', 'libsAnsiConsole', 'libsNative'].each {
      create(it) {
        canBeConsumed = false
        canBeResolved = true
        attributes {
          attribute(Category.CATEGORY_ATTRIBUTE, objects.named(Category, Category.LIBRARY))
          attribute(Usage.USAGE_ATTRIBUTE, objects.named(Usage, Usage.JAVA_RUNTIME))
          attribute(Bundling.BUNDLING_ATTRIBUTE, objects.named(Bundling, Bundling.EXTERNAL))
        }
      }
    }
    all {
      resolutionStrategy.dependencySubstitution {
        substitute module("org.apache.logging.log4j:log4j-core") using project(":libs:elasticsearch-log4j") because "patched to remove JndiLookup class"
      }
    }
  }

  dependencies {
    libs project(':server')

    libsVersionChecker project(':distribution:tools:java-version-checker')
    libsCliLauncher project(':distribution:tools:cli-launcher')
    libsServerCli project(':distribution:tools:server-cli')
    libsWindowsServiceCli project(':distribution:tools:windows-service-cli')
    libsAnsiConsole project(':distribution:tools:ansi-console')
    libsPluginCli project(':distribution:tools:plugin-cli')
    libsKeystoreCli project(path: ':distribution:tools:keystore-cli')
    libsSecurityCli project(':x-pack:plugin:security:cli')
    libsGeoIpCli project(':distribution:tools:geoip-cli')
    libsNative project(':libs:elasticsearch-native:elasticsearch-native-libraries')
  }

  project.ext {

    /*****************************************************************************
     *                    Common files in all distributions                      *
     *****************************************************************************/
    libFiles = { os, architecture ->
      copySpec {
        // Delay by using closures, since they have not yet been configured, so no jar task exists yet.
        from(configurations.libs)
        into('java-version-checker') {
          from(configurations.libsVersionChecker)
        }
        into('cli-launcher') {
          from(configurations.libsCliLauncher)
        }
        into('tools/server-cli') {
          from(configurations.libsServerCli)
        }
        into('tools/windows-service-cli') {
          from(configurations.libsWindowsServiceCli)
        }
        into('tools/geoip-cli') {
          from(configurations.libsGeoIpCli)
        }
        into('tools/plugin-cli') {
          from(configurations.libsPluginCli)
        }
        into('tools/keystore-cli') {
          from(configurations.libsKeystoreCli)
        }
        into('tools/security-cli') {
          from(configurations.libsSecurityCli)
        }
        into('tools/ansi-console') {
          from(configurations.libsAnsiConsole)
        }
        into('platform') {
          from(configurations.libsNative)
          if (os != null) {
            include (os + '-' + architecture + '/*')
          }
        }
      }
    }
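
    // Rough layout produced by libFiles (the destination root, conventionally lib/, is chosen by the consuming
    // distribution): server jars at the top level, plus java-version-checker/, cli-launcher/, tools/* and
    // platform/<os>-<architecture>/ subdirectories for the CLI tools and native libraries.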

    modulesFiles = { os, architecture ->
      copySpec {
        eachFile {
          if (it.relativePath.segments[-2] == 'bin' || (os == 'darwin' && it.relativePath.segments[-2] == 'MacOS')) {
            // bin files, wherever they are within modules (eg platform specific) should be executable
            // and MacOS is an alternative to bin on macOS
            it.permissions.unix(0755)
          } else {
            it.permissions.unix(0644)
          }
        }
        List excludePlatforms = ['linux-x86_64', 'linux-aarch64', 'windows-x86_64', 'darwin-x86_64', 'darwin-aarch64']
        if (os != null) {
          String platform = os + '-' + architecture
          if (architecture == 'x64') {
            // ML platform dir uses the x86_64 nomenclature
            platform = os + '-x86_64'
          }
          excludePlatforms.remove(excludePlatforms.indexOf(platform))
        } else {
          excludePlatforms = []
        }
        from(defaultModulesFiles) {
          // geo registers the geo_shape mapper that is overridden by
          // the geo_shape mapper registered in the x-pack-spatial plugin
          exclude "**/geo/**"

          for (String excludePlatform : excludePlatforms) {
            exclude "**/platform/${excludePlatform}/**"
          }
        }
        if (buildParams.getSnapshotBuild()) {
          from(buildExternalTestModulesTaskProvider)
        }
        if (project.path.startsWith(':distribution:packages')) {
          from(systemdModuleFiles)
        }
      }
    }

    integTestModulesFiles = copySpec {
      from buildIntegTestModulesTaskProvider
    }

    configFiles = { distributionType, isTestDistro ->
      copySpec {
        with copySpec {
          // main config files, processed with distribution specific substitutions
          from '../src/config'
          exclude 'log4j2.properties' // this is handled separately below
          filter("tokens": expansionsForDistribution(distributionType, isTestDistro), ReplaceTokens.class)
        }
        from buildDefaultLog4jConfigTaskProvider
        from isTestDistro ? integTestConfigFiles : defaultConfigFiles
      }
    }

    binFiles = { distributionType, testDistro ->
      copySpec {
        // non-windows files, for all distributions
        with copySpec {
          from '../src/bin'
          exclude '*.exe'
          exclude '*.bat'
          eachFile {
            it.permissions {
              unix(0755)
            }
          }
          filter("tokens": expansionsForDistribution(distributionType, testDistro), ReplaceTokens.class)
        }
        // windows files, only for zip
        if (distributionType == 'zip') {
          with copySpec {
            from '../src/bin'
            include '*.bat'
            filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf'))
            filter("tokens": expansionsForDistribution(distributionType, testDistro), ReplaceTokens.class)
          }
          with copySpec {
            from '../src/bin'
            include '*.exe'
          }
        }
        // module provided bin files
        with copySpec {
          eachFile { it.permissions.unix(0755) }
          from(testDistro ? integTestBinFiles : defaultBinFiles)
          if (distributionType != 'zip') {
            exclude '*.bat'
          }
        }
      }
    }

    noticeFile = { testDistro ->
      copySpec {
        if (testDistro) {
          from buildServerNoticeTaskProvider
        } else {
          from(buildDefaultNoticeTaskProvider) {
            filePermissions {
              unix(0644)
            }
          }
        }
      }
    }

    jdkFiles = { Project project, String os, String architecture ->
      return copySpec {
        from project.jdks."bundled_${os}_${architecture}"
        exclude "demo/**"
        /*
         * The Contents/MacOS directory interferes with notarization, and is unused by our distribution, so we exclude
         * it from the build.
         */
        if ("darwin".equals(os)) {
          exclude "Contents/MacOS"
        }
        eachFile { FileCopyDetails details ->
          if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') {
            details.permissions {
              unix(0755)
            }
          } else {
            details.permissions {
              unix(0644)
            }
          }
          if (details.name == 'src.zip') {
            details.exclude()
          }
        }
      }
    }
  }
}

/**
 * Build some variables that are replaced in the packages. This includes both
 * scripts like bin/elasticsearch and bin/elasticsearch-plugin that a user might run and also
 * scripts like postinst which are run as part of the installation.
 *
 * <dl>
 *   <dt>package.name</dt>
 *   <dd>The name of the project. It's sprinkled throughout the scripts.</dd>
 *   <dt>package.version</dt>
 *   <dd>The version of the project. It's mostly used to find the exact jar name.</dd>
 *   <dt>path.conf</dt>
 *   <dd>The default directory from which to load configuration. This is used in
 *   the packaging scripts, but in that context it is always
 *   /etc/elasticsearch. It's also used in bin/elasticsearch-plugin, where it is
 *   /etc/elasticsearch for the os packages but $ESHOME/config otherwise.</dd>
 *   <dt>path.env</dt>
 *   <dd>The env file sourced before bin/elasticsearch to set environment
 *   variables. Think /etc/default/elasticsearch.</dd>
 *   <dt>scripts.footer</dt>
 *   <dd>Footer appended to control scripts embedded in the distribution that is
 *   (almost) entirely there for cosmetic reasons.</dd>
 * </dl>
 */
subprojects {
  ext.expansionsForDistribution = { distributionType, isTestDistro ->
    final String packagingPathData = "path.data: /var/lib/elasticsearch"
    final String pathLogs = "/var/log/elasticsearch"
    final String packagingPathLogs = "path.logs: ${pathLogs}"
    final String packagingLoggc = "${pathLogs}/gc.log"

    String licenseText
    if (isTestDistro) {
      licenseText = layout.settingsDirectory.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile.getText('UTF-8')
    } else {
      licenseText = layout.settingsDirectory.file('licenses/ELASTIC-LICENSE-2.0.txt').asFile.getText('UTF-8')
    }
    // license text needs to be indented with a single space
    licenseText = ' ' + licenseText.replace('\n', '\n ')

    String footer = "# Built for ${project.name}-${project.version} " +
      "(${distributionType})"
    Map<String, Object> expansions = [
      'project.name': project.name,
      'project.version': version,
      'project.minor.version': "${VersionProperties.elasticsearchVersion.major}.${VersionProperties.elasticsearchVersion.minor}",

      'path.conf': [
        'deb': '/etc/elasticsearch',
        'rpm': '/etc/elasticsearch',
        'def': '"$ES_HOME"/config'
      ],
      'path.data': [
        'deb': packagingPathData,
        'rpm': packagingPathData,
        'def': '#path.data: /path/to/data'
      ],
      'path.env': [
        'deb': '/etc/default/elasticsearch',
        'rpm': '/etc/sysconfig/elasticsearch',
        /* There isn't one of these files for tar or zip, but it's important to
           make an empty string here so the script can properly skip it. */
        'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; fi',
      ],
      'source.path.env': [
        'deb': 'source /etc/default/elasticsearch',
        'rpm': 'source /etc/sysconfig/elasticsearch',
        'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; fi',
      ],
      'path.logs': [
        'deb': packagingPathLogs,
        'rpm': packagingPathLogs,
        'def': '#path.logs: /path/to/logs'
      ],
      'loggc': [
        'deb': packagingLoggc,
        'rpm': packagingLoggc,
        'def': 'logs/gc.log'
      ],

      'heap.dump.path': [
        'deb': "-XX:HeapDumpPath=/var/lib/elasticsearch",
        'rpm': "-XX:HeapDumpPath=/var/lib/elasticsearch",
        'def': "-XX:HeapDumpPath=data"
      ],

      'error.file': [
        'deb': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
        'rpm': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
        'def': "-XX:ErrorFile=logs/hs_err_pid%p.log"
      ],

      'scripts.footer': [
        /* Debian needs exit 0 on these scripts so we add it here and preserve
           the pretty footer. */
        'deb': "exit 0\n${footer}",
        'def': footer
      ],

      'es.distribution.type': [
        'deb': 'deb',
        'rpm': 'rpm',
        'tar': 'tar',
        'zip': 'zip'
      ],

      'license.name': [
        'deb': 'Elastic-License'
      ],

      'license.text': [
        'deb': licenseText,
      ],
    ]
    Map<String, String> result = [:]
    expansions.each { key, value ->
      if (value instanceof Map) {
        // 'def' is for default, but it's three characters like 'rpm' and 'deb'
        value = value[distributionType] ?: value['def']
        if (value == null) {
          return
        }
      }
      // expansions is String->Object but result is String->String, so we have to coerce the values
      result[key] = value.toString()
    }
    return result
  }
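
  // Illustration (hypothetical template line, not taken from this repository): the ReplaceTokens filters used by
  // configFiles/binFiles above substitute @key@ markers with the values returned here, so a packaging source
  // line containing @path.data@ would become "path.data: /var/lib/elasticsearch" for 'deb'/'rpm' and
  // "#path.data: /path/to/data" in the default ('def') case.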

  ext.assertLinesInFile = { Path path, List<String> expectedLines ->
    final List<String> actualLines = Files.readAllLines(path)
    int line = 0
    for (final String expectedLine : expectedLines) {
      final String actualLine = actualLines.get(line)
      if (expectedLine != actualLine) {
        throw new GradleException("expected line [${line + 1}] in [${path}] to be [${expectedLine}] but was [${actualLine}]")
      }
      line++
    }
  }
}

['archives:windows-zip',
 'archives:darwin-tar',
 'archives:darwin-aarch64-tar',
 'archives:linux-aarch64-tar',
 'archives:linux-tar',
 'archives:integ-test-zip',
 'packages:rpm', 'packages:deb',
 'packages:aarch64-rpm', 'packages:aarch64-deb',
].forEach { subName ->
  Project subproject = project("${project.path}:${subName}")
  Configuration configuration = configurations.create(subproject.name)
  dependencies {
    "${configuration.name}" project(path: subproject.path, configuration: 'default')
  }
}
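
// The loop above exposes each archive and package distribution through a same-named configuration on this
// project, so other builds can depend on a specific distribution artifact by configuration name.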

// This artifact makes it possible for other projects to pull
// in the final log4j2.properties configuration, as it appears in the
// archive distribution.
artifacts.add('log4jConfig', file("${defaultOutputs}/log4j2.properties")) {
  type = 'file'
  name = 'log4j2.properties'
  builtBy 'buildDefaultLog4jConfig'
}

tasks.named('resolveAllDependencies') {
  // We don't want to build all our distribution artifacts when priming our dependency cache
  configs = []
}