/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import org.apache.tools.ant.filters.FixCrLfFilter
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.internal.ConcatFilesTask
import org.elasticsearch.gradle.internal.DependenciesInfoPlugin
import org.elasticsearch.gradle.internal.NoticeTask
import org.elasticsearch.gradle.internal.info.BuildParams

import java.nio.file.Files
import java.nio.file.Path

plugins {
  id 'base'
}

configurations {
  log4jConfig
  dependencyInfos {
    attributes {
      attribute(Category.CATEGORY_ATTRIBUTE, project.getObjects().named(Category.class, Category.DOCUMENTATION))
    }
  }
}

// Collect the dependency-info CSV artifact from every project that applies the
// DependenciesInfoPlugin, so the report task below can concatenate them.
def thisProj = project
rootProject.allprojects { proj ->
  proj.plugins.withType(DependenciesInfoPlugin) {
    thisProj.dependencies.add("dependencyInfos", project.dependencies.project(path: proj.path))
  }
}

/*****************************************************************************
 *                  Third party dependencies report                          *
 *****************************************************************************/

// Concatenates the dependencies CSV files into a single file
tasks.register("generateDependenciesReport", ConcatFilesTask) {
  files = configurations.dependencyInfos
  headerLine = "name,version,url,license,sourceURL"
  target = new File(providers.systemProperty('csv')
    .orElse("${project.buildDir}/reports/dependencies/es-dependencies.csv")
    .get()
  )
  // explicitly add our dependency on the JDK
  String jdkVersion = VersionProperties.versions.get('bundled_jdk').split('@')[0]
  String jdkMajorVersion = jdkVersion.split('[+.]')[0]
  String sourceUrl = "https://github.com/openjdk/jdk${jdkMajorVersion}u/archive/refs/tags/jdk-${jdkVersion}.tar.gz"
  additionalLines << "OpenJDK,${jdkVersion},https://openjdk.java.net/,GPL-2.0-with-classpath-exception,${sourceUrl}".toString()

  // Explicitly add the dependency on the RHEL UBI Docker base image
  String[] rhelUbiFields = [
    'Red Hat Universal Base Image minimal',
    '8',
    'https://catalog.redhat.com/software/containers/ubi8/ubi-minimal/5c359a62bed8bd75a2c3fba8',
    'Custom;https://www.redhat.com/licenses/EULA_Red_Hat_Universal_Base_Image_English_20190422.pdf',
    'https://oss-dependencies.elastic.co/red-hat-universal-base-image-minimal/8/ubi-minimal-8-source.tar.gz'
  ]
  additionalLines << rhelUbiFields.join(',')
}

/*****************************************************************************
 *                                Notice file                                *
 *****************************************************************************/

// integ test zip only uses server, so a different notice file is needed there
def buildServerNoticeTaskProvider = tasks.register("buildServerNotice", NoticeTask)

// other distributions include notices from modules as well, which are added below later
def buildDefaultNoticeTaskProvider = tasks.register("buildDefaultNotice", NoticeTask) {
  licensesDir new File(project(':distribution').projectDir, 'licenses')
}

def buildDefaultNoJdkNoticeTaskProvider = tasks.register("buildDefaultNoJdkNotice", NoticeTask)

def buildOssNoJdkNoticeTaskProvider = tasks.register("buildOssNoJdkNotice", NoticeTask)

// The :server and :libs projects belong to all distributions
tasks.withType(NoticeTask).configureEach {
  licensesDir project(':server').file('licenses')
  source project(':server').file('src/main/java')
  project(':libs').subprojects.each { Project lib ->
    licensesDir lib.file('licenses')
    source lib.file('src/main/java')
  }
}

/*****************************************************************************
 *                                  Modules                                  *
 *****************************************************************************/
String ossOutputs = 'build/outputs/oss'
String defaultOutputs = 'build/outputs/default'
String systemdOutputs = 'build/outputs/systemd'
String integTestOutputs = 'build/outputs/integ-test'
String externalTestOutputs = 'build/outputs/external-test'

def processDefaultOutputsTaskProvider = tasks.register("processDefaultOutputs", Sync) {
  into defaultOutputs
}

def processSystemdOutputsTaskProvider = tasks.register("processSystemdOutputs", Sync) {
  into systemdOutputs
}

def processExternalTestOutputsTaskProvider = tasks.register("processExternalTestOutputs", Sync) {
  into externalTestOutputs
}

// Integ tests work over the rest http layer, so we need a transport included with the integ test zip.
// All transport modules are included so that they may be randomized for testing
// Additionally install security plugin so we can run tests with security enabled using this distribution
def processIntegTestOutputsTaskProvider = tasks.register("processIntegTestOutputs", Sync) {
  into integTestOutputs
}

// Lazy file trees over the Sync outputs; builtBy wires the task dependency so
// consumers of these trees trigger the corresponding process*Outputs task.
def defaultModulesFiles = fileTree("${defaultOutputs}/modules") {
  builtBy processDefaultOutputsTaskProvider
}
def defaultBinFiles = fileTree("${defaultOutputs}/bin") {
  builtBy processDefaultOutputsTaskProvider
}
def integTestBinFiles = fileTree("${integTestOutputs}/bin") {
  builtBy processIntegTestOutputsTaskProvider
}
def defaultConfigFiles = fileTree("${defaultOutputs}/config") {
  builtBy processDefaultOutputsTaskProvider
}
def integTestConfigFiles = fileTree("${integTestOutputs}/config") {
  builtBy processIntegTestOutputsTaskProvider
}
def systemdModuleFiles = fileTree("${systemdOutputs}/modules") {
  builtBy processSystemdOutputsTaskProvider
}

def buildIntegTestModulesTaskProvider = tasks.register("buildIntegTestModules") {
  dependsOn processIntegTestOutputsTaskProvider
  outputs.dir "${integTestOutputs}/modules"
}

def buildExternalTestModulesTaskProvider = tasks.register("buildExternalTestModules") {
  dependsOn "processExternalTestOutputs"
  outputs.dir "${externalTestOutputs}/modules"
}

// Resolves a module project's 'zip' configuration as a detached configuration,
// so its bundle zip can be consumed without adding a permanent dependency.
Configuration moduleZip(Project module) {
  Dependency dep = project.dependencies.project(path: module.path, configuration: 'zip')
  Configuration config = project.configurations.detachedConfiguration(dep)
  return config
}

// Adds the content of a module's bundle zip to the given Sync task, relocating
// non config/bin files under modules/<name> and flattening meta-plugin dirs.
void copyModule(TaskProvider copyTask, Project module) {
  copyTask.configure {
    Configuration moduleConfig = moduleZip(module)

    dependsOn moduleConfig
    from({ zipTree(moduleConfig.singleFile) }) {
      includeEmptyDirs false

      // these are handled separately in the log4j config tasks below
      exclude '*/config/log4j2.properties'
      exclude 'config/log4j2.properties'

      eachFile { details ->
        String name = module.esplugin.name
        // Copy all non config/bin files
        // Note these might be under a subdirectory in the case of a meta plugin
        if ((details.relativePath.pathString ==~ /([^\/]+\/)?(config|bin)\/.*/) == false) {
          details.relativePath = details.relativePath.prepend('modules', name)
        } else if ((details.relativePath.pathString ==~ /([^\/]+\/)(config|bin)\/.*/)) {
          // this is the meta plugin case, in which we need to remove the intermediate dir
          String[] segments = details.relativePath.segments
          details.relativePath = new RelativePath(true, segments.takeRight(segments.length - 1))
        }
      }
    }
  }
}

// Concatenates the server log4j2.properties with every module/x-pack plugin
// log4j2.properties into a single file used by the distributions.
def buildDefaultLog4jConfigTaskProvider = tasks.register("buildDefaultLog4jConfig") {
  mustRunAfter('processDefaultOutputs')
  def outputFile = file("${defaultOutputs}/log4j2.properties")
  def inputFiles = fileTree('src/config').matching { include 'log4j2.properties' }
  project(':modules').subprojects.each {
    inputFiles = inputFiles + it.fileTree('src/main/config').matching { include 'log4j2.properties' }
  }
  project(':x-pack:plugin').subprojects.each {
    inputFiles = inputFiles + it.fileTree('src/main/config').matching { include 'log4j2.properties' }
  }
  inputs.files(inputFiles)
  outputs.file outputFile
  doLast {
    outputFile.setText('', 'UTF-8')
    inputFiles.files.eachWithIndex({ f, i ->
      if (i != 0) {
        outputFile.append('\n\n', 'UTF-8')
      }
      outputFile.append(f.text, 'UTF-8')
    })
  }
}

ext.restTestExpansions = [
  'expected.modules.count': 0
]

// we create the buildOssModules task above but fill it here so we can do a single
// loop over modules to also setup cross task dependencies and increment our modules counter
project.rootProject.subprojects.findAll { it.parent.path == ':modules' }.each { Project module ->
  if (module.name == 'systemd') {
    // the systemd module is only included in the package distributions
    return
  }
  File licenses = new File(module.projectDir, 'licenses')
  if (licenses.exists()) {
    buildDefaultNoticeTaskProvider.configure {
      licensesDir licenses
      source module.file('src/main/java')
    }
  }

  copyModule(processDefaultOutputsTaskProvider, module)
  if (module.name.startsWith('transport-')) {
    copyModule(processIntegTestOutputsTaskProvider, module)
  }

  restTestExpansions['expected.modules.count'] += 1
}

// use licenses from each of the bundled xpack plugins
Project xpack = project(':x-pack:plugin')
xpack.subprojects.findAll { it.parent == xpack }.each { Project xpackModule ->
  File licenses = new File(xpackModule.projectDir, 'licenses')
  if (licenses.exists()) {
    buildDefaultNoticeTaskProvider.configure {
      licensesDir licenses
      source xpackModule.file('src/main/java')
    }
  }
  copyModule(processDefaultOutputsTaskProvider, xpackModule)
  if (xpackModule.name.equals('core') || xpackModule.name.equals('security')) {
    copyModule(processIntegTestOutputsTaskProvider, xpackModule)
  }
}

copyModule(processSystemdOutputsTaskProvider, project(':modules:systemd'))

project(':test:external-modules').subprojects.each { Project testModule ->
  copyModule(processExternalTestOutputsTaskProvider, testModule)
}

configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
  apply plugin: 'elasticsearch.jdk-download'
  apply plugin: 'elasticsearch.repositories'

  // Setup all required JDKs
  project.jdks {
    ['darwin', 'windows', 'linux'].each { platform ->
      // linux and darwin ship both x64 and aarch64 bundled JDKs; windows only x64
      (platform == 'linux' || platform == 'darwin' ? ['x64', 'aarch64'] : ['x64']).each { architecture ->
        "bundled_${platform}_${architecture}" {
          it.platform = platform
          it.version = VersionProperties.bundledJdkVersion
          it.vendor = VersionProperties.bundledJdkVendor
          it.architecture = architecture
        }
      }
    }
  }

  // TODO: the map needs to be an input of the tasks, so that when it changes, the task will re-run...
  /*****************************************************************************
   *             Properties to expand when copying packaging files             *
   *****************************************************************************/
  configurations {
    ['libs', 'libsPluginCli', 'libsKeystoreCli', 'libsSecurityCli', 'libsGeoIpCli'].each {
      create(it) {
        canBeConsumed = false
        canBeResolved = true
        attributes {
          attribute(Category.CATEGORY_ATTRIBUTE, objects.named(Category, Category.LIBRARY))
          attribute(Usage.USAGE_ATTRIBUTE, objects.named(Usage, Usage.JAVA_RUNTIME))
          attribute(Bundling.BUNDLING_ATTRIBUTE, objects.named(Bundling, Bundling.EXTERNAL))
        }
      }
    }
    all {
      resolutionStrategy.dependencySubstitution {
        substitute module("org.apache.logging.log4j:log4j-core") using project(":libs:elasticsearch-log4j") because "patched to remove JndiLookup class"
      }
    }
  }

  dependencies {
    libs project(':server')
    libs project(':distribution:tools:java-version-checker')
    libs project(':distribution:tools:launchers')
    libsPluginCli project(':distribution:tools:plugin-cli')
    libsKeystoreCli project(path: ':distribution:tools:keystore-cli')
    libsSecurityCli project(':x-pack:plugin:security:cli')
    libsGeoIpCli project(':distribution:tools:geoip-cli')
  }

  project.ext {

    /*****************************************************************************
     *                   Common files in all distributions                       *
     *****************************************************************************/
    libFiles = { testDistro ->
      copySpec {
        // Delay by using closures, since they have not yet been configured, so no jar task exists yet.
        from(configurations.libs)
        into('tools/geoip-cli') {
          from(configurations.libsGeoIpCli)
        }
        into('tools/plugin-cli') {
          from(configurations.libsPluginCli)
        }
        into('tools/keystore-cli') {
          from(configurations.libsKeystoreCli)
        }
        if (testDistro == false) {
          into('tools/security-cli') {
            from(configurations.libsSecurityCli)
          }
        }
      }
    }

    modulesFiles = { platform ->
      copySpec {
        eachFile {
          if (it.relativePath.segments[-2] == 'bin'
            || ((platform == 'darwin-x86_64' || platform == 'darwin-aarch64') && it.relativePath.segments[-2] == 'MacOS')) {
            // bin files, wherever they are within modules (eg platform specific) should be executable
            // and MacOS is an alternative to bin on macOS
            it.mode = 0755
          } else {
            it.mode = 0644
          }
        }
        // Keep only the platform dir matching this distribution; a null
        // platform means keep everything (no platform-specific pruning).
        List excludePlatforms = ['linux-x86_64', 'linux-aarch64', 'windows-x86_64', 'darwin-x86_64', 'darwin-aarch64']
        if (platform != null) {
          excludePlatforms.remove(excludePlatforms.indexOf(platform))
        } else {
          excludePlatforms = []
        }
        from(defaultModulesFiles) {
          // geo registers the geo_shape mapper that is overridden by
          // the geo_shape mapper registered in the x-pack-spatial plugin
          exclude "**/geo/**"
          for (String excludePlatform : excludePlatforms) {
            exclude "**/platform/${excludePlatform}/**"
          }
        }
        if (BuildParams.isSnapshotBuild()) {
          from(buildExternalTestModulesTaskProvider)
        }
        if (project.path.startsWith(':distribution:packages')) {
          from(systemdModuleFiles)
        }
      }
    }

    integTestModulesFiles = copySpec {
      from buildIntegTestModulesTaskProvider
    }

    configFiles = { distributionType, testDistro, jdk ->
      copySpec {
        with copySpec {
          // main config files, processed with distribution specific substitutions
          from '../src/config'
          exclude 'log4j2.properties' // this is handled separately below
          filter("tokens": expansionsForDistribution(distributionType, testDistro, jdk), ReplaceTokens.class)
        }
        from buildDefaultLog4jConfigTaskProvider
        from testDistro ? integTestConfigFiles : defaultConfigFiles
      }
    }

    binFiles = { distributionType, testDistro, jdk ->
      copySpec {
        // non-windows files, for all distributions
        with copySpec {
          from '../src/bin'
          exclude '*.exe'
          exclude '*.bat'
          eachFile { it.setMode(0755) }
          filter("tokens": expansionsForDistribution(distributionType, testDistro, jdk), ReplaceTokens.class)
        }
        // windows files, only for zip
        if (distributionType == 'zip') {
          with copySpec {
            from '../src/bin'
            include '*.bat'
            filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf'))
            filter("tokens": expansionsForDistribution(distributionType, testDistro, jdk), ReplaceTokens.class)
          }
          with copySpec {
            from '../src/bin'
            include '*.exe'
          }
        }
        // module provided bin files
        with copySpec {
          eachFile { it.setMode(0755) }
          from(testDistro ? integTestBinFiles : defaultBinFiles)
          if (distributionType != 'zip') {
            exclude '*.bat'
          }
        }
      }
    }

    noticeFile = { testDistro, jdk ->
      copySpec {
        if (testDistro) {
          from buildServerNoticeTaskProvider
        } else {
          if (jdk) {
            from(buildDefaultNoticeTaskProvider) {
              fileMode = 0644
            }
          } else {
            from(buildDefaultNoJdkNoticeTaskProvider) {
              fileMode = 0644
            }
          }
        }
      }
    }

    jdkFiles = { Project project, String platform, String architecture ->
      return copySpec {
        from project.jdks."bundled_${platform}_${architecture}"
        exclude "demo/**"
        /*
         * The Contents/MacOS directory interferes with notarization, and is unused by our distribution, so we exclude
         * it from the build.
         */
        if ("darwin".equals(platform)) {
          exclude "Contents/MacOS"
        }
        eachFile { FileCopyDetails details ->
          if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') {
            details.mode = 0755
          }
          if (details.name == 'src.zip') {
            details.exclude()
          }
        }
      }
    }
  }
}

/**
 * Build some variables that are replaced in the packages. This includes both
 * scripts like bin/elasticsearch and bin/elasticsearch-plugin that a user might run and also
 * scripts like postinst which are run as part of the installation.
 *
 * <dl>
 *  <dt>package.name</dt>
 *  <dd>The name of the project. Its sprinkled throughout the scripts.</dd>
 *  <dt>package.version</dt>
 *  <dd>The version of the project. Its mostly used to find the exact jar name.</dd>
 *  <dt>path.conf</dt>
 *  <dd>The default directory from which to load configuration. This is used in
 *    the packaging scripts, but in that context it is always
 *    /etc/elasticsearch. Its also used in bin/elasticsearch-plugin, where it is
 *    /etc/elasticsearch for the os packages but $ESHOME/config otherwise.</dd>
 *  <dt>path.env</dt>
 *  <dd>The env file sourced before bin/elasticsearch to set environment
 *    variables. Think /etc/defaults/elasticsearch.</dd>
 *  <dt>scripts.footer</dt>
 *  <dd>Footer appended to control scripts embedded in the distribution that is
 *    (almost) entirely there for cosmetic reasons.</dd>
 *  <dt>stopping.timeout</dt>
 *  <dd>RPM's init script needs to wait for elasticsearch to stop before
 *    returning from stop and it needs a maximum time to wait. This is it. One
 *    day. DEB retries forever.</dd>
 * </dl>
 */
subprojects {
  ext.expansionsForDistribution = { distributionType, testDistro, jdk ->
    final String packagingPathData = "path.data: /var/lib/elasticsearch"
    final String pathLogs = "/var/log/elasticsearch"
    final String packagingPathLogs = "path.logs: ${pathLogs}"
    final String packagingLoggc = "${pathLogs}/gc.log"

    String licenseText
    if (testDistro) {
      licenseText = rootProject.file('licenses/SSPL-1.0+ELASTIC-LICENSE-2.0.txt').getText('UTF-8')
    } else {
      licenseText = rootProject.file('licenses/ELASTIC-LICENSE-2.0.txt').getText('UTF-8')
    }
    // license text needs to be indented with a single space
    licenseText = ' ' + licenseText.replace('\n', '\n ')

    String footer = "# Built for ${project.name}-${project.version} " +
      "(${distributionType})"
    Map expansions = [
      'project.name': project.name,
      'project.version': version,
      'project.minor.version': "${VersionProperties.elasticsearchVersion.major}.${VersionProperties.elasticsearchVersion.minor}",

      'path.conf': [
        'deb': '/etc/elasticsearch',
        'rpm': '/etc/elasticsearch',
        'def': '"$ES_HOME"/config'
      ],
      'path.data': [
        'deb': packagingPathData,
        'rpm': packagingPathData,
        'def': '#path.data: /path/to/data'
      ],
      'path.env': [
        'deb': '/etc/default/elasticsearch',
        'rpm': '/etc/sysconfig/elasticsearch',
        /* There isn't one of these files for tar or zip but its important to
          make an empty string here so the script can properly skip it. */
        // NOTE: the conditional must close with 'fi' (it previously read 'done',
        // which is invalid shell and broke the generated launch scripts)
        'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; fi',
      ],
      'source.path.env': [
        'deb': 'source /etc/default/elasticsearch',
        'rpm': 'source /etc/sysconfig/elasticsearch',
        'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; fi',
      ],
      'path.logs': [
        'deb': packagingPathLogs,
        'rpm': packagingPathLogs,
        'def': '#path.logs: /path/to/logs'
      ],
      'loggc': [
        'deb': packagingLoggc,
        'rpm': packagingLoggc,
        'def': 'logs/gc.log'
      ],

      'heap.dump.path': [
        'deb': "-XX:HeapDumpPath=/var/lib/elasticsearch",
        'rpm': "-XX:HeapDumpPath=/var/lib/elasticsearch",
        'def': "-XX:HeapDumpPath=data"
      ],

      'error.file': [
        'deb': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
        'rpm': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
        'def': "-XX:ErrorFile=logs/hs_err_pid%p.log"
      ],

      'stopping.timeout': [
        'rpm': 86400,
      ],

      'scripts.footer': [
        /* Debian needs exit 0 on these scripts so we add it here and preserve
          the pretty footer. */
        'deb': "exit 0\n${footer}",
        'def': footer
      ],

      'es.distribution.flavor': [
        'def': testDistro ? 'oss' : 'default'
      ],

      'es.distribution.type': [
        'deb': 'deb',
        'rpm': 'rpm',
        'tar': 'tar',
        'zip': 'zip'
      ],

      'es.bundled_jdk': [
        'def': jdk ? 'true' : 'false'
      ],

      'license.name': [
        'deb': testDistro ? 'ASL-2.0' : 'Elastic-License'
      ],

      'license.text': [
        'deb': licenseText,
      ],
    ]
    Map result = [:]
    expansions.each { key, value ->
      if (value instanceof Map) {
        // 'def' is for default but its three characters like 'rpm' and 'deb'
        value = value[distributionType] ?: value['def']
        if (value == null) {
          return
        }
      }
      // expansions is String->Object but result is String->String, so we have to coerce the values
      result[key] = value.toString()
    }
    return result
  }

  // Asserts that the first lines of the file at `path` match `expectedLines`
  // exactly, in order; throws a GradleException on the first mismatch.
  ext.assertLinesInFile = { Path path, List expectedLines ->
    final List actualLines = Files.readAllLines(path)
    int line = 0
    for (final String expectedLine : expectedLines) {
      final String actualLine = actualLines.get(line)
      if (expectedLine != actualLine) {
        throw new GradleException("expected line [${line + 1}] in [${path}] to be [${expectedLine}] but was [${actualLine}]")
      }
      line++
    }
  }
}

// Expose each distribution subproject's default artifact through a same-named
// configuration on this project so other builds can depend on them uniformly.
['archives:windows-zip',
 'archives:darwin-tar',
 'archives:darwin-aarch64-tar',
 'archives:linux-aarch64-tar',
 'archives:linux-tar',
 'archives:integ-test-zip',
 'packages:rpm',
 'packages:deb',
 'packages:aarch64-rpm',
 'packages:aarch64-deb',
].forEach { subName ->
  Project subproject = project("${project.path}:${subName}")
  Configuration configuration = configurations.create(subproject.name)
  dependencies {
    "${configuration.name}" project(path: subproject.path, configuration: 'default')
  }
}

// This artifact makes it possible for other projects to pull
// in the final log4j2.properties configuration, as it appears in the
// archive distribution.
artifacts.add('log4jConfig', file("${defaultOutputs}/log4j2.properties")) {
  type 'file'
  name 'log4j2.properties'
  builtBy 'buildDefaultLog4jConfig'
}

tasks.named('resolveAllDependencies') {
  // We don't want to build all our distribution artifacts when priming our dependency cache
  configs = []
}