Remove MavenFilteringHack (#73637) (#75550)

Co-authored-by: Jake Landis <jake.landis@elastic.co>
Joe Gallo authored 2021-07-21 09:25:36 -04:00, committed by GitHub
parent d02a481f1f
commit ce98a62427
44 changed files with 160 additions and 177 deletions
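
In short: the build previously funneled copy-spec filtering through the MavenFilteringHack helper so that Maven-style ${key} placeholders were substituted; this commit deletes the helper and calls Ant's ReplaceTokens filter directly, so filtered resources switch to its default @key@ delimiters and token values must be Strings. A minimal before/after sketch (the task name and the some.key expansion are illustrative, not taken from this diff):

    // Before: via the helper (import org.elasticsearch.gradle.internal.MavenFilteringHack)
    tasks.named("processTestResources").configure {
        MavenFilteringHack.filter(it, ['some.key': 'someValue'])          // substitutes ${some.key}
    }

    // After: ReplaceTokens directly (import org.apache.tools.ant.filters.ReplaceTokens)
    tasks.named("processTestResources").configure {
        filter("tokens": ['some.key': 'someValue'], ReplaceTokens.class)  // substitutes @some.key@
    }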


@ -116,7 +116,7 @@ class RestResourcesPluginFuncTest extends AbstractRestResourcesFuncTest {
result.task(':copyYamlTestsTask').outcome == TaskOutcome.NO_SOURCE
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiFoo).exists()
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiXpackFoo).exists()
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiBar).exists() ==false
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiBar).exists() == false
file("/build/restResources/yamlSpecs/rest-api-spec/api/" + apiXpackBar).exists() == false
}
@ -136,6 +136,10 @@ class RestResourcesPluginFuncTest extends AbstractRestResourcesFuncTest {
includeXpack 'bar'
}
}
tasks.named("copyYamlTestsTask").configure {
it.substitutions = [ 'replacedValue' : 'replacedWithValue' ]
}
"""
String apiCore1 = "foo1.json"
String apiCore2 = "foo2.json"
@ -143,6 +147,10 @@ class RestResourcesPluginFuncTest extends AbstractRestResourcesFuncTest {
String coreTest = "foo/10_basic.yml"
String xpackTest = "bar/10_basic.yml"
setupRestResources([apiCore1, apiCore2, apiXpack], [coreTest], [xpackTest])
// drop a value to replace from expansions above into a test file
file("rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/" + coreTest) << "@replacedValue@"
// intentionally not adding tests to project, they will be copied over via the plugin
// this tests that the test copy happens before the api copy since the api copy will only trigger if there are tests in the project
@ -158,6 +166,9 @@ class RestResourcesPluginFuncTest extends AbstractRestResourcesFuncTest {
file("/build/restResources/yamlTests/rest-api-spec/test/" + coreTest).exists()
file("/build/restResources/yamlTests/rest-api-spec/test/" + xpackTest).exists()
// confirm that replacement happened
file("/build/restResources/yamlTests/rest-api-spec/test/" + coreTest).getText("UTF-8") == "replacedWithValue"
when:
result = gradleRunner("copyRestApiSpecsTask").build()
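
The new assertions boil down to: seed a file containing the literal Ant token, run the copy task with substitutions configured, and check that the token was rewritten in the copied output. Condensed, with the paths and values used by the test above:

    // source file seeded with:      @replacedValue@
    // substitutions configured as:  ['replacedValue': 'replacedWithValue']
    // the copied file must then contain only the replacement:
    file("/build/restResources/yamlTests/rest-api-spec/test/foo/10_basic.yml").getText("UTF-8") == "replacedWithValue"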


@ -1,43 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.gradle.internal;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.tools.ant.filters.ReplaceTokens;
import org.gradle.api.file.CopySpec;
/**
* Gradle provides "expansion" functionality using groovy's SimpleTemplatingEngine (TODO: check name).
* However, it allows substitutions of the form {@code $foo} (no curlies). Rest tests provide
* some substitution from the test runner, which this form is used for.
*
* This class provides a helper to do maven filtering, where only the form {@code $\{foo\}} is supported.
*
* TODO: we should get rid of this hack, and make the rest tests use some other identifier
* for builtin vars
*/
public class MavenFilteringHack {
/**
* Adds a filter to the given copy spec that will substitute maven variables.
*
*/
static void filter(CopySpec copySpec, Map<Object, Object> substitutions) {
Map<String, String> mavenSubstitutions = new LinkedHashMap<>();
Map<String, Object> argMap = new LinkedHashMap<>();
substitutions.forEach((k, v) -> mavenSubstitutions.put("{" + k.toString(), v.toString()));
argMap.put("tokens", mavenSubstitutions);
argMap.put("beginToken", "$");
argMap.put("endToken", "}");
copySpec.filter(argMap, ReplaceTokens.class);
}
}
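
For reference, the helper deleted above emulated Maven-style ${key} placeholders by prefixing every key with "{" and configuring ReplaceTokens with beginToken "$" and endToken "}". With it gone, callers configure ReplaceTokens directly and accept its default @key@ delimiters, which is why every filtered resource in this commit moves from ${key} to @key@. The two calls are roughly equivalent (the key and value shown here are illustrative):

    // old behaviour, as produced by the helper above: matches ${path.conf}
    copySpec.filter(tokens: ['{path.conf': '/etc/elasticsearch'], beginToken: '$', endToken: '}', ReplaceTokens.class)

    // new direct usage: matches @path.conf@ (ReplaceTokens' default delimiters)
    copySpec.filter(tokens: ['path.conf': '/etc/elasticsearch'], ReplaceTokens.class)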


@ -7,8 +7,8 @@
*/
package org.elasticsearch.gradle.internal.test.rest;
import org.apache.tools.ant.filters.ReplaceTokens;
import org.gradle.api.DefaultTask;
import org.gradle.api.file.ArchiveOperations;
import org.gradle.api.file.DirectoryProperty;
import org.gradle.api.file.FileCollection;
import org.gradle.api.file.FileSystemOperations;
@ -18,7 +18,7 @@ import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.ListProperty;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.Optional;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.SkipWhenEmpty;
import org.gradle.api.tasks.TaskAction;
@ -26,10 +26,11 @@ import org.gradle.api.tasks.util.PatternFilterable;
import org.gradle.api.tasks.util.PatternSet;
import org.gradle.internal.Factory;
import javax.inject.Inject;
import java.io.File;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.inject.Inject;
import static org.elasticsearch.gradle.util.GradleUtils.getProjectPathFromTask;
@ -44,6 +45,7 @@ public class CopyRestTestsTask extends DefaultTask {
private static final String REST_TEST_PREFIX = "rest-api-spec/test";
private final ListProperty<String> includeCore;
private final ListProperty<String> includeXpack;
private Map<String, String> substitutions;
private final DirectoryProperty outputResourceDir;
private FileCollection coreConfig;
@ -84,6 +86,16 @@ public class CopyRestTestsTask extends DefaultTask {
return includeXpack;
}
public void setSubstitutions(Map<String, String> substitutions) {
this.substitutions = substitutions;
}
@Input
@Optional
public Map<String, String> getSubstitutions() {
return substitutions;
}
@SkipWhenEmpty
@InputFiles
public FileTree getInputDir() {
@ -127,6 +139,9 @@ public class CopyRestTestsTask extends DefaultTask {
c.from(coreConfigToFileTree.apply(coreConfig));
c.into(restTestOutputDir);
c.include(corePatternSet.getIncludes());
if (substitutions != null) {
c.filter(Map.of("tokens", substitutions), ReplaceTokens.class);
}
});
}
// only copy x-pack tests if explicitly instructed
@ -136,6 +151,9 @@ public class CopyRestTestsTask extends DefaultTask {
c.from(xpackConfigToFileTree.apply(xpackConfig));
c.into(restTestOutputDir);
c.include(xpackPatternSet.getIncludes());
if (substitutions != null) {
c.filter(Map.of("tokens", substitutions), ReplaceTokens.class);
}
});
}
// copy any additional config
@ -143,6 +161,9 @@ public class CopyRestTestsTask extends DefaultTask {
fileSystemOperations.copy(c -> {
c.from(additionalConfigToFileTree.apply(additionalConfig));
c.into(restTestOutputDir);
if (substitutions != null) {
c.filter(Map.of("tokens", substitutions), ReplaceTokens.class);
}
});
}
}
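
The new substitutions input is optional; when present, each of the three copy actions above applies ReplaceTokens with that map. A hypothetical build-script usage, mirroring the functional test earlier in the commit:

    tasks.named("copyYamlTestsTask").configure {
        // copied YAML tests get @replacedValue@ rewritten to replacedWithValue
        it.substitutions = ['replacedValue': 'replacedWithValue']
    }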


@ -5,7 +5,8 @@
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.apache.tools.ant.filters.ReplaceTokens
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
@ -17,7 +18,7 @@ group = "org.elasticsearch.distribution.integ-test-zip"
tasks.named("processTestResources").configure {
inputs.properties(project(':distribution').restTestExpansions)
MavenFilteringHack.filter(it, project(':distribution').restTestExpansions)
filter("tokens" : project(':distribution').restTestExpansions.collectEntries {k, v -> [k, v.toString()]} /* must be a map of strings */, ReplaceTokens.class)
}
// make the pom file name use elasticsearch instead of the project name


@ -1,4 +1,4 @@
grant {
// Needed to read the log file
permission java.io.FilePermission "${tests.logfile}", "read";
permission java.io.FilePermission "@tests.logfile@", "read";
};


@ -8,9 +8,9 @@
import org.apache.tools.ant.filters.FixCrLfFilter
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.ConcatFilesTask
import org.elasticsearch.gradle.internal.DependenciesInfoTask
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.elasticsearch.gradle.internal.NoticeTask
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.internal.info.BuildParams
@ -194,7 +194,7 @@ def buildDefaultLog4jConfigTaskProvider = tasks.register("buildDefaultLog4jConfi
}
ext.restTestExpansions = [
'expected.modules.count': 0,
'expected.modules.count': 0
]
// we create the buildOssModules task above but fill it here so we can do a single
// loop over modules to also setup cross task dependencies and increment our modules counter
@ -357,7 +357,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
// main config files, processed with distribution specific substitutions
from '../src/config'
exclude 'log4j2.properties' // this is handled separately below
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, testDistro, jdk))
filter("tokens" : expansionsForDistribution(distributionType, testDistro, jdk), ReplaceTokens.class)
}
from buildDefaultLog4jConfigTaskProvider
from defaultConfigFiles
@ -372,7 +372,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
exclude '*.exe'
exclude '*.bat'
eachFile { it.setMode(0755) }
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, testDistro, jdk))
filter("tokens" : expansionsForDistribution(distributionType, testDistro, jdk), ReplaceTokens.class)
}
// windows files, only for zip
if (distributionType == 'zip') {
@ -380,7 +380,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
from '../src/bin'
include '*.bat'
filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf'))
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, testDistro, jdk))
filter("tokens" : expansionsForDistribution(distributionType, testDistro, jdk), ReplaceTokens.class)
}
with copySpec {
from '../src/bin'
@ -569,7 +569,7 @@ subprojects {
],
]
Map<String, String> result = [:]
expansions = expansions.each { key, value ->
expansions.each { key, value ->
if (value instanceof Map) {
// 'def' is for default but its three characters like 'rpm' and 'deb'
value = value[distributionType] ?: value['def']
@ -577,7 +577,8 @@ subprojects {
return
}
}
result[key] = value
// expansions is String->Object but result is String->String, so we have to coerce the values
result[key] = value.toString()
}
return result
}
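
The toString() coercion above is needed because ReplaceTokens only accepts String-to-String tokens, while the expansion maps hold arbitrary Objects (counts, GStrings, per-distribution sub-maps). The build scripts elsewhere in this commit do the same thing inline; roughly:

    Map<String, Object> expansions = ['expected.modules.count': 0]
    // coerce values before handing them to filter(..., ReplaceTokens.class)
    Map<String, String> tokens = expansions.collectEntries { k, v -> [k, v.toString()] }
    assert tokens == ['expected.modules.count': '0']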


@ -6,8 +6,8 @@
* Side Public License, v 1.
*/
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.elasticsearch.gradle.OS
import org.elasticsearch.gradle.internal.info.BuildParams
import org.redline_rpm.header.Flags
@ -33,7 +33,7 @@ import java.util.regex.Pattern
* empty directory requires more wits than I have.
* 3. ospackage really wants to suck up some of the debian control scripts
* directly from the filesystem. It doesn't want to process them through
* MavenFilteringHack or any other copy-style action.
* any copy-style action.
*
* The following commands are useful when it comes to check the user/group
* and files permissions set within the RPM and DEB packages:
@ -64,13 +64,14 @@ void addProcessFilesTask(String type, boolean oss, boolean jdk) {
with copySpec {
from 'src/common'
from "src/${type}"
MavenFilteringHack.filter(it, expansionsForDistribution(type, oss, jdk))
filter("tokens" : expansionsForDistribution(type, oss, jdk), ReplaceTokens.class)
}
into('etc/elasticsearch') {
with configFiles(type, oss, jdk)
}
MavenFilteringHack.filter(it, expansionsForDistribution(type, oss, jdk))
filter("tokens" : expansionsForDistribution(type, oss, jdk), ReplaceTokens.class)
doLast {
// create empty dirs, we set the permissions when configuring the packages


@ -10,7 +10,7 @@
# Elasticsearch configuration directory
# Note: this setting will be shared with command-line tools
ES_PATH_CONF=${path.conf}
ES_PATH_CONF=@path.conf@
# Elasticsearch PID directory
#PID_DIR=/var/run/elasticsearch


@ -9,11 +9,11 @@
# $1=1 : indicates an upgrade
# source the default env file
if [ -f "${path.env}" ]; then
. "${path.env}"
if [ -f "@path.env@" ]; then
. "@path.env@"
fi
export ES_PATH_CONF=${ES_PATH_CONF:-${path.conf}}
export ES_PATH_CONF=${ES_PATH_CONF:-@path.conf@}
IS_UPGRADE=false
@ -117,4 +117,4 @@ if [ "$PACKAGE" = "deb" ]; then
fi
fi
${scripts.footer}
@scripts.footer@


@ -10,11 +10,11 @@
# $1=1 : indicates an upgrade
# source the default env file
if [ -f "${path.env}" ]; then
. "${path.env}"
if [ -f "@path.env@" ]; then
. "@path.env@"
fi
export ES_PATH_CONF=${ES_PATH_CONF:-${path.conf}}
export ES_PATH_CONF=${ES_PATH_CONF:-@path.conf@}
REMOVE_DIRS=false
REMOVE_JVM_OPTIONS_DIRECTORY=false
@ -114,4 +114,4 @@ if [ "$REMOVE_USER_AND_GROUP" = "true" ]; then
fi
fi
${scripts.footer}
@scripts.footer@


@ -1,9 +1,9 @@
# source the default env file
if [ -f "${path.env}" ]; then
. "${path.env}"
if [ -f "@path.env@" ]; then
. "@path.env@"
fi
export ES_PATH_CONF=${ES_PATH_CONF:-${path.conf}}
export ES_PATH_CONF=${ES_PATH_CONF:-@path.conf@}
if [ ! -f "${ES_PATH_CONF}"/elasticsearch.keystore ]; then
/usr/share/elasticsearch/bin/elasticsearch-keystore create
@ -19,4 +19,4 @@ else
fi
fi
${scripts.footer}
@scripts.footer@


@ -16,11 +16,11 @@ err_exit() {
}
# source the default env file
if [ -f "${path.env}" ]; then
. "${path.env}"
if [ -f "@path.env@" ]; then
. "@path.env@"
fi
export ES_PATH_CONF=${ES_PATH_CONF:-${path.conf}}
export ES_PATH_CONF=${ES_PATH_CONF:-@path.conf@}
case "$1" in
@ -80,4 +80,4 @@ case "$1" in
;;
esac
${scripts.footer}
@scripts.footer@


@ -10,11 +10,11 @@
# $1=1 : indicates an upgrade
# source the default env file
if [ -f "${path.env}" ]; then
. "${path.env}"
if [ -f "@path.env@" ]; then
. "@path.env@"
fi
export ES_PATH_CONF=${ES_PATH_CONF:-${path.conf}}
export ES_PATH_CONF=${ES_PATH_CONF:-@path.conf@}
STOP_REQUIRED=false
REMOVE_SERVICE=false
@ -92,4 +92,4 @@ if [ "$REMOVE_SERVICE" = "true" ]; then
fi
fi
${scripts.footer}
@scripts.footer@


@ -9,10 +9,10 @@ Type=notify
RuntimeDirectory=elasticsearch
PrivateTmp=true
Environment=ES_HOME=/usr/share/elasticsearch
Environment=ES_PATH_CONF=${path.conf}
Environment=ES_PATH_CONF=@path.conf@
Environment=PID_DIR=/var/run/elasticsearch
Environment=ES_SD_NOTIFY=true
EnvironmentFile=-${path.env}
EnvironmentFile=-@path.env@
WorkingDirectory=/usr/share/elasticsearch
@ -63,4 +63,4 @@ TimeoutStartSec=75
[Install]
WantedBy=multi-user.target
# Built for ${project.name}-${project.version} (${project.name})
# Built for @project.name@-@project.version@ (@project.name@)


@ -1,4 +1,4 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Copyright: Elasticsearch B.V. <info@elastic.co>
License: ${license.name}
${license.text}
License: @license.name@
@license.text@


@ -35,12 +35,12 @@ fi
ES_HOME="/usr/share/elasticsearch"
MAX_OPEN_FILES=65535
MAX_MAP_COUNT=262144
ES_PATH_CONF="${path.conf}"
ES_PATH_CONF="@path.conf@"
PID_DIR="/var/run/elasticsearch"
# Source the default env file
ES_ENV_FILE="${path.env}"
ES_ENV_FILE="@path.env@"
if [ -f "$ES_ENV_FILE" ]; then
. "$ES_ENV_FILE"
fi
@ -102,7 +102,7 @@ start() {
stop() {
echo -n $"Stopping $prog: "
# stop it here, often "killproc $prog"
killproc -p $pidfile -d ${stopping.timeout} $prog
killproc -p $pidfile -d @stopping.timeout@ $prog
retval=$?
echo
[ $retval -eq 0 ] && rm -f $lockfile


@ -84,7 +84,7 @@ fi
export HOSTNAME=$HOSTNAME
${source.path.env}
@source.path.env@
if [ -z "$ES_PATH_CONF" ]; then
echo "ES_PATH_CONF must be set to the configuration path"
@ -94,9 +94,9 @@ fi
# now make ES_PATH_CONF absolute
ES_PATH_CONF=`cd "$ES_PATH_CONF"; pwd`
ES_DISTRIBUTION_FLAVOR=${es.distribution.flavor}
ES_DISTRIBUTION_TYPE=${es.distribution.type}
ES_BUNDLED_JDK=${es.bundled_jdk}
ES_DISTRIBUTION_FLAVOR=@es.distribution.flavor@
ES_DISTRIBUTION_TYPE=@es.distribution.type@
ES_BUNDLED_JDK=@es.bundled_jdk@
if [[ "$ES_BUNDLED_JDK" == "false" ]]; then
echo "warning: no-jdk distributions that do not bundle a JDK are deprecated and will be removed in a future release" >&2


@ -25,9 +25,9 @@ if not defined ES_PATH_CONF (
rem now make ES_PATH_CONF absolute
for %%I in ("%ES_PATH_CONF%..") do set ES_PATH_CONF=%%~dpfI
set ES_DISTRIBUTION_FLAVOR=${es.distribution.flavor}
set ES_DISTRIBUTION_TYPE=${es.distribution.type}
set ES_BUNDLED_JDK=${es.bundled_jdk}
set ES_DISTRIBUTION_FLAVOR=@es.distribution.flavor@
set ES_DISTRIBUTION_TYPE=@es.distribution.type@
set ES_BUNDLED_JDK=@es.bundled_jdk@
if "%ES_BUNDLED_JDK%" == "false" (
echo "warning: no-jdk distributions that do not bundle a JDK are deprecated and will be removed in a future release" >&2


@ -17,7 +17,7 @@ echo elasticsearch-service-x64.exe was not found...
exit /B 1
:okExe
set ES_VERSION=${project.version}
set ES_VERSION=@project.version@
if "%SERVICE_LOG_DIR%" == "" set SERVICE_LOG_DIR=%ES_HOME%\logs


@ -30,11 +30,11 @@
#
# Path to directory where to store the data (separate multiple locations by comma):
#
${path.data}
@path.data@
#
# Path to log files:
#
${path.logs}
@path.logs@
#
# ----------------------------------- Memory -----------------------------------
#


@ -73,20 +73,20 @@
# specify an alternative path for heap dumps; ensure the directory exists and
# has sufficient space
${heap.dump.path}
@heap.dump.path@
# specify an alternative path for JVM fatal error logs
${error.file}
@error.file@
## JDK 8 GC logging
8:-XX:+PrintGCDetails
8:-XX:+PrintGCDateStamps
8:-XX:+PrintTenuringDistribution
8:-XX:+PrintGCApplicationStoppedTime
8:-Xloggc:${loggc}
8:-Xloggc:@loggc@
8:-XX:+UseGCLogFileRotation
8:-XX:NumberOfGCLogFiles=32
8:-XX:GCLogFileSize=64m
# JDK 9+ GC logging
9-:-Xlog:gc*,gc+age=trace,safepoint:file=${loggc}:utctime,pid,tags:filecount=32,filesize=64m
9-:-Xlog:gc*,gc+age=trace,safepoint:file=@loggc@:utctime,pid,tags:filecount=32,filesize=64m


@ -6,8 +6,7 @@
* Side Public License, v 1.
*/
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.AntFixture
import org.elasticsearch.gradle.internal.test.RestIntegTestTask
@ -35,7 +34,7 @@ Map<String, Object> expansions = [
tasks.named("processYamlRestTestResources").configure {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
filter("tokens" : expansions.collectEntries {k, v -> [k, v.toString()]} /* must be a map of strings */, ReplaceTokens.class)
}
// disable default yamlRestTest task, use specialized ones below


@ -3,7 +3,7 @@ setup:
- do:
cluster.health:
wait_for_status: green
wait_for_nodes: ${expected_nodes}
wait_for_nodes: @expected_nodes@
---
"All nodes are correctly discovered":
@ -12,4 +12,4 @@ setup:
nodes.info:
metric: [ transport ]
- match: { _nodes.total: ${expected_nodes} }
- match: { _nodes.total: @expected_nodes@ }


@ -7,7 +7,7 @@
*/
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.AntFixture
@ -41,7 +41,7 @@ Map<String, Object> expansions = [
tasks.named("processYamlRestTestResources").configure {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
filter("tokens" : expansions.collectEntries {k, v -> [k, v.toString()]} /* must be a map of strings */, ReplaceTokens.class)
}
tasks.named("yamlRestTest").configure {


@ -3,7 +3,7 @@ setup:
- do:
cluster.health:
wait_for_status: green
wait_for_nodes: ${expected_nodes}
wait_for_nodes: @expected_nodes@
---
"All nodes are correctly discovered":
@ -12,4 +12,4 @@ setup:
nodes.info:
metric: [ transport ]
- match: { _nodes.total: ${expected_nodes} }
- match: { _nodes.total: @expected_nodes@ }


@ -1,4 +1,4 @@
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin
@ -358,14 +358,14 @@ if (!azureAccount && !azureKey && !azureContainer && !azureBasePath && !azureSas
testFixtures.useFixture ':test:fixtures:azure-fixture', 'azure-fixture'
}
Map<String, Object> expansions = [
Map<String, String> expansions = [
'container': azureContainer,
'base_path': azureBasePath + "_integration_tests"
]
tasks.named("processYamlRestTestResources").configure {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
filter("tokens" : expansions, ReplaceTokens.class)
}
tasks.named("internalClusterTest").configure {


@ -9,9 +9,9 @@ setup:
body:
type: azure
settings:
container: ${container}
container: @container@
client: "integration_test"
base_path: ${base_path}
base_path: @base_path@
# Remove the snapshots, if a previous test failed to delete them. This is
# useful for third party tests that runs the test against a real external service.
@ -34,9 +34,9 @@ setup:
snapshot.get_repository:
repository: repository
- match: { repository.settings.container: ${container} }
- match: { repository.settings.container: @container@ }
- match: { repository.settings.client : "integration_test" }
- match: { repository.settings.base_path : "${base_path}" }
- match: { repository.settings.base_path : @base_path@ }
# Index documents
- do:


@ -1,4 +1,4 @@
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.RestIntegTestTask
import org.elasticsearch.gradle.internal.test.rest.YamlRestTestPlugin
@ -262,7 +262,7 @@ Map<String, Object> expansions = [
tasks.named("processYamlRestTestResources").configure {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
filter("tokens" : expansions, ReplaceTokens.class)
}
tasks.named("internalClusterTest").configure {


@ -12,9 +12,9 @@ setup:
body:
type: gcs
settings:
bucket: ${bucket}
bucket: @bucket@
client: "integration_test"
base_path: "${base_path}"
base_path: "@base_path@"
# Remove the snapshots, if a previous test failed to delete them. This is
# useful for third party tests that runs the test against a real external service.
@ -37,9 +37,9 @@ setup:
snapshot.get_repository:
repository: repository
- match: { repository.settings.bucket : ${bucket} }
- match: { repository.settings.bucket : @bucket@ }
- match: { repository.settings.client : "integration_test" }
- match: { repository.settings.base_path : "${base_path}" }
- match: { repository.settings.base_path : "@base_path@" }
# Index documents
- do:


@ -1,4 +1,4 @@
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.RestIntegTestTask
import org.elasticsearch.gradle.internal.test.rest.YamlRestTestPlugin
@ -168,7 +168,7 @@ tasks.named("processYamlRestTestResources").configure {
'disable_chunked_encoding': s3DisableChunkedEncoding,
]
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
filter("tokens" : expansions.collectEntries {k, v -> [k, v.toString()]} /* must be a map of strings */, ReplaceTokens.class)
}
tasks.named("internalClusterTest").configure {


@ -10,12 +10,12 @@ setup:
body:
type: s3
settings:
bucket: ${permanent_bucket}
bucket: @permanent_bucket@
client: integration_test_permanent
base_path: "${permanent_base_path}"
base_path: "@permanent_base_path@"
canned_acl: private
storage_class: standard
disable_chunked_encoding: ${disable_chunked_encoding}
disable_chunked_encoding: @disable_chunked_encoding@
# Remove the snapshots, if a previous test failed to delete them. This is
# useful for third party tests that runs the test against a real external service.
@ -41,9 +41,9 @@ setup:
body:
type: s3
settings:
bucket: ${permanent_bucket}
bucket: @permanent_bucket@
client: integration_test_permanent
base_path: "${permanent_base_path}"
base_path: "@permanent_base_path@"
endpoint: 127.0.0.1:5
canned_acl: private
storage_class: standard
@ -56,9 +56,9 @@ setup:
body:
type: s3
settings:
bucket: ${permanent_bucket}
bucket: @permanent_bucket@
client: integration_test_permanent
base_path: "${permanent_base_path}"
base_path: "@permanent_base_path@"
endpoint: 127.0.0.1:5
canned_acl: private
storage_class: standard
@ -115,9 +115,9 @@ setup:
snapshot.get_repository:
repository: repository_permanent
- match: { repository_permanent.settings.bucket : ${permanent_bucket} }
- match: { repository_permanent.settings.bucket : @permanent_bucket@ }
- match: { repository_permanent.settings.client : "integration_test_permanent" }
- match: { repository_permanent.settings.base_path : "${permanent_base_path}" }
- match: { repository_permanent.settings.base_path : "@permanent_base_path@" }
- match: { repository_permanent.settings.canned_acl : "private" }
- match: { repository_permanent.settings.storage_class : "standard" }
- is_false: repository_permanent.settings.access_key


@ -10,12 +10,12 @@ setup:
body:
type: s3
settings:
bucket: ${temporary_bucket}
bucket: @temporary_bucket@
client: integration_test_temporary
base_path: "${temporary_base_path}"
base_path: "@temporary_base_path@"
canned_acl: private
storage_class: standard
disable_chunked_encoding: ${disable_chunked_encoding}
disable_chunked_encoding: @disable_chunked_encoding@
---
"Snapshot and Restore with repository-s3 using temporary credentials":
@ -25,9 +25,9 @@ setup:
snapshot.get_repository:
repository: repository_temporary
- match: { repository_temporary.settings.bucket : ${temporary_bucket} }
- match: { repository_temporary.settings.bucket : @temporary_bucket@ }
- match: { repository_temporary.settings.client : "integration_test_temporary" }
- match: { repository_temporary.settings.base_path : "${temporary_base_path}" }
- match: { repository_temporary.settings.base_path : "@temporary_base_path@" }
- match: { repository_temporary.settings.canned_acl : "private" }
- match: { repository_temporary.settings.storage_class : "standard" }
- is_false: repository_temporary.settings.access_key


@ -10,12 +10,12 @@ setup:
body:
type: s3
settings:
bucket: ${ec2_bucket}
bucket: @ec2_bucket@
client: integration_test_ec2
base_path: "${ec2_base_path}"
base_path: "@ec2_base_path@"
canned_acl: private
storage_class: standard
disable_chunked_encoding: ${disable_chunked_encoding}
disable_chunked_encoding: @disable_chunked_encoding@
---
"Snapshot and Restore with repository-s3 using ec2 credentials":
@ -25,9 +25,9 @@ setup:
snapshot.get_repository:
repository: repository_ec2
- match: { repository_ec2.settings.bucket : ${ec2_bucket} }
- match: { repository_ec2.settings.bucket : @ec2_bucket@ }
- match: { repository_ec2.settings.client : "integration_test_ec2" }
- match: { repository_ec2.settings.base_path : "${ec2_base_path}" }
- match: { repository_ec2.settings.base_path : "@ec2_base_path@" }
- match: { repository_ec2.settings.canned_acl : "private" }
- match: { repository_ec2.settings.storage_class : "standard" }
- is_false: repository_ec2.settings.access_key


@ -10,12 +10,12 @@ setup:
body:
type: s3
settings:
bucket: ${ecs_bucket}
bucket: @ecs_bucket@
client: integration_test_ecs
base_path: "${ecs_base_path}"
base_path: "@ecs_base_path@"
canned_acl: private
storage_class: standard
disable_chunked_encoding: ${disable_chunked_encoding}
disable_chunked_encoding: @disable_chunked_encoding@
---
"Snapshot and Restore with repository-s3 using ecs credentials":
@ -25,9 +25,9 @@ setup:
snapshot.get_repository:
repository: repository_ecs
- match: { repository_ecs.settings.bucket : ${ecs_bucket} }
- match: { repository_ecs.settings.bucket : @ecs_bucket@ }
- match: { repository_ecs.settings.client : "integration_test_ecs" }
- match: { repository_ecs.settings.base_path : "${ecs_base_path}" }
- match: { repository_ecs.settings.base_path : "@ecs_base_path@" }
- match: { repository_ecs.settings.canned_acl : "private" }
- match: { repository_ecs.settings.storage_class : "standard" }
- is_false: repository_ecs.settings.access_key


@ -6,8 +6,7 @@
* Side Public License, v 1.
*/
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
apply plugin: 'elasticsearch.internal-testclusters'
@ -38,5 +37,5 @@ ext.expansions = [
tasks.named("processTestResources").configure {
assert pluginPaths.size() > 0
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
filter("tokens" : expansions.collectEntries {k, v -> [k, v.toString()]} /* must be a map of strings */, ReplaceTokens.class)
}


@ -10,4 +10,4 @@
- do:
nodes.info: {}
- length: { nodes.$master.plugins: ${expected.plugins.count} }
- length: { nodes.$master.plugins: @expected.plugins.count@ }


@ -1,4 +1,4 @@
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
import java.nio.file.Files
@ -77,7 +77,7 @@ tasks.named("processResources").configure {
duplicatesStrategy = DuplicatesStrategy.INCLUDE
exclude '**/public.key'
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
filter("tokens" : expansions, ReplaceTokens.class)
}
String licenseKey = System.getProperty("license.key")
if (licenseKey != null) {


@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.apache.tools.ant.filters.ReplaceTokens
import java.nio.file.Files
import java.security.KeyPair
@ -88,7 +88,7 @@ Map<String, Object> expansions = [
tasks.named("processTestResources").configure {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
filter("tokens" : expansions, ReplaceTokens.class)
}
if (useFixture) {


@ -1,5 +1,5 @@
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.MavenFilteringHack
import java.nio.file.Files
import java.security.KeyPair
@ -77,7 +77,7 @@ Map<String, Object> expansions = [
tasks.named("processTestResources").configure {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
filter("tokens" : expansions, ReplaceTokens.class)
}
if (useFixture) {


@ -6,7 +6,6 @@
*/
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.MavenFilteringHack
import java.nio.file.Files
import java.security.KeyPair


@ -1,7 +1,5 @@
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.elasticsearch.gradle.testclusters.WaitForHttpResource
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
apply plugin: 'elasticsearch.internal-testclusters'
@ -88,15 +86,11 @@ ext.expansions = [
'expected.plugins.count': pluginPaths.size()
]
ext.expansions = [
'expected.plugins.count': pluginPaths.size()
]
tasks.named("processTestResources").configure {
from(sourceSets.test.resources.srcDirs) {
duplicatesStrategy = DuplicatesStrategy.INCLUDE
include '**/*.yml'
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
filter("tokens" : expansions.collectEntries {k, v -> [k, v.toString()]} /* must be a map of strings */, ReplaceTokens.class)
}
}


@ -10,4 +10,4 @@
- do:
nodes.info: {}
- length: { nodes.$master.plugins: ${expected.plugins.count} }
- length: { nodes.$master.plugins: @expected.plugins.count@ }


@ -1,4 +1,4 @@
import org.elasticsearch.gradle.internal.MavenFilteringHack
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
apply plugin: 'elasticsearch.internal-testclusters'
@ -21,12 +21,12 @@ def pluginPaths = project(':plugins').getChildProjects().findAll { pluginName, p
}.collect {pluginName, pluginProject -> pluginProject.path }
ext.expansions = [
'expected.plugins.count': pluginPaths.size()
'expected.plugins.count': pluginPaths.size()
]
tasks.named("processTestResources").configure {
inputs.properties(project.expansions)
MavenFilteringHack.filter(it, expansions)
filter("tokens" : expansions.collectEntries {k, v -> [k, v.toString()]} /* must be a map of strings */, ReplaceTokens.class)
}
testClusters.matching { it.name == "integTest" }.configureEach {


@ -10,5 +10,5 @@
- do:
nodes.info: {}
- length: { nodes.$master.plugins: ${expected.plugins.count} }
- length: { nodes.$master.plugins: @expected.plugins.count@ }
# TODO: check that every plugin is installed