Merge main into multi-project

This commit is contained in:
Yang Wang 2024-11-23 16:55:53 +11:00
commit b8f8656fda
357 changed files with 5251 additions and 1824 deletions

View file

@ -8,6 +8,7 @@ steps:
setup:
image:
- debian-11
- debian-12
- opensuse-leap-15
- oraclelinux-7
- oraclelinux-8

View file

@ -9,6 +9,7 @@ steps:
setup:
image:
- debian-11
- debian-12
- opensuse-leap-15
- oraclelinux-7
- oraclelinux-8

View file

@ -8,6 +8,7 @@ steps:
setup:
image:
- debian-11
- debian-12
- opensuse-leap-15
- oraclelinux-7
- oraclelinux-8

View file

@ -3,68 +3,15 @@ config:
steps:
- group: packaging-tests-unix
steps:
- label: "{{matrix.image}} / docker / packaging-tests-unix"
key: "packaging-tests-unix-docker"
command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.docker-cloud-ess
timeout_in_minutes: 300
matrix:
setup:
image:
- debian-11
- opensuse-leap-15
- oraclelinux-7
- oraclelinux-8
- sles-12
- sles-15
- ubuntu-1804
- ubuntu-2004
- ubuntu-2204
- rocky-8
- rocky-9
- rhel-7
- rhel-8
- rhel-9
- almalinux-8
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
diskSizeGb: 350
machineType: custom-16-32768
- label: "{{matrix.image}} / packages / packaging-tests-unix"
key: "packaging-tests-unix-packages"
command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.packages
timeout_in_minutes: 300
matrix:
setup:
image:
- debian-11
- opensuse-leap-15
- oraclelinux-7
- oraclelinux-8
- sles-12
- sles-15
- ubuntu-1804
- ubuntu-2004
- ubuntu-2204
- rocky-8
- rocky-9
- rhel-7
- rhel-8
- rhel-9
- almalinux-8
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
diskSizeGb: 350
machineType: custom-16-32768
- label: "{{matrix.image}} / archives / packaging-tests-unix"
key: "packaging-tests-unix-archives"
command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.archives
- label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-unix"
key: "packaging-tests-unix"
command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.{{matrix.PACKAGING_TASK}}
timeout_in_minutes: 300
matrix:
setup:
image:
- debian-11
- debian-12
- opensuse-leap-15
- oraclelinux-7
- oraclelinux-8
@ -79,6 +26,11 @@ steps:
- rhel-8
- rhel-9
- almalinux-8
PACKAGING_TASK:
- docker
- docker-cloud-ess
- packages
- archives
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}

View file

@ -6,7 +6,7 @@ WORKFLOW="${DRA_WORKFLOW:-snapshot}"
BRANCH="${BUILDKITE_BRANCH:-}"
# Don't publish main branch to staging
if [[ "$BRANCH" == "main" && "$WORKFLOW" == "staging" ]]; then
if [[ ("$BRANCH" == "main" || "$BRANCH" == *.x) && "$WORKFLOW" == "staging" ]]; then
exit 0
fi

View file

@ -20,9 +20,6 @@ class ElasticsearchJavaPluginFuncTest extends AbstractGradleInternalPluginFuncTe
when:
buildFile.text << """
import org.elasticsearch.gradle.Architecture
import org.elasticsearch.gradle.internal.info.BuildParams
BuildParams.init { it.setMinimumRuntimeVersion(JavaVersion.VERSION_1_10) }
assert tasks.named('compileJava').get().sourceCompatibility == JavaVersion.VERSION_1_10.toString()
assert tasks.named('compileJava').get().targetCompatibility == JavaVersion.VERSION_1_10.toString()
"""

View file

@ -9,11 +9,9 @@
import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.rest.RestTestBasePlugin
import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
import org.elasticsearch.gradle.testclusters.TestClustersAware
import org.elasticsearch.gradle.testclusters.TestDistribution
//apply plugin: org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin
@ -25,12 +23,12 @@ if (buildParams.inFipsJvm) {
File fipsSecurity = new File(fipsResourcesDir, javaSecurityFilename)
File fipsPolicy = new File(fipsResourcesDir, 'fips_java.policy')
File fipsTrustStore = new File(fipsResourcesDir, 'cacerts.bcfks')
def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.4')
def bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.17')
def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.5')
def bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19')
def manualDebug = false; //change this to manually debug bouncy castle in an IDE
if(manualDebug) {
bcFips = dependencies.create('org.bouncycastle:bc-fips-debug:1.0.2.4')
bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.17'){
bcFips = dependencies.create('org.bouncycastle:bc-fips-debug:1.0.2.5')
bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19'){
exclude group: 'org.bouncycastle', module: 'bc-fips' // to avoid jar hell
}
}

View file

@ -9,7 +9,6 @@
import org.elasticsearch.gradle.util.Pair
import org.elasticsearch.gradle.util.GradleUtils
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.TestUtil
import org.jetbrains.gradle.ext.JUnit

View file

@ -3,7 +3,6 @@ import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.internal.BwcVersions
import org.elasticsearch.gradle.internal.JarApiComparisonTask
import org.elasticsearch.gradle.internal.info.BuildParams
import static org.elasticsearch.gradle.internal.InternalDistributionBwcSetupPlugin.buildBwcTaskName

View file

@ -13,10 +13,8 @@ package org.elasticsearch.gradle.internal;
* This class models the different Docker base images that are used to build Docker distributions of Elasticsearch.
*/
public enum DockerBase {
DEFAULT("ubuntu:20.04", "", "apt-get"),
// "latest" here is intentional, since the image name specifies "8"
UBI("docker.elastic.co/ubi8/ubi-minimal:latest", "-ubi", "microdnf"),
DEFAULT("docker.elastic.co/ubi8/ubi-minimal:latest", "", "microdnf"),
// The Iron Bank base image is UBI (albeit hardened), but we are required to parameterize the Docker build
IRON_BANK("${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}", "-ironbank", "yum"),

View file

@ -173,9 +173,6 @@ public class InternalDistributionDownloadPlugin implements Plugin<Project> {
if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER) {
return projectName + "docker" + archString + "-export";
}
if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_UBI) {
return projectName + "ubi-docker" + archString + "-export";
}
if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_IRONBANK) {
return projectName + "ironbank-docker" + archString + "-export";
}

View file

@ -21,6 +21,7 @@ import org.gradle.api.plugins.JavaPlugin;
import org.gradle.api.plugins.JavaPluginExtension;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.SourceSetContainer;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.compile.CompileOptions;
import org.gradle.api.tasks.compile.JavaCompile;
import org.gradle.api.tasks.javadoc.Javadoc;
@ -87,6 +88,7 @@ public class MrjarPlugin implements Plugin<Project> {
String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion;
SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion);
configureSourceSetInJar(project, mainSourceSet, javaVersion);
addJar(project, mainSourceSet, javaVersion);
mainSourceSets.add(mainSourceSetName);
testSourceSets.add(mainSourceSetName);
@ -147,6 +149,14 @@ public class MrjarPlugin implements Plugin<Project> {
return sourceSet;
}
private void addJar(Project project, SourceSet sourceSet, int javaVersion) {
project.getConfigurations().register("java" + javaVersion);
TaskProvider<Jar> jarTask = project.getTasks().register("java" + javaVersion + "Jar", Jar.class, task -> {
task.from(sourceSet.getOutput());
});
project.getArtifacts().add("java" + javaVersion, jarTask);
}
private void configurePreviewFeatures(Project project, SourceSet sourceSet, int javaVersion) {
project.getTasks().withType(JavaCompile.class).named(sourceSet.getCompileJavaTaskName()).configure(compileTask -> {
CompileOptions compileOptions = compileTask.getOptions();

View file

@ -17,7 +17,6 @@ public class InternalElasticsearchDistributionTypes {
public static ElasticsearchDistributionType DEB = new DebElasticsearchDistributionType();
public static ElasticsearchDistributionType RPM = new RpmElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER = new DockerElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_UBI = new DockerUbiElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_IRONBANK = new DockerIronBankElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType();
@ -26,7 +25,6 @@ public class InternalElasticsearchDistributionTypes {
DEB,
RPM,
DOCKER,
DOCKER_UBI,
DOCKER_IRONBANK,
DOCKER_CLOUD_ESS,
DOCKER_WOLFI

View file

@ -1,80 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.gradle.internal.info;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.function.Consumer;
@Deprecated
public class BuildParams {
private static Boolean isCi;
/**
* Initialize global build parameters. This method accepts and a initialization function which in turn accepts a
* {@link MutableBuildParams}. Initialization can be done in "stages", therefore changes override existing values, and values from
* previous calls to {@link #init(Consumer)} carry forward. In cases where you want to clear existing values
* {@link MutableBuildParams#reset()} may be used.
*
* @param initializer Build parameter initializer
*/
public static void init(Consumer<MutableBuildParams> initializer) {
initializer.accept(MutableBuildParams.INSTANCE);
}
public static Boolean isCi() {
return value(isCi);
}
private static <T> T value(T object) {
if (object == null) {
String callingMethod = Thread.currentThread().getStackTrace()[2].getMethodName();
throw new IllegalStateException(
"Build parameter '"
+ propertyName(callingMethod)
+ "' has not been initialized.\n"
+ "Perhaps the plugin responsible for initializing this property has not been applied."
);
}
return object;
}
private static String propertyName(String methodName) {
String propertyName = methodName.startsWith("is") ? methodName.substring("is".length()) : methodName.substring("get".length());
return propertyName.substring(0, 1).toLowerCase() + propertyName.substring(1);
}
public static class MutableBuildParams {
private static MutableBuildParams INSTANCE = new MutableBuildParams();
private MutableBuildParams() {}
/**
* Resets any existing values from previous initializations.
*/
public void reset() {
Arrays.stream(BuildParams.class.getDeclaredFields()).filter(f -> Modifier.isStatic(f.getModifiers())).forEach(f -> {
try {
// Since we are mutating private static fields from a public static inner class we need to suppress
// accessibility controls here.
f.setAccessible(true);
f.set(null, null);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
});
}
public void setIsCi(boolean isCi) {
BuildParams.isCi = isCi;
}
}
}

View file

@ -152,13 +152,6 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
spec.getParameters().getBuildParams().set(buildParams);
});
BuildParams.init(params -> {
params.reset();
params.setIsCi(
System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null || System.getProperty("isCI") != null
);
});
// Enforce the minimum compiler version
assertMinimumCompilerVersion(minimumCompilerVersion);

View file

@ -51,7 +51,6 @@ import static org.elasticsearch.gradle.internal.distribution.InternalElasticsear
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_CLOUD_ESS;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_IRONBANK;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_UBI;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_WOLFI;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.RPM;
import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams;
@ -148,7 +147,6 @@ public class DistroTestPlugin implements Plugin<Project> {
private static Map<ElasticsearchDistributionType, TaskProvider<?>> lifecycleTasks(Project project, String taskPrefix) {
Map<ElasticsearchDistributionType, TaskProvider<?>> lifecyleTasks = new HashMap<>();
lifecyleTasks.put(DOCKER, project.getTasks().register(taskPrefix + ".docker"));
lifecyleTasks.put(DOCKER_UBI, project.getTasks().register(taskPrefix + ".docker-ubi"));
lifecyleTasks.put(DOCKER_IRONBANK, project.getTasks().register(taskPrefix + ".docker-ironbank"));
lifecyleTasks.put(DOCKER_CLOUD_ESS, project.getTasks().register(taskPrefix + ".docker-cloud-ess"));
lifecyleTasks.put(DOCKER_WOLFI, project.getTasks().register(taskPrefix + ".docker-wolfi"));

View file

@ -57,7 +57,7 @@
unfair. -->
<module name="LineLength">
<property name="max" value="140" />
<property name="ignorePattern" value="^(?:(?:package|import) .*| *\* *https?://[^ ]+)$" />
<property name="ignorePattern" value="^(?:(?:package|import) .*| *\*.*https?://.*)$" />
</module>
<module name="TreeWalker">

View file

@ -5,6 +5,7 @@ grant {
permission java.security.SecurityPermission "getProperty.jdk.tls.disabledAlgorithms";
permission java.security.SecurityPermission "getProperty.jdk.certpath.disabledAlgorithms";
permission java.security.SecurityPermission "getProperty.jdk.tls.server.defaultDHEParameters";
permission java.security.SecurityPermission "getProperty.org.bouncycastle.ec.max_f2m_field_size";
permission java.lang.RuntimePermission "getProtectionDomain";
permission java.util.PropertyPermission "java.runtime.name", "read";
permission org.bouncycastle.crypto.CryptoServicesPermission "tlsAlgorithmsEnabled";
@ -20,6 +21,6 @@ grant {
};
// rely on the caller's socket permissions, the JSSE TLS implementation here is always allowed to connect
grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.17.jar" {
grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.19.jar" {
permission java.net.SocketPermission "*", "connect";
};

View file

@ -3,8 +3,7 @@
The ES build can generate several types of Docker image. These are enumerated in
the [DockerBase] enum.
* Default - this is what most people use, and is based on Ubuntu
* UBI - the same as the default image, but based upon [RedHat's UBI
* Default - this is what most people use, and is based on [RedHat's UBI
images][ubi], specifically their minimal flavour.
* Wolfi - the same as the default image, but based upon [Wolfi](https://github.com/wolfi-dev)
* Cloud ESS - this directly extends the Wolfi image, and adds all ES plugins
@ -23,14 +22,7 @@ the [DockerBase] enum.
software (FOSS) and Commercial off-the-shelf (COTS). In practice, this is
another UBI build, this time on the regular UBI image, with extra
hardening. See below for more details.
* Cloud - this is mostly the same as the default image, with some notable differences:
* `filebeat` and `metricbeat` are included
* `wget` is included
* The `ENTRYPOINT` is just `/bin/tini`, and the `CMD` is
`/app/elasticsearch.sh`. In normal use this file would be bind-mounted
in, but the image ships a stub version of this file so that the image
can still be tested.
The long-term goal is for both Cloud images to be retired in favour of the
The long-term goal is for Cloud ESS image to be retired in favour of the
default image.

View file

@ -527,9 +527,7 @@ subprojects { Project subProject ->
final Architecture architecture = subProject.name.contains('aarch64-') ? Architecture.AARCH64 : Architecture.X64
DockerBase base = DockerBase.DEFAULT
if (subProject.name.contains('ubi-')) {
base = DockerBase.UBI
} else if (subProject.name.contains('ironbank-')) {
if (subProject.name.contains('ironbank-')) {
base = DockerBase.IRON_BANK
} else if (subProject.name.contains('cloud-ess-')) {
base = DockerBase.CLOUD_ESS
@ -538,7 +536,7 @@ subprojects { Project subProject ->
}
final String arch = architecture == Architecture.AARCH64 ? '-aarch64' : ''
final String extension = base == DockerBase.UBI ? 'ubi.tar' :
final String extension =
(base == DockerBase.IRON_BANK ? 'ironbank.tar' :
(base == DockerBase.CLOUD_ESS ? 'cloud-ess.tar' :
(base == DockerBase.WOLFI ? 'wolfi.tar' :

View file

@ -41,9 +41,7 @@ RUN chmod 0555 /bin/tini
<% } else { %>
# Install required packages to extract the Elasticsearch distribution
<% if (docker_base == 'default' || docker_base == 'cloud') { %>
RUN <%= retry.loop(package_manager, "${package_manager} update && DEBIAN_FRONTEND=noninteractive ${package_manager} install -y curl ") %>
<% } else if (docker_base == "wolfi") { %>
<% if (docker_base == "wolfi") { %>
RUN <%= retry.loop(package_manager, "export DEBIAN_FRONTEND=noninteractive && ${package_manager} update && ${package_manager} update && ${package_manager} add --no-cache curl") %>
<% } else { %>
RUN <%= retry.loop(package_manager, "${package_manager} install -y findutils tar gzip") %>
@ -117,27 +115,6 @@ RUN sed -i -e 's/ES_DISTRIBUTION_TYPE=tar/ES_DISTRIBUTION_TYPE=docker/' bin/elas
chmod 0775 bin config config/jvm.options.d data logs plugins && \\
find config -type f -exec chmod 0664 {} +
<% if (docker_base == "cloud") { %>
COPY filebeat-${version}.tar.gz metricbeat-${version}.tar.gz /tmp/
RUN set -eux ; \\
for beat in filebeat metricbeat ; do \\
if [ ! -s /tmp/\$beat-${version}.tar.gz ]; then \\
echo "/tmp/\$beat-${version}.tar.gz is empty - cannot uncompress" 2>&1 ; \\
exit 1 ; \\
fi ; \\
if ! tar tf /tmp/\$beat-${version}.tar.gz >/dev/null; then \\
echo "/tmp/\$beat-${version}.tar.gz is corrupt - cannot uncompress" 2>&1 ; \\
exit 1 ; \\
fi ; \\
mkdir -p /opt/\$beat ; \\
tar xf /tmp/\$beat-${version}.tar.gz -C /opt/\$beat --strip-components=1 ; \\
done
# Add plugins infrastructure
RUN mkdir -p /opt/plugins/archive
RUN chmod -R 0555 /opt/plugins
<% } %>
################################################################################
# Build stage 2 (the actual Elasticsearch image):
#
@ -173,21 +150,6 @@ SHELL ["/bin/bash", "-c"]
# Optionally set Bash as the default shell in the container at runtime
CMD ["/bin/bash"]
<% } else if (docker_base == "default" || docker_base == "cloud") { %>
# Change default shell to bash, then install required packages with retries.
RUN yes no | dpkg-reconfigure dash && \\
<%= retry.loop(
package_manager,
"export DEBIAN_FRONTEND=noninteractive && \n" +
" ${package_manager} update && \n" +
" ${package_manager} upgrade -y && \n" +
" ${package_manager} install -y --no-install-recommends \n" +
" ca-certificates curl netcat p11-kit unzip zip ${docker_base == 'cloud' ? 'wget' : '' } && \n" +
" ${package_manager} clean && \n" +
" rm -rf /var/lib/apt/lists/*"
) %>
<% } else { %>
RUN <%= retry.loop(
@ -201,12 +163,7 @@ RUN <%= retry.loop(
<% } %>
<% if (docker_base == "default" || docker_base == "cloud") { %>
RUN groupadd -g 1000 elasticsearch && \\
adduser --uid 1000 --gid 1000 --home /usr/share/elasticsearch elasticsearch && \\
adduser elasticsearch root && \\
chown -R 0:0 /usr/share/elasticsearch
<% } else if (docker_base == "wolfi") { %>
<% if (docker_base == "wolfi") { %>
RUN groupadd -g 1000 elasticsearch && \
adduser -G elasticsearch -u 1000 elasticsearch -D --home /usr/share/elasticsearch elasticsearch && \
adduser elasticsearch root && \
@ -226,10 +183,6 @@ COPY --from=builder --chown=0:0 /usr/share/elasticsearch /usr/share/elasticsearc
COPY --from=builder --chown=0:0 /bin/tini /bin/tini
<% } %>
<% if (docker_base == 'cloud') { %>
COPY --from=builder --chown=0:0 /opt /opt
<% } %>
ENV PATH /usr/share/elasticsearch/bin:\$PATH
ENV SHELL /bin/bash
COPY ${bin_dir}/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh
@ -251,12 +204,7 @@ RUN chmod g=u /etc/passwd && \\
chmod 0775 /usr/share/elasticsearch && \\
chown elasticsearch bin config config/jvm.options.d data logs plugins
<% if (docker_base == 'default' || docker_base == 'cloud') { %>
# Update "cacerts" bundle to use Ubuntu's CA certificates (and make sure it
# stays up-to-date with changes to Ubuntu's store)
COPY bin/docker-openjdk /etc/ca-certificates/update.d/docker-openjdk
RUN /etc/ca-certificates/update.d/docker-openjdk
<% } else if (docker_base == 'wolfi') { %>
<% if (docker_base == 'wolfi') { %>
RUN ln -sf /etc/ssl/certs/java/cacerts /usr/share/elasticsearch/jdk/lib/security/cacerts
<% } else { %>
RUN ln -sf /etc/pki/ca-trust/extracted/java/cacerts /usr/share/elasticsearch/jdk/lib/security/cacerts
@ -284,9 +232,7 @@ LABEL org.label-schema.build-date="${build_date}" \\
org.opencontainers.image.url="https://www.elastic.co/products/elasticsearch" \\
org.opencontainers.image.vendor="Elastic" \\
org.opencontainers.image.version="${version}"
<% } %>
<% if (docker_base == 'ubi') { %>
LABEL name="Elasticsearch" \\
maintainer="infra@elastic.co" \\
vendor="Elastic" \\
@ -296,21 +242,12 @@ LABEL name="Elasticsearch" \\
description="You know, for search."
<% } %>
<% if (docker_base == 'ubi') { %>
RUN mkdir /licenses && cp LICENSE.txt /licenses/LICENSE
<% } else if (docker_base == 'iron_bank') { %>
RUN mkdir /licenses && cp LICENSE.txt /licenses/LICENSE
<% if (docker_base == 'iron_bank') { %>
COPY LICENSE /licenses/LICENSE.addendum
<% } %>
<% if (docker_base == "cloud") { %>
ENTRYPOINT ["/bin/tini", "--"]
CMD ["/app/elasticsearch.sh"]
# Generate a stub command that will be overwritten at runtime
RUN mkdir /app && \\
echo -e '#!/bin/bash\\nexec /usr/local/bin/docker-entrypoint.sh eswrapper' > /app/elasticsearch.sh && \\
chmod 0555 /app/elasticsearch.sh
<% } else if (docker_base == "wolfi") { %>
<% if (docker_base == "wolfi") { %>
# Our actual entrypoint is `tini`, a minimal but functional init program. It
# calls the entrypoint we provide, while correctly forwarding signals.
ENTRYPOINT ["/sbin/tini", "--", "/usr/local/bin/docker-entrypoint.sh"]

View file

@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.

View file

@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.

View file

@ -10,7 +10,6 @@
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.OS
import org.elasticsearch.gradle.internal.info.BuildParams
import org.redline_rpm.header.Flags
import java.nio.file.Files
@ -335,7 +334,6 @@ Closure commonDebConfig(String architecture) {
// versions found on oldest supported distro, centos-6
requires('bash', '4.1', GREATER | EQUAL)
requires('lsb-base', '4', GREATER | EQUAL)
requires 'libc6'
requires 'adduser'

View file

@ -5,8 +5,6 @@
changelog-file-missing-in-native-package
# we intentionally copy our copyright file for all deb packages
copyright-file-contains-full-apache-2-license
copyright-not-using-common-license-for-apache2
copyright-without-copyright-notice
# we still put all our files under /usr/share/elasticsearch even after transition to platform dependent packages
@ -16,37 +14,23 @@ arch-dependent-file-in-usr-share
missing-dep-on-jarwrapper
# we prefer to not make our config and log files world readable
non-standard-file-perm etc/default/elasticsearch 0660 != 0644
non-standard-dir-perm etc/elasticsearch/ 2750 != 0755
non-standard-dir-perm etc/elasticsearch/jvm.options.d/ 2750 != 0755
non-standard-file-perm etc/elasticsearch/*
non-standard-dir-perm var/lib/elasticsearch/ 2750 != 0755
non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755
# this lintian tag is simply wrong; contrary to the explanation, Debian systemd
# does actually look at /usr/lib/systemd/system
systemd-service-file-outside-lib usr/lib/systemd/system/elasticsearch.service
non-standard-file-perm 0660 != 0644 [etc/default/elasticsearch]
non-standard-dir-perm 2750 != 0755 [etc/elasticsearch/]
non-standard-dir-perm 2750 != 0755 [etc/elasticsearch/jvm.options.d/]
non-standard-file-perm 0660 != 0644 [etc/elasticsearch/*]
non-standard-dir-perm 2750 != 0755 [var/lib/elasticsearch/]
non-standard-dir-perm 2750 != 0755 [var/log/elasticsearch/]
# the package scripts handle systemd directly and don't need to use deb helpers
maintainer-script-calls-systemctl
# bundled JDK
embedded-library
unstripped-binary-or-object usr/share/elasticsearch/jdk/*
extra-license-file usr/share/elasticsearch/jdk/legal/*
hardening-no-pie usr/share/elasticsearch/jdk/bin/*
hardening-no-pie usr/share/elasticsearch/jdk/lib/*
unstripped-binary-or-object [usr/share/elasticsearch/jdk/*]
# the system java version that lintian assumes is far behind what elasticsearch uses
unknown-java-class-version
# elastic licensed modules contain elastic license
extra-license-file usr/share/elasticsearch/modules/*
# This dependency appears to have a packaging flaw, and includes a
# generated source file alongside the compiled version
jar-contains-source usr/share/elasticsearch/modules/repository-gcs/api-common*.jar *
# There's no `License` field in Debian control files, but earlier versions
# of `lintian` were more permissive. Override this warning so that we can
# run `lintian` on different releases of Debian. The format of this override
@ -58,8 +42,27 @@ unknown-field License
# indirectly to libc via libdl. This might not be best practice but we
# don't build them ourselves and the license precludes us modifying them
# to fix this.
library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so
library-not-linked-against-libc [usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so*]
# shared-lib-without-dependency-information (now shared-library-lacks-prerequisites) is falsely reported for libvec.so
# which has no dependencies (not even libc) besides the symbols in the base executable.
shared-lib-without-dependency-information usr/share/elasticsearch/lib/platform/linux-x64/libvec.so
# Below is the copy of some of the above rules in format for Lintian versions <= 2.104 (Debian 11)
# Override syntax changes between Lintian versions in a non-backwards compatible way, so we handle it with
# duplication and ignoring some issues in the test code.
# we prefer to not make our config and log files world readable
non-standard-file-perm etc/default/elasticsearch 0660 != 0644
non-standard-dir-perm etc/elasticsearch/ 2750 != 0755
non-standard-dir-perm etc/elasticsearch/jvm.options.d/ 2750 != 0755
non-standard-file-perm etc/elasticsearch/*
non-standard-dir-perm var/lib/elasticsearch/ 2750 != 0755
non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755
# bundled JDK
unstripped-binary-or-object usr/share/elasticsearch/jdk/*
# Intel MKL libraries are not linked directly to libc. They are linked
# indirectly to libc via libdl. This might not be best practice but we
# don't build them ourselves and the license precludes us modifying them
# to fix this.
library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so*

View file

@ -29,7 +29,7 @@ dependencies {
implementation 'org.ow2.asm:asm-tree:9.7'
api "org.bouncycastle:bcpg-fips:1.0.7.1"
api "org.bouncycastle:bc-fips:1.0.2.4"
api "org.bouncycastle:bc-fips:1.0.2.5"
testImplementation project(":test:framework")
testImplementation "com.google.jimfs:jimfs:${versions.jimfs}"
testRuntimeOnly "com.google.guava:guava:${versions.jimfs_guava}"

View file

@ -0,0 +1,5 @@
pr: 112989
summary: Upgrade Bouncy Castle FIPS dependencies
area: Security
type: upgrade
issues: []

View file

@ -0,0 +1,6 @@
pr: 115616
summary: Fix double lookup failure on ESQL
area: ES|QL
type: bug
issues:
- 111398

View file

@ -0,0 +1,5 @@
pr: 116531
summary: "Add a standard deviation aggregating function: STD_DEV"
area: ES|QL
type: enhancement
issues: []

View file

@ -0,0 +1,5 @@
pr: 116809
summary: "Distinguish `LicensedFeature` by family field"
area: License
type: bug
issues: []

View file

@ -0,0 +1,5 @@
pr: 117148
summary: Preserve thread context when waiting for segment generation in RTG
area: CRUD
type: bug
issues: []

View file

@ -0,0 +1,6 @@
pr: 117201
summary: "Use `field_caps` native nested fields filtering"
area: ES|QL
type: bug
issues:
- 117054

View file

@ -0,0 +1,5 @@
pr: 117243
summary: Bump major version for feature migration system indices
area: Infra/Core
type: upgrade
issues: []

View file

@ -0,0 +1,5 @@
pr: 117287
summary: Fixing bug setting index when parsing Google Vertex AI results
area: Machine Learning
type: bug
issues: []

View file

@ -0,0 +1,5 @@
pr: 117294
summary: Always Emit Inference ID in Semantic Text Mapping
area: Mapping
type: bug
issues: []

View file

@ -0,0 +1,5 @@
pr: 117297
summary: Fix CCS exchange when multi cluster aliases point to same cluster
area: ES|QL
type: bug
issues: []

View file

@ -0,0 +1,5 @@
pr: 117316
summary: Fix validation of SORT by aggregate functions
area: ES|QL
type: bug
issues: []

View file

@ -0,0 +1,5 @@
pr: 117350
summary: "Improve halfbyte transposition performance, marginally improving bbq performance"
area: Vector Search
type: enhancement
issues: []

View file

@ -17,10 +17,11 @@ The <<esql-stats-by>> command supports these aggregate functions:
* <<esql-min>>
* <<esql-percentile>>
* experimental:[] <<esql-st_centroid_agg>>
* <<esql-std_dev>>
* <<esql-sum>>
* <<esql-top>>
* <<esql-values>>
* experimental:[] <<esql-weighted_avg>>
* <<esql-weighted_avg>>
// end::agg_list[]
include::layout/avg.asciidoc[]
@ -32,6 +33,7 @@ include::layout/median_absolute_deviation.asciidoc[]
include::layout/min.asciidoc[]
include::layout/percentile.asciidoc[]
include::layout/st_centroid_agg.asciidoc[]
include::layout/std_dev.asciidoc[]
include::layout/sum.asciidoc[]
include::layout/top.asciidoc[]
include::layout/values.asciidoc[]

View file

@ -0,0 +1,5 @@
// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
*Description*
The standard deviation of a numeric field.

View file

@ -0,0 +1,22 @@
// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
*Examples*
[source.merge.styled,esql]
----
include::{esql-specs}/stats.csv-spec[tag=stdev]
----
[%header.monospaced.styled,format=dsv,separator=|]
|===
include::{esql-specs}/stats.csv-spec[tag=stdev-result]
|===
The expression can use inline functions. For example, to calculate the standard deviation of each employee's maximum salary changes, first use `MV_MAX` on each row, and then use `STD_DEV` on the result
[source.merge.styled,esql]
----
include::{esql-specs}/stats.csv-spec[tag=docsStatsStdDevNestedExpression]
----
[%header.monospaced.styled,format=dsv,separator=|]
|===
include::{esql-specs}/stats.csv-spec[tag=docsStatsStdDevNestedExpression-result]
|===

View file

@ -22,6 +22,42 @@
"variadic" : false,
"returnType" : "boolean"
},
{
"params" : [
{
"name" : "field",
"type" : "keyword",
"optional" : false,
"description" : "Field that the query will target."
},
{
"name" : "query",
"type" : "text",
"optional" : false,
"description" : "Text you wish to find in the provided field."
}
],
"variadic" : false,
"returnType" : "boolean"
},
{
"params" : [
{
"name" : "field",
"type" : "text",
"optional" : false,
"description" : "Field that the query will target."
},
{
"name" : "query",
"type" : "keyword",
"optional" : false,
"description" : "Text you wish to find in the provided field."
}
],
"variadic" : false,
"returnType" : "boolean"
},
{
"params" : [
{

View file

@ -0,0 +1,50 @@
{
"comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.",
"type" : "agg",
"name" : "std_dev",
"description" : "The standard deviation of a numeric field.",
"signatures" : [
{
"params" : [
{
"name" : "number",
"type" : "double",
"optional" : false,
"description" : ""
}
],
"variadic" : false,
"returnType" : "double"
},
{
"params" : [
{
"name" : "number",
"type" : "integer",
"optional" : false,
"description" : ""
}
],
"variadic" : false,
"returnType" : "double"
},
{
"params" : [
{
"name" : "number",
"type" : "long",
"optional" : false,
"description" : ""
}
],
"variadic" : false,
"returnType" : "double"
}
],
"examples" : [
"FROM employees\n| STATS STD_DEV(height)",
"FROM employees\n| STATS stddev_salary_change = STD_DEV(MV_MAX(salary_change))"
],
"preview" : false,
"snapshot_only" : false
}

View file

@ -0,0 +1,11 @@
<!--
This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
-->
### STD_DEV
The standard deviation of a numeric field.
```
FROM employees
| STATS STD_DEV(height)
```

View file

@ -0,0 +1,15 @@
// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
[discrete]
[[esql-std_dev]]
=== `STD_DEV`
*Syntax*
[.text-center]
image::esql/functions/signature/std_dev.svg[Embedded,opts=inline]
include::../parameters/std_dev.asciidoc[]
include::../description/std_dev.asciidoc[]
include::../types/std_dev.asciidoc[]
include::../examples/std_dev.asciidoc[]

View file

@ -19,6 +19,7 @@
* <<esql-mv_median>>
* <<esql-mv_median_absolute_deviation>>
* <<esql-mv_min>>
* <<esql-mv_percentile>>
* <<esql-mv_pseries_weighted_sum>>
* <<esql-mv_sort>>
* <<esql-mv_slice>>
@ -37,6 +38,7 @@ include::layout/mv_max.asciidoc[]
include::layout/mv_median.asciidoc[]
include::layout/mv_median_absolute_deviation.asciidoc[]
include::layout/mv_min.asciidoc[]
include::layout/mv_percentile.asciidoc[]
include::layout/mv_pseries_weighted_sum.asciidoc[]
include::layout/mv_slice.asciidoc[]
include::layout/mv_sort.asciidoc[]

View file

@ -0,0 +1,6 @@
// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
*Parameters*
`number`::

View file

@ -0,0 +1 @@
<svg version="1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg" width="300" height="46" viewbox="0 0 300 46"><defs><style type="text/css">#guide .c{fill:none;stroke:#222222;}#guide .k{fill:#000000;font-family:Roboto Mono,Sans-serif;font-size:20px;}#guide .s{fill:#e4f4ff;stroke:#222222;}#guide .syn{fill:#8D8D8D;font-family:Roboto Mono,Sans-serif;font-size:20px;}</style></defs><path class="c" d="M0 31h5m104 0h10m32 0h10m92 0h10m32 0h5"/><rect class="s" x="5" y="5" width="104" height="36"/><text class="k" x="15" y="31">STD_DEV</text><rect class="s" x="119" y="5" width="32" height="36" rx="7"/><text class="syn" x="129" y="31">(</text><rect class="s" x="161" y="5" width="92" height="36" rx="7"/><text class="k" x="171" y="31">number</text><rect class="s" x="263" y="5" width="32" height="36" rx="7"/><text class="syn" x="273" y="31">)</text></svg>

After

Width:  |  Height:  |  Size: 884 B

View file

@ -6,5 +6,7 @@
|===
field | query | result
keyword | keyword | boolean
keyword | text | boolean
text | keyword | boolean
text | text | boolean
|===

View file

@ -0,0 +1,11 @@
// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
*Supported types*
[%header.monospaced.styled,format=dsv,separator=|]
|===
number | result
double | double
integer | double
long | double
|===

View file

@ -1,15 +1,17 @@
[discrete]
[[esql-stats-by]]
=== `STATS ... BY`
=== `STATS`
The `STATS ... BY` processing command groups rows according to a common value
The `STATS` processing command groups rows according to a common value
and calculates one or more aggregated values over the grouped rows.
**Syntax**
[source,esql]
----
STATS [column1 =] expression1[, ..., [columnN =] expressionN]
STATS [column1 =] expression1 [WHERE boolean_expression1][,
...,
[columnN =] expressionN [WHERE boolean_expressionN]]
[BY grouping_expression1[, ..., grouping_expressionN]]
----
@ -28,14 +30,18 @@ An expression that computes an aggregated value.
An expression that outputs the values to group by.
If its name coincides with one of the computed columns, that column will be ignored.
`boolean_expressionX`::
The condition that must be met for a row to be included in the evaluation of `expressionX`.
NOTE: Individual `null` values are skipped when computing aggregations.
*Description*
The `STATS ... BY` processing command groups rows according to a common value
and calculate one or more aggregated values over the grouped rows. If `BY` is
omitted, the output table contains exactly one row with the aggregations applied
over the entire dataset.
The `STATS` processing command groups rows according to a common value
and calculates one or more aggregated values over the grouped rows. For the
calculation of each aggregated value, the rows in a group can be filtered with
`WHERE`. If `BY` is omitted, the output table contains exactly one row with
the aggregations applied over the entire dataset.
The following <<esql-agg-functions,aggregation functions>> are supported:
@ -90,6 +96,29 @@ include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues]
include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues-result]
|===
To filter the rows that go into an aggregation, use the `WHERE` clause:
[source.merge.styled,esql]
----
include::{esql-specs}/stats.csv-spec[tag=aggFiltering]
----
[%header.monospaced.styled,format=dsv,separator=|]
|===
include::{esql-specs}/stats.csv-spec[tag=aggFiltering-result]
|===
The aggregations can be mixed, with and without a filter and grouping is
optional as well:
[source.merge.styled,esql]
----
include::{esql-specs}/stats.csv-spec[tag=aggFilteringNoGroup]
----
[%header.monospaced.styled,format=dsv,separator=|]
|===
include::{esql-specs}/stats.csv-spec[tag=aggFilteringNoGroup-result]
|===
[[esql-stats-mv-group]]
If the grouping key is multivalued then the input row is in all groups:
@ -109,7 +138,7 @@ It's also possible to group by multiple values:
include::{esql-specs}/stats.csv-spec[tag=statsGroupByMultipleValues]
----
If the all grouping keys are multivalued then the input row is in all groups:
If all the grouping keys are multivalued then the input row is in all groups:
[source.merge.styled,esql]
----
@ -121,7 +150,7 @@ include::{esql-specs}/stats.csv-spec[tag=multi-mv-group-result]
|===
Both the aggregating functions and the grouping expressions accept other
functions. This is useful for using `STATS...BY` on multivalue columns.
functions. This is useful for using `STATS` on multivalue columns.
For example, to calculate the average salary change, you can use `MV_AVG` to
first average the multiple values per employee, and use the result with the
`AVG` function:

View file

@ -53,8 +53,8 @@ https://docs.oracle.com/en/java/javase/17/security/java-cryptography-architectur
https://docs.oracle.com/en/java/javase/17/security/java-secure-socket-extension-jsse-reference-guide.html[JSSE] implementation is required
so that the JVM uses FIPS validated implementations of NIST recommended cryptographic algorithms.
Elasticsearch has been tested with Bouncy Castle's https://repo1.maven.org/maven2/org/bouncycastle/bc-fips/1.0.2.4/bc-fips-1.0.2.4.jar[bc-fips 1.0.2.4]
and https://repo1.maven.org/maven2/org/bouncycastle/bctls-fips/1.0.17/bctls-fips-1.0.17.jar[bctls-fips 1.0.17].
Elasticsearch has been tested with Bouncy Castle's https://repo1.maven.org/maven2/org/bouncycastle/bc-fips/1.0.2.5/bc-fips-1.0.2.5.jar[bc-fips 1.0.2.5]
and https://repo1.maven.org/maven2/org/bouncycastle/bctls-fips/1.0.19/bctls-fips-1.0.19.jar[bctls-fips 1.0.19].
Please refer to the {es}
https://www.elastic.co/support/matrix#matrix_jvm[JVM support matrix] for details on which combinations of JVM and security provider are supported in FIPS mode. Elasticsearch does not ship with a FIPS certified provider. It is the responsibility of the user
to install and configure the security provider to ensure compliance with FIPS 140-2. Using a FIPS certified provider will ensure that only

View file

@ -1201,6 +1201,11 @@
<sha256 value="e7cd8951956d349b568b7ccfd4f5b2529a8c113e67c32b028f52ffda371259d9" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="commons-cli" name="commons-cli" version="1.5.0">
<artifact name="commons-cli-1.5.0.jar">
<sha256 value="bc8bb01fc0fad250385706e20f927ddcff6173f6339b387dc879237752567ac6" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="commons-codec" name="commons-codec" version="1.11">
<artifact name="commons-codec-1.11.jar">
<sha256 value="e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d" origin="Generated by Gradle"/>
@ -1256,16 +1261,16 @@
<sha256 value="05662373044f3dff112567b7bb5dfa1174e91e074c0c727b4412788013f49d56" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="commons-io" name="commons-io" version="2.11.0">
<artifact name="commons-io-2.11.0.jar">
<sha256 value="961b2f6d87dbacc5d54abf45ab7a6e2495f89b75598962d8c723cea9bc210908" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="commons-io" name="commons-io" version="2.15.1">
<artifact name="commons-io-2.15.1.jar">
<sha256 value="a58af12ee1b68cfd2ebb0c27caef164f084381a00ec81a48cc275fd7ea54e154" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="commons-io" name="commons-io" version="2.16.1">
<artifact name="commons-io-2.16.1.jar">
<sha256 value="f41f7baacd716896447ace9758621f62c1c6b0a91d89acee488da26fc477c84f" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="commons-io" name="commons-io" version="2.17.0">
<artifact name="commons-io-2.17.0.jar">
<sha256 value="4aa4ca48f3dfd30b78220b7881d8cb93eac4093ec94361b6befa9487998a550b" origin="Generated by Gradle"/>
@ -2372,14 +2377,14 @@
<sha256 value="a29e54e5c9d03f52ea28a544342140f73870a5bca9a9789988a66c199b01dd8a" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.apache.hadoop" name="hadoop-client-api" version="3.3.3">
<artifact name="hadoop-client-api-3.3.3.jar">
<sha256 value="447292218fb52aac173efeea549bdc7e2392c6445d33ae28c38a4d9b90d1d31a" origin="Generated by Gradle"/>
<component group="org.apache.hadoop" name="hadoop-client-api" version="3.4.1">
<artifact name="hadoop-client-api-3.4.1.jar">
<sha256 value="a964d4daa054c9615bbafb4553efbb140fa7fb9ac6f358a24393f183a5703438" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.apache.hadoop" name="hadoop-client-runtime" version="3.3.3">
<artifact name="hadoop-client-runtime-3.3.3.jar">
<sha256 value="e2151873292cb48da649e4a51ce43009faf3528af1778aa64919f7514541ca27" origin="Generated by Gradle"/>
<component group="org.apache.hadoop" name="hadoop-client-runtime" version="3.4.1">
<artifact name="hadoop-client-runtime-3.4.1.jar">
<sha256 value="f6a800a159f918670db533606d33560d6c13b7e13f14eda493280ae33b9eeb2f" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.apache.hadoop" name="hadoop-common" version="2.8.5">
@ -2414,9 +2419,9 @@
<sha256 value="581e11802400f77010937e1aa2b5ed48b2dca26db054fb9493ffe799e562cd57" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.apache.hadoop" name="hadoop-hdfs" version="3.3.3">
<artifact name="hadoop-hdfs-3.3.3.jar">
<sha256 value="544a835362ea112476f05d21d6a16b0f20cdcf3467f170cef762cffdac9166c7" origin="Generated by Gradle"/>
<component group="org.apache.hadoop" name="hadoop-hdfs" version="3.4.1">
<artifact name="hadoop-hdfs-3.4.1.jar">
<sha256 value="91d1bcdb554dcf95d2daa12c7c0ef706c64692c21e9edcf2059d4683bedb0cc6" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.apache.hadoop" name="hadoop-hdfs-client" version="2.8.5">
@ -3288,14 +3293,14 @@
<sha256 value="d749db58c2bd353f1c03541d747b753931d4b84da8e48993ef51efe8694b4ed7" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.bouncycastle" name="bc-fips" version="1.0.2.4">
<artifact name="bc-fips-1.0.2.4.jar">
<sha256 value="703ecd8a3a619800269bc8cd442f2ebf469bd2fe70478364f58ddc6460c35f9f" origin="Generated by Gradle"/>
<component group="org.bouncycastle" name="bc-fips" version="1.0.2.5">
<artifact name="bc-fips-1.0.2.5.jar">
<sha256 value="50e4c7a0d0c68413d3d8587560d56945ac09e7c89c41bd971cd22d76be6f1085" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.bouncycastle" name="bc-fips-debug" version="1.0.2.4">
<artifact name="bc-fips-debug-1.0.2.4.jar">
<sha256 value="a025e947c9c91d023bf2a0a3a74d78d5f8b9f6f0f4de13dc52025f2b996a306b" origin="Generated by Gradle"/>
<component group="org.bouncycastle" name="bc-fips-debug" version="1.0.2.5">
<artifact name="bc-fips-debug-1.0.2.5.jar">
<sha256 value="5cfda7e020c5c1a3b1724386f139957472e551494254b8fc74e34f73590fc605" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.bouncycastle" name="bcpg-fips" version="1.0.7.1">
@ -3333,9 +3338,9 @@
<sha256 value="add5915e6acfc6ab5836e1fd8a5e21c6488536a8c1f21f386eeb3bf280b702d7" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.bouncycastle" name="bctls-fips" version="1.0.17">
<artifact name="bctls-fips-1.0.17.jar">
<sha256 value="51dfd28ec370f27ba4efc10ec8e21129e34e2f2340ac465a6d17a468e0a4696d" origin="Generated by Gradle"/>
<component group="org.bouncycastle" name="bctls-fips" version="1.0.19">
<artifact name="bctls-fips-1.0.19.jar">
<sha256 value="a0bbad2eb5268f1baa08f0e2e69cb61cd292e19e73595c620d586d335d97d1a8" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.bouncycastle" name="bcutil-jdk18on" version="1.78.1">

View file

@ -8,7 +8,6 @@
*/
apply plugin: 'elasticsearch.publish'
apply plugin: 'elasticsearch.mrjar'
dependencies {
// This dependency is used only by :libs:core for null-checking interop with other tools

View file

@ -36,6 +36,22 @@ import static org.objectweb.asm.Opcodes.INVOKESTATIC;
import static org.objectweb.asm.Opcodes.INVOKEVIRTUAL;
public class InstrumenterImpl implements Instrumenter {
// Internal (slash-separated) descriptor of the checker interface that instrumented
// call sites are typed against.
private static final String checkerClassDescriptor;
// Internal name of the handle class whose static instance() method supplies the checker.
private static final String handleClass;
static {
    // Select the checker variant matching the running JDK: on Java 23+ the
    // "Java23"-prefixed interface/handle pair is used, otherwise the base pair.
    int javaVersion = Runtime.version().feature();
    final String classNamePrefix;
    if (javaVersion >= 23) {
        classNamePrefix = "Java23";
    } else {
        classNamePrefix = "";
    }
    // Build the internal names; the handle class is the checker name + "Handle"
    // by convention (e.g. EntitlementChecker -> EntitlementCheckerHandle).
    String checkerClass = "org/elasticsearch/entitlement/bridge/" + classNamePrefix + "EntitlementChecker";
    handleClass = checkerClass + "Handle";
    checkerClassDescriptor = Type.getObjectType(checkerClass).getDescriptor();
}
/**
* To avoid class name collisions during testing without an agent to replace classes in-place.
*/
@ -269,13 +285,7 @@ public class InstrumenterImpl implements Instrumenter {
}
protected void pushEntitlementChecker(MethodVisitor mv) {
mv.visitMethodInsn(
INVOKESTATIC,
"org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle",
"instance",
"()Lorg/elasticsearch/entitlement/bridge/EntitlementChecker;",
false
);
mv.visitMethodInsn(INVOKESTATIC, handleClass, "instance", "()" + checkerClassDescriptor, false);
}
public record ClassFileInfo(String fileName, byte[] bytecodes) {}

View file

@ -7,19 +7,18 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask
apply plugin: 'elasticsearch.build'
apply plugin: 'elasticsearch.mrjar'
configurations {
bridgeJar {
canBeConsumed = true
canBeResolved = false
tasks.named('jar').configure {
// guarding for intellij
if (sourceSets.findByName("main23")) {
from sourceSets.main23.output
}
}
artifacts {
bridgeJar(jar)
}
tasks.named('forbiddenApisMain').configure {
tasks.withType(CheckForbiddenApisTask).configureEach {
replaceSignatureFiles 'jdk-signatures'
}

View file

@ -9,9 +9,6 @@
package org.elasticsearch.entitlement.bridge;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
* Makes the {@link EntitlementChecker} available to injected bytecode.
*/
@ -35,27 +32,7 @@ public class EntitlementCheckerHandle {
* The {@code EntitlementInitialization} class is what actually instantiates it and makes it available;
* here, we copy it into a static final variable for maximum performance.
*/
private static final EntitlementChecker instance;
static {
String initClazz = "org.elasticsearch.entitlement.initialization.EntitlementInitialization";
final Class<?> clazz;
try {
clazz = ClassLoader.getSystemClassLoader().loadClass(initClazz);
} catch (ClassNotFoundException e) {
throw new AssertionError("java.base cannot find entitlement initialziation", e);
}
final Method checkerMethod;
try {
checkerMethod = clazz.getMethod("checker");
} catch (NoSuchMethodException e) {
throw new AssertionError("EntitlementInitialization is missing checker() method", e);
}
try {
instance = (EntitlementChecker) checkerMethod.invoke(null);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new AssertionError(e);
}
}
private static final EntitlementChecker instance = HandleLoader.load(EntitlementChecker.class);
}
// no construction

View file

@ -0,0 +1,40 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.entitlement.bridge;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
 * Reflectively fetches the active {@link EntitlementChecker} from the entitlement
 * initialization class, which is loaded by the system class loader and therefore
 * cannot be referenced directly from this bridge code.
 */
class HandleLoader {

    /** Binary name of the class that instantiates and publishes the checker. */
    private static final String INIT_CLASS_NAME = "org.elasticsearch.entitlement.initialization.EntitlementInitialization";

    /**
     * Loads the checker singleton via {@code EntitlementInitialization.checker()} and
     * casts it to the requested checker type.
     *
     * @param checkerClass the checker interface the caller expects
     * @return the checker instance published by the initialization class
     * @throws AssertionError if the initialization class, its {@code checker()} method,
     *                        or the invocation itself fails — all are wiring bugs
     */
    static <T extends EntitlementChecker> T load(Class<T> checkerClass) {
        Class<?> initializationClass;
        try {
            initializationClass = ClassLoader.getSystemClassLoader().loadClass(INIT_CLASS_NAME);
        } catch (ClassNotFoundException e) {
            throw new AssertionError("java.base cannot find entitlement initialization", e);
        }
        Method checkerAccessor;
        try {
            checkerAccessor = initializationClass.getMethod("checker");
        } catch (NoSuchMethodException e) {
            throw new AssertionError("EntitlementInitialization is missing checker() method", e);
        }
        Object checker;
        try {
            checker = checkerAccessor.invoke(null);
        } catch (IllegalAccessException | InvocationTargetException e) {
            throw new AssertionError(e);
        }
        return checkerClass.cast(checker);
    }

    // static utility; never instantiated
    private HandleLoader() {}
}

View file

@ -7,21 +7,6 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.gradle.internal.distribution;
package org.elasticsearch.entitlement.bridge;
import org.elasticsearch.gradle.ElasticsearchDistributionType;
public class DockerUbiElasticsearchDistributionType implements ElasticsearchDistributionType {
DockerUbiElasticsearchDistributionType() {}
@Override
public String getName() {
return "dockerUbi";
}
@Override
public boolean isDocker() {
return true;
}
}
/**
 * Checker interface selected on JDK 23+ (see the "Java23" class-name prefix logic in
 * the instrumenter and initialization code); extends the base {@link EntitlementChecker}
 * so both variants can be handled uniformly.
 */
public interface Java23EntitlementChecker extends EntitlementChecker {}

View file

@ -0,0 +1,27 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.entitlement.bridge;
/**
* Java23 variant of {@link EntitlementChecker} handle holder.
*/
public class Java23EntitlementCheckerHandle {

    /**
     * Returns the process-wide {@link Java23EntitlementChecker} used by instrumented
     * bytecode on JDK 23+.
     */
    public static Java23EntitlementChecker instance() {
        return Holder.instance;
    }

    /**
     * Initialization-on-demand holder: Holder is not initialized until the first call
     * to {@link #instance}, so the reflective load runs lazily and exactly once.
     */
    private static class Holder {
        private static final Java23EntitlementChecker instance = HandleLoader.load(Java23EntitlementChecker.class);
    }

    // no construction
    private Java23EntitlementCheckerHandle() {}
}

View file

@ -6,10 +6,13 @@
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask
apply plugin: 'elasticsearch.build'
apply plugin: 'elasticsearch.publish'
apply plugin: 'elasticsearch.embedded-providers'
apply plugin: 'elasticsearch.mrjar'
embeddedProviders {
impl 'entitlement', project(':libs:entitlement:asm-provider')
@ -23,8 +26,13 @@ dependencies {
testImplementation(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'entitlement'
}
// guarding for intellij
if (sourceSets.findByName("main23")) {
main23CompileOnly project(path: ':libs:entitlement:bridge', configuration: 'java23')
}
}
tasks.named('forbiddenApisMain').configure {
tasks.withType(CheckForbiddenApisTask).configureEach {
replaceSignatureFiles 'jdk-signatures'
}

View file

@ -27,6 +27,8 @@ import java.io.IOException;
import java.lang.instrument.Instrumentation;
import java.lang.module.ModuleFinder;
import java.lang.module.ModuleReference;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
@ -59,7 +61,7 @@ public class EntitlementInitialization {
// Note: referenced by agent reflectively
public static void initialize(Instrumentation inst) throws Exception {
manager = new ElasticsearchEntitlementChecker(createPolicyManager());
manager = initChecker();
Map<MethodKey, CheckerMethod> methodMap = INSTRUMENTER_FACTORY.lookupMethodsToInstrument(
"org.elasticsearch.entitlement.bridge.EntitlementChecker"
@ -137,6 +139,36 @@ public class EntitlementInitialization {
return Set.of(ALL_UNNAMED);
}
/**
 * Builds the runtime entitlement checker, reflectively selecting the implementation
 * that matches the running JDK ("Java23"-prefixed variant on Java 23+, base class
 * otherwise) and wiring it to a freshly created {@code PolicyManager}.
 *
 * @return the checker instance to install
 * @throws IOException if policy creation fails — propagated from {@code createPolicyManager()}
 * @throws AssertionError if the implementation class or its constructor is missing,
 *                        or instantiation fails; these indicate packaging bugs
 */
private static ElasticsearchEntitlementChecker initChecker() throws IOException {
    final PolicyManager policyManager = createPolicyManager();

    // Pick the versioned implementation by name; it cannot be referenced statically.
    final String versionPrefix = Runtime.version().feature() >= 23 ? "Java23" : "";
    final String implName = "org.elasticsearch.entitlement.runtime.api." + versionPrefix + "ElasticsearchEntitlementChecker";

    final Class<?> implClass;
    try {
        implClass = Class.forName(implName);
    } catch (ClassNotFoundException e) {
        throw new AssertionError("entitlement lib cannot find entitlement impl", e);
    }
    final Constructor<?> ctor;
    try {
        ctor = implClass.getConstructor(PolicyManager.class);
    } catch (NoSuchMethodException e) {
        throw new AssertionError("entitlement impl is missing no arg constructor", e);
    }
    try {
        return (ElasticsearchEntitlementChecker) ctor.newInstance(policyManager);
    } catch (IllegalAccessException | InvocationTargetException | InstantiationException e) {
        throw new AssertionError(e);
    }
}
/**
 * Converts a class's binary name to JVM internal form,
 * e.g. {@code java.util.Map} becomes {@code java/util/Map}.
 */
private static String internalName(Class<?> c) {
    String binaryName = c.getName();
    return binaryName.replace('.', '/');
}

View file

@ -0,0 +1,26 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.entitlement.runtime.api;
import org.elasticsearch.entitlement.bridge.Java23EntitlementChecker;
import org.elasticsearch.entitlement.runtime.policy.PolicyManager;
/**
 * JDK 23+ variant of {@link ElasticsearchEntitlementChecker}; selected reflectively by
 * the entitlement initialization code via the "Java23" class-name prefix.
 */
public class Java23ElasticsearchEntitlementChecker extends ElasticsearchEntitlementChecker implements Java23EntitlementChecker {

    /**
     * @param policyManager forwarded unchanged to the base checker
     */
    public Java23ElasticsearchEntitlementChecker(PolicyManager policyManager) {
        super(policyManager);
    }

    @Override
    public void check$java_lang_System$exit(Class<?> callerClass, int status) {
        // TODO: this is just an example, we shouldn't really override a method implemented in the superclass
        super.check$java_lang_System$exit(callerClass, status);
    }
}

View file

@ -7,9 +7,8 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
plugins {
id 'java'
apply plugin: 'elasticsearch.build'
tasks.named('forbiddenApisMain').configure {
replaceSignatureFiles 'jdk-signatures'
}
group = 'org.elasticsearch.entitlement.tools'

View file

@ -1,5 +1,3 @@
import org.elasticsearch.gradle.internal.info.BuildParams
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License

View file

@ -7,7 +7,6 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask
apply plugin: 'elasticsearch.publish'

View file

@ -1,5 +1,3 @@
import org.elasticsearch.gradle.internal.info.BuildParams
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License

View file

@ -81,14 +81,12 @@ public class InternalAdjacencyMatrix extends InternalMultiBucketAggregation<Inte
return aggregations;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
private void bucketToXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(CommonFields.KEY.getPreferredName(), key);
builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
aggregations.toXContentInternal(builder, params);
builder.endObject();
return builder;
}
@Override
@ -237,7 +235,7 @@ public class InternalAdjacencyMatrix extends InternalMultiBucketAggregation<Inte
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.startArray(CommonFields.BUCKETS.getPreferredName());
for (InternalBucket bucket : buckets) {
bucket.toXContent(builder, params);
bucket.bucketToXContent(builder, params);
}
builder.endArray();
return builder;

View file

@ -99,8 +99,7 @@ public final class InternalAutoDateHistogram extends InternalMultiBucketAggregat
return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
private void bucketToXContent(XContentBuilder builder, Params params, DocValueFormat format) throws IOException {
String keyAsString = format.format(key).toString();
builder.startObject();
if (format != DocValueFormat.RAW) {
@ -110,7 +109,6 @@ public final class InternalAutoDateHistogram extends InternalMultiBucketAggregat
builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
aggregations.toXContentInternal(builder, params);
builder.endObject();
return builder;
}
@Override
@ -597,7 +595,7 @@ public final class InternalAutoDateHistogram extends InternalMultiBucketAggregat
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.startArray(CommonFields.BUCKETS.getPreferredName());
for (Bucket bucket : buckets) {
bucket.toXContent(builder, params);
bucket.bucketToXContent(builder, params, format);
}
builder.endArray();
builder.field("interval", getInterval().toString());

View file

@ -36,24 +36,21 @@ public class InternalTimeSeries extends InternalMultiBucketAggregation<InternalT
*/
public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket {
protected long bucketOrd;
protected final boolean keyed;
protected final BytesRef key;
// TODO: make computing docCount optional
protected long docCount;
protected InternalAggregations aggregations;
public InternalBucket(BytesRef key, long docCount, InternalAggregations aggregations, boolean keyed) {
public InternalBucket(BytesRef key, long docCount, InternalAggregations aggregations) {
this.key = key;
this.docCount = docCount;
this.aggregations = aggregations;
this.keyed = keyed;
}
/**
* Read from a stream.
*/
public InternalBucket(StreamInput in, boolean keyed) throws IOException {
this.keyed = keyed;
public InternalBucket(StreamInput in) throws IOException {
key = in.readBytesRef();
docCount = in.readVLong();
aggregations = InternalAggregations.readFrom(in);
@ -86,8 +83,7 @@ public class InternalTimeSeries extends InternalMultiBucketAggregation<InternalT
return aggregations;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException {
// Use map key in the xcontent response:
var key = getKey();
if (keyed) {
@ -99,7 +95,6 @@ public class InternalTimeSeries extends InternalMultiBucketAggregation<InternalT
builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
aggregations.toXContentInternal(builder, params);
builder.endObject();
return builder;
}
@Override
@ -112,14 +107,13 @@ public class InternalTimeSeries extends InternalMultiBucketAggregation<InternalT
}
InternalTimeSeries.InternalBucket that = (InternalTimeSeries.InternalBucket) other;
return Objects.equals(key, that.key)
&& Objects.equals(keyed, that.keyed)
&& Objects.equals(docCount, that.docCount)
&& Objects.equals(aggregations, that.aggregations);
}
@Override
public int hashCode() {
return Objects.hash(getClass(), key, keyed, docCount, aggregations);
return Objects.hash(getClass(), key, docCount, aggregations);
}
}
@ -143,7 +137,7 @@ public class InternalTimeSeries extends InternalMultiBucketAggregation<InternalT
int size = in.readVInt();
List<InternalTimeSeries.InternalBucket> buckets = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
buckets.add(new InternalTimeSeries.InternalBucket(in, keyed));
buckets.add(new InternalTimeSeries.InternalBucket(in));
}
this.buckets = buckets;
this.bucketMap = null;
@ -162,7 +156,7 @@ public class InternalTimeSeries extends InternalMultiBucketAggregation<InternalT
builder.startArray(CommonFields.BUCKETS.getPreferredName());
}
for (InternalBucket bucket : buckets) {
bucket.toXContent(builder, params);
bucket.bucketToXContent(builder, params, keyed);
}
if (keyed) {
builder.endObject();
@ -252,14 +246,14 @@ public class InternalTimeSeries extends InternalMultiBucketAggregation<InternalT
@Override
public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) {
return new InternalBucket(prototype.key, prototype.docCount, aggregations, prototype.keyed);
return new InternalBucket(prototype.key, prototype.docCount, aggregations);
}
private InternalBucket reduceBucket(List<InternalBucket> buckets, AggregationReduceContext context) {
InternalTimeSeries.InternalBucket reduced = null;
for (InternalTimeSeries.InternalBucket bucket : buckets) {
if (reduced == null) {
reduced = new InternalTimeSeries.InternalBucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed);
reduced = new InternalTimeSeries.InternalBucket(bucket.key, bucket.docCount, bucket.aggregations);
} else {
reduced.docCount += bucket.docCount;
}

View file

@ -83,8 +83,7 @@ public class TimeSeriesAggregator extends BucketsAggregator {
InternalTimeSeries.InternalBucket bucket = new InternalTimeSeries.InternalBucket(
BytesRef.deepCopyOf(spare), // Closing bucketOrds will corrupt the bytes ref, so need to make a deep copy here.
docCount,
null,
keyed
null
);
bucket.bucketOrd = ordsEnum.ord();
buckets.add(bucket);

View file

@ -49,7 +49,7 @@ public class InternalTimeSeriesTests extends AggregationMultiBucketAggregationTe
}
try {
var key = TimeSeriesIdFieldMapper.buildLegacyTsid(routingPathFields).toBytesRef();
bucketList.add(new InternalBucket(key, docCount, aggregations, keyed));
bucketList.add(new InternalBucket(key, docCount, aggregations));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
@ -108,10 +108,10 @@ public class InternalTimeSeriesTests extends AggregationMultiBucketAggregationTe
InternalTimeSeries first = new InternalTimeSeries(
"ts",
List.of(
new InternalBucket(new BytesRef("1"), 3, InternalAggregations.EMPTY, false),
new InternalBucket(new BytesRef("10"), 6, InternalAggregations.EMPTY, false),
new InternalBucket(new BytesRef("2"), 2, InternalAggregations.EMPTY, false),
new InternalBucket(new BytesRef("9"), 5, InternalAggregations.EMPTY, false)
new InternalBucket(new BytesRef("1"), 3, InternalAggregations.EMPTY),
new InternalBucket(new BytesRef("10"), 6, InternalAggregations.EMPTY),
new InternalBucket(new BytesRef("2"), 2, InternalAggregations.EMPTY),
new InternalBucket(new BytesRef("9"), 5, InternalAggregations.EMPTY)
),
false,
Map.of()
@ -119,8 +119,8 @@ public class InternalTimeSeriesTests extends AggregationMultiBucketAggregationTe
InternalTimeSeries second = new InternalTimeSeries(
"ts",
List.of(
new InternalBucket(new BytesRef("2"), 1, InternalAggregations.EMPTY, false),
new InternalBucket(new BytesRef("3"), 3, InternalAggregations.EMPTY, false)
new InternalBucket(new BytesRef("2"), 1, InternalAggregations.EMPTY),
new InternalBucket(new BytesRef("3"), 3, InternalAggregations.EMPTY)
),
false,
Map.of()
@ -128,9 +128,9 @@ public class InternalTimeSeriesTests extends AggregationMultiBucketAggregationTe
InternalTimeSeries third = new InternalTimeSeries(
"ts",
List.of(
new InternalBucket(new BytesRef("1"), 2, InternalAggregations.EMPTY, false),
new InternalBucket(new BytesRef("3"), 4, InternalAggregations.EMPTY, false),
new InternalBucket(new BytesRef("9"), 4, InternalAggregations.EMPTY, false)
new InternalBucket(new BytesRef("1"), 2, InternalAggregations.EMPTY),
new InternalBucket(new BytesRef("3"), 4, InternalAggregations.EMPTY),
new InternalBucket(new BytesRef("9"), 4, InternalAggregations.EMPTY)
),
false,
Map.of()

View file

@ -176,19 +176,19 @@ public class TimeSeriesAggregatorTests extends AggregationTestCase {
InternalDateHistogram byTimeStampBucket = ts.getBucketByKey("{dim1=aaa, dim2=xxx}").getAggregations().get("by_timestamp");
assertThat(
byTimeStampBucket.getBuckets(),
contains(new InternalDateHistogram.Bucket(startTime, 2, false, null, InternalAggregations.EMPTY))
contains(new InternalDateHistogram.Bucket(startTime, 2, null, InternalAggregations.EMPTY))
);
assertThat(ts.getBucketByKey("{dim1=aaa, dim2=yyy}").docCount, equalTo(2L));
byTimeStampBucket = ts.getBucketByKey("{dim1=aaa, dim2=yyy}").getAggregations().get("by_timestamp");
assertThat(
byTimeStampBucket.getBuckets(),
contains(new InternalDateHistogram.Bucket(startTime, 2, false, null, InternalAggregations.EMPTY))
contains(new InternalDateHistogram.Bucket(startTime, 2, null, InternalAggregations.EMPTY))
);
assertThat(ts.getBucketByKey("{dim1=bbb, dim2=zzz}").docCount, equalTo(4L));
byTimeStampBucket = ts.getBucketByKey("{dim1=bbb, dim2=zzz}").getAggregations().get("by_timestamp");
assertThat(
byTimeStampBucket.getBuckets(),
contains(new InternalDateHistogram.Bucket(startTime, 4, false, null, InternalAggregations.EMPTY))
contains(new InternalDateHistogram.Bucket(startTime, 4, null, InternalAggregations.EMPTY))
);
};

View file

@ -1,4 +1,3 @@
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
apply plugin: 'elasticsearch.test-with-dependencies'

View file

@ -1,5 +1,3 @@
import org.elasticsearch.gradle.internal.info.BuildParams
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License

View file

@ -9,7 +9,6 @@
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
apply plugin: 'elasticsearch.internal-java-rest-test'

View file

@ -7,8 +7,6 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import org.elasticsearch.gradle.internal.info.BuildParams
apply plugin: 'elasticsearch.internal-cluster-test'
esplugin {

View file

@ -7,10 +7,8 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import org.elasticsearch.gradle.internal.info.BuildParams
apply plugin: 'elasticsearch.legacy-yaml-rest-test'
apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test'
apply plugin: 'elasticsearch.internal-yaml-rest-test'
apply plugin: 'elasticsearch.yaml-rest-compat-test'
apply plugin: 'elasticsearch.internal-cluster-test'
esplugin {
@ -23,9 +21,3 @@ restResources {
include '_common', 'cluster', 'field_caps', 'nodes', 'indices', 'index', 'search', 'get'
}
}
if (buildParams.isSnapshotBuild() == false) {
tasks.named("test").configure {
systemProperty 'es.index_mode_feature_flag_registered', 'true'
}
}

View file

@ -12,8 +12,10 @@ package org.elasticsearch.index.mapper;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.junit.ClassRule;
/** Runs yaml rest tests */
public class MapperExtrasClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
@ -26,4 +28,12 @@ public class MapperExtrasClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@ClassRule
public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("mapper-extras").build();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}

View file

@ -6,8 +6,8 @@
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
apply plugin: 'elasticsearch.legacy-yaml-rest-test'
apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test'
apply plugin: 'elasticsearch.internal-yaml-rest-test'
apply plugin: 'elasticsearch.yaml-rest-compat-test'
apply plugin: 'elasticsearch.internal-cluster-test'
esplugin {

View file

@ -12,8 +12,10 @@ package org.elasticsearch.join;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.junit.ClassRule;
public class ParentChildClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
public ParentChildClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
@ -24,4 +26,12 @@ public class ParentChildClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase
public static Iterable<Object[]> parameters() throws Exception {
return createParameters();
}
@ClassRule
public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("parent-join").build();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}

View file

@ -6,8 +6,8 @@
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
apply plugin: 'elasticsearch.legacy-yaml-rest-test'
apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test'
apply plugin: 'elasticsearch.internal-yaml-rest-test'
apply plugin: 'elasticsearch.yaml-rest-compat-test'
apply plugin: 'elasticsearch.internal-cluster-test'
esplugin {

View file

@ -12,8 +12,10 @@ package org.elasticsearch.percolator;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.junit.ClassRule;
public class PercolatorClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
public PercolatorClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
@ -24,4 +26,12 @@ public class PercolatorClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@ClassRule
public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("percolator").build();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}

View file

@ -10,15 +10,14 @@
import org.elasticsearch.gradle.Architecture
import org.elasticsearch.gradle.OS
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.AntFixture
import org.elasticsearch.gradle.transform.UnzipTransform
apply plugin: 'elasticsearch.test-with-dependencies'
apply plugin: 'elasticsearch.jdk-download'
apply plugin: 'elasticsearch.legacy-yaml-rest-test'
apply plugin: 'elasticsearch.internal-yaml-rest-test'
apply plugin: 'elasticsearch.legacy-java-rest-test'
apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test'
apply plugin: 'elasticsearch.yaml-rest-compat-test'
apply plugin: 'elasticsearch.internal-cluster-test'
esplugin {
@ -33,7 +32,6 @@ testClusters.configureEach {
module ':modules:rest-root'
// Whitelist reindexing from the local node so we can test reindex-from-remote.
setting 'reindex.remote.whitelist', '127.0.0.1:*'
requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0")
}
dependencies {
@ -42,6 +40,10 @@ dependencies {
// for parent/child testing
testImplementation project(':modules:parent-join')
testImplementation project(':modules:rest-root')
clusterModules project(':modules:lang-painless')
clusterModules project(':modules:parent-join')
clusterModules project(":modules:rest-root")
}
restResources {

View file

@ -35,7 +35,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitC
* The test works as follows:
* 1. Start a large (reasonably long running) reindexing request on the coordinator-only node.
* 2. Check that the reindexing task appears on the coordinating node
* 3. With a 10s timeout value for MAXIMUM_REINDEXING_TIMEOUT_SETTING,
* 3. With a 60s timeout value for MAXIMUM_REINDEXING_TIMEOUT_SETTING,
* wait for the reindexing task to complete before closing the node
* 4. Confirm that the reindexing task succeeds with the wait (it will fail without it)
*/
@ -58,8 +58,9 @@ public class ReindexNodeShutdownIT extends ESIntegTestCase {
final String masterNodeName = internalCluster().startMasterOnlyNode();
final String dataNodeName = internalCluster().startDataOnlyNode();
/* Maximum time to wait for reindexing tasks to complete before shutdown */
final Settings COORD_SETTINGS = Settings.builder()
.put(MAXIMUM_REINDEXING_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(10))
.put(MAXIMUM_REINDEXING_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(60))
.build();
final String coordNodeName = internalCluster().startCoordinatingOnlyNode(Settings.EMPTY);
@ -118,7 +119,7 @@ public class ReindexNodeShutdownIT extends ESIntegTestCase {
internalCluster().stopNode(coordNodeName);
}
// Make sure all documents from the source index have been reindexed into the destination index
// Make sure all documents from the source index have been re-indexed into the destination index
private void checkDestinationIndex(String dataNodeName, int numDocs) throws Exception {
assertTrue(indexExists(DEST_INDEX));
flushAndRefresh(DEST_INDEX);

View file

@ -208,7 +208,7 @@ public class FeatureMigrationIT extends AbstractFeatureMigrationIntegTest {
assertIndexHasCorrectProperties(
finalMetadata,
".int-man-old-reindexed-for-8",
".int-man-old-reindexed-for-9",
INTERNAL_MANAGED_FLAG_VALUE,
true,
true,
@ -216,7 +216,7 @@ public class FeatureMigrationIT extends AbstractFeatureMigrationIntegTest {
);
assertIndexHasCorrectProperties(
finalMetadata,
".int-unman-old-reindexed-for-8",
".int-unman-old-reindexed-for-9",
INTERNAL_UNMANAGED_FLAG_VALUE,
false,
true,
@ -224,7 +224,7 @@ public class FeatureMigrationIT extends AbstractFeatureMigrationIntegTest {
);
assertIndexHasCorrectProperties(
finalMetadata,
".ext-man-old-reindexed-for-8",
".ext-man-old-reindexed-for-9",
EXTERNAL_MANAGED_FLAG_VALUE,
true,
false,
@ -232,7 +232,7 @@ public class FeatureMigrationIT extends AbstractFeatureMigrationIntegTest {
);
assertIndexHasCorrectProperties(
finalMetadata,
".ext-unman-old-reindexed-for-8",
".ext-unman-old-reindexed-for-9",
EXTERNAL_UNMANAGED_FLAG_VALUE,
false,
false,

View file

@ -218,7 +218,7 @@ public class MultiFeatureMigrationIT extends AbstractFeatureMigrationIntegTest {
// Finally, verify that all the indices exist and have the properties we expect.
assertIndexHasCorrectProperties(
finalMetadata,
".int-man-old-reindexed-for-8",
".int-man-old-reindexed-for-9",
INTERNAL_MANAGED_FLAG_VALUE,
true,
true,
@ -226,7 +226,7 @@ public class MultiFeatureMigrationIT extends AbstractFeatureMigrationIntegTest {
);
assertIndexHasCorrectProperties(
finalMetadata,
".int-unman-old-reindexed-for-8",
".int-unman-old-reindexed-for-9",
INTERNAL_UNMANAGED_FLAG_VALUE,
false,
true,
@ -234,7 +234,7 @@ public class MultiFeatureMigrationIT extends AbstractFeatureMigrationIntegTest {
);
assertIndexHasCorrectProperties(
finalMetadata,
".ext-man-old-reindexed-for-8",
".ext-man-old-reindexed-for-9",
EXTERNAL_MANAGED_FLAG_VALUE,
true,
false,
@ -242,7 +242,7 @@ public class MultiFeatureMigrationIT extends AbstractFeatureMigrationIntegTest {
);
assertIndexHasCorrectProperties(
finalMetadata,
".ext-unman-old-reindexed-for-8",
".ext-unman-old-reindexed-for-9",
EXTERNAL_UNMANAGED_FLAG_VALUE,
false,
false,
@ -251,7 +251,7 @@ public class MultiFeatureMigrationIT extends AbstractFeatureMigrationIntegTest {
assertIndexHasCorrectProperties(
finalMetadata,
".second-int-man-old-reindexed-for-8",
".second-int-man-old-reindexed-for-9",
SECOND_FEATURE_IDX_FLAG_VALUE,
true,
true,

View file

@ -18,7 +18,6 @@ import java.io.IOException;
import static org.hamcrest.CoreMatchers.containsString;
public class ReindexWithoutContentIT extends ESRestTestCase {
public void testReindexMissingBody() throws IOException {
ResponseException responseException = expectThrows(
ResponseException.class,

View file

@ -12,8 +12,10 @@ package org.elasticsearch.index.reindex;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.junit.ClassRule;
public class ReindexClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
public ReindexClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
@ -24,4 +26,18 @@ public class ReindexClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@ClassRule
public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
.module("reindex")
.module("lang-painless")
.module("parent-join")
.module("rest-root")
.setting("reindex.remote.whitelist", "127.0.0.1:*")
.build();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}

View file

@ -1,8 +1,3 @@
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin
import org.elasticsearch.gradle.internal.test.RestIntegTestTask
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
@ -11,6 +6,11 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin
import org.elasticsearch.gradle.internal.test.RestIntegTestTask
apply plugin: 'elasticsearch.internal-yaml-rest-test'
apply plugin: 'elasticsearch.internal-cluster-test'

View file

@ -112,7 +112,7 @@ public class AzureBlobStoreRepositoryMetricsTests extends AzureBlobStoreReposito
blobContainer.blobExists(purpose, blobName);
// Correct metrics are recorded
metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics()
metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics()
.withRequests(numThrottles + 1)
.withThrottles(numThrottles)
.withExceptions(numThrottles)
@ -137,7 +137,7 @@ public class AzureBlobStoreRepositoryMetricsTests extends AzureBlobStoreReposito
assertThrows(RequestedRangeNotSatisfiedException.class, () -> blobContainer.readBlob(purpose, blobName));
// Correct metrics are recorded
metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB).expectMetrics()
metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB, repository).expectMetrics()
.withRequests(1)
.withThrottles(0)
.withExceptions(1)
@ -170,7 +170,7 @@ public class AzureBlobStoreRepositoryMetricsTests extends AzureBlobStoreReposito
blobContainer.blobExists(purpose, blobName);
// Correct metrics are recorded
metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics()
metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics()
.withRequests(numErrors + 1)
.withThrottles(throttles.get())
.withExceptions(numErrors)
@ -191,7 +191,7 @@ public class AzureBlobStoreRepositoryMetricsTests extends AzureBlobStoreReposito
assertThrows(IOException.class, () -> blobContainer.listBlobs(purpose));
// Correct metrics are recorded
metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.LIST_BLOBS).expectMetrics()
metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.LIST_BLOBS, repository).expectMetrics()
.withRequests(4)
.withThrottles(0)
.withExceptions(4)
@ -322,14 +322,20 @@ public class AzureBlobStoreRepositoryMetricsTests extends AzureBlobStoreReposito
.forEach(TestTelemetryPlugin::resetMeter);
}
private MetricsAsserter metricsAsserter(String dataNodeName, OperationPurpose operationPurpose, AzureBlobStore.Operation operation) {
return new MetricsAsserter(dataNodeName, operationPurpose, operation);
private MetricsAsserter metricsAsserter(
String dataNodeName,
OperationPurpose operationPurpose,
AzureBlobStore.Operation operation,
String repository
) {
return new MetricsAsserter(dataNodeName, operationPurpose, operation, repository);
}
private class MetricsAsserter {
private final String dataNodeName;
private final OperationPurpose purpose;
private final AzureBlobStore.Operation operation;
private final String repository;
enum Result {
Success,
@ -355,10 +361,11 @@ public class AzureBlobStoreRepositoryMetricsTests extends AzureBlobStoreReposito
abstract List<Measurement> getMeasurements(TestTelemetryPlugin testTelemetryPlugin, String name);
}
private MetricsAsserter(String dataNodeName, OperationPurpose purpose, AzureBlobStore.Operation operation) {
private MetricsAsserter(String dataNodeName, OperationPurpose purpose, AzureBlobStore.Operation operation, String repository) {
this.dataNodeName = dataNodeName;
this.purpose = purpose;
this.operation = operation;
this.repository = repository;
}
private class Expectations {
@ -451,6 +458,7 @@ public class AzureBlobStoreRepositoryMetricsTests extends AzureBlobStoreReposito
.filter(
m -> m.attributes().get("operation").equals(operation.getKey())
&& m.attributes().get("purpose").equals(purpose.getKey())
&& m.attributes().get("repo_name").equals(repository)
&& m.attributes().get("repo_type").equals("azure")
)
.findFirst()
@ -462,6 +470,8 @@ public class AzureBlobStoreRepositoryMetricsTests extends AzureBlobStoreReposito
+ operation.getKey()
+ " and purpose="
+ purpose.getKey()
+ " and repo_name="
+ repository
+ " in "
+ measurements
)

View file

@ -402,7 +402,10 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg
)
);
metrics.forEach(metric -> {
assertThat(metric.attributes(), allOf(hasEntry("repo_type", AzureRepository.TYPE), hasKey("operation"), hasKey("purpose")));
assertThat(
metric.attributes(),
allOf(hasEntry("repo_type", AzureRepository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose"))
);
final AzureBlobStore.Operation operation = AzureBlobStore.Operation.fromKey((String) metric.attributes().get("operation"));
final AzureBlobStore.StatsKey statsKey = new AzureBlobStore.StatsKey(
operation,

View file

@ -9,7 +9,6 @@
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin
import java.nio.file.Files

View file

@ -1,7 +1,3 @@
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
@ -10,6 +6,9 @@ import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin
apply plugin: 'elasticsearch.internal-yaml-rest-test'
apply plugin: 'elasticsearch.internal-cluster-test'
apply plugin: 'elasticsearch.internal-java-rest-test'

View file

@ -300,7 +300,10 @@ public class S3BlobStoreRepositoryTests extends ESMockAPIBasedRepositoryIntegTes
)
);
metrics.forEach(metric -> {
assertThat(metric.attributes(), allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("operation"), hasKey("purpose")));
assertThat(
metric.attributes(),
allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose"))
);
final S3BlobStore.Operation operation = S3BlobStore.Operation.parse((String) metric.attributes().get("operation"));
final S3BlobStore.StatsKey statsKey = new S3BlobStore.StatsKey(
operation,

View file

@ -327,6 +327,8 @@ class S3RetryingInputStream extends InputStream {
return Map.of(
"repo_type",
S3Repository.TYPE,
"repo_name",
blobStore.getRepositoryMetadata().name(),
"operation",
Operation.GET_OBJECT.getKey(),
"purpose",

View file

@ -1106,7 +1106,7 @@ public class S3BlobContainerRetriesTests extends AbstractBlobContainerRetriesTes
}
private Map<String, Object> metricAttributes(String action) {
return Map.of("repo_type", "s3", "operation", "GetObject", "purpose", "Indices", "action", action);
return Map.of("repo_type", "s3", "repo_name", "repository", "operation", "GetObject", "purpose", "Indices", "action", action);
}
/**

View file

@ -1,5 +1,3 @@
import org.elasticsearch.gradle.internal.info.BuildParams
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License

View file

@ -1,7 +1,4 @@
tests:
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/esql/esql-async-query-api/line_17}
issue: https://github.com/elastic/elasticsearch/issues/109260
- class: "org.elasticsearch.client.RestClientSingleHostIntegTests"
issue: "https://github.com/elastic/elasticsearch/issues/102717"
method: "testRequestResetAndAbort"
@ -14,12 +11,6 @@ tests:
- class: org.elasticsearch.smoketest.WatcherYamlRestIT
method: test {p0=watcher/usage/10_basic/Test watcher usage stats output}
issue: https://github.com/elastic/elasticsearch/issues/112189
- class: org.elasticsearch.xpack.esql.action.ManyShardsIT
method: testRejection
issue: https://github.com/elastic/elasticsearch/issues/112406
- class: org.elasticsearch.xpack.esql.action.ManyShardsIT
method: testConcurrentQueries
issue: https://github.com/elastic/elasticsearch/issues/112424
- class: org.elasticsearch.ingest.geoip.IngestGeoIpClientYamlTestSuiteIT
issue: https://github.com/elastic/elasticsearch/issues/111497
- class: org.elasticsearch.packaging.test.PackagesSecurityAutoConfigurationTests
@ -127,9 +118,6 @@ tests:
- class: org.elasticsearch.search.SearchServiceTests
method: testParseSourceValidation
issue: https://github.com/elastic/elasticsearch/issues/115936
- class: org.elasticsearch.index.reindex.ReindexNodeShutdownIT
method: testReindexWithShutdown
issue: https://github.com/elastic/elasticsearch/issues/115996
- class: org.elasticsearch.search.query.SearchQueryIT
method: testAllDocsQueryString
issue: https://github.com/elastic/elasticsearch/issues/115728
@ -162,9 +150,6 @@ tests:
- class: org.elasticsearch.xpack.deprecation.DeprecationHttpIT
method: testDeprecatedSettingsReturnWarnings
issue: https://github.com/elastic/elasticsearch/issues/108628
- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT
method: testEveryActionIsEitherOperatorOnlyOrNonOperator
issue: https://github.com/elastic/elasticsearch/issues/102992
- class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests
method: testBottomFieldSort
issue: https://github.com/elastic/elasticsearch/issues/116249
@ -211,15 +196,6 @@ tests:
issue: https://github.com/elastic/elasticsearch/issues/116777
- class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT
issue: https://github.com/elastic/elasticsearch/issues/116851
- class: org.elasticsearch.xpack.esql.analysis.VerifierTests
method: testCategorizeWithinAggregations
issue: https://github.com/elastic/elasticsearch/issues/116856
- class: org.elasticsearch.xpack.esql.analysis.VerifierTests
method: testCategorizeSingleGrouping
issue: https://github.com/elastic/elasticsearch/issues/116857
- class: org.elasticsearch.xpack.esql.analysis.VerifierTests
method: testCategorizeNestedGrouping
issue: https://github.com/elastic/elasticsearch/issues/116858
- class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT
method: testRandomDirectoryIOExceptions
issue: https://github.com/elastic/elasticsearch/issues/114824
@ -229,9 +205,6 @@ tests:
- class: org.elasticsearch.upgrades.QueryBuilderBWCIT
method: testQueryBuilderBWC {cluster=UPGRADED}
issue: https://github.com/elastic/elasticsearch/issues/116990
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/esql/esql-across-clusters/line_197}
issue: https://github.com/elastic/elasticsearch/issues/117099
- class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT
method: test {yaml=/10_apm/Test template reinstallation}
issue: https://github.com/elastic/elasticsearch/issues/116445
@ -256,6 +229,15 @@ tests:
- class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests
method: testRetryPointInTime
issue: https://github.com/elastic/elasticsearch/issues/117116
- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT
method: testInferDeploysDefaultElser
issue: https://github.com/elastic/elasticsearch/issues/114913
- class: org.elasticsearch.xpack.esql.action.EsqlActionTaskIT
method: testCancelRequestWhenFailingFetchingPages
issue: https://github.com/elastic/elasticsearch/issues/117397
- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT
method: testEveryActionIsEitherOperatorOnlyOrNonOperator
issue: https://github.com/elastic/elasticsearch/issues/102992
# Examples:
#

View file

@ -1,5 +1,3 @@
import org.elasticsearch.gradle.internal.info.BuildParams
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License

Some files were not shown because too many files have changed in this diff Show more