Merge branch 'main' into multi-project

Tim Vernum 2025-02-26 17:57:01 +11:00
commit 597ab8a7e9
137 changed files with 2214 additions and 928 deletions

View file

@@ -15,6 +15,7 @@ import org.gradle.api.GradleException;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Transformer;
import org.gradle.api.invocation.Gradle;
import org.gradle.api.plugins.JavaBasePlugin;
import org.gradle.api.plugins.JavaPluginExtension;
import org.gradle.api.tasks.Copy;
@@ -38,6 +39,15 @@ public class EclipseConventionPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
project.getPlugins().apply(EclipsePlugin.class);
Gradle gradle = project.getGradle();
boolean isEclipse = project.getProviders().systemProperty("eclipse.launcher").isPresent() || // Gradle launched from Eclipse
project.getProviders().systemProperty("eclipse.application").isPresent() || // Gradle launched from the Eclipse compiler server
gradle.getStartParameter().getTaskNames().contains("eclipse") || // Gradle launched from the command line to do eclipse stuff
gradle.getStartParameter().getTaskNames().contains("cleanEclipse");
// for eclipse ide specific hacks...
project.getExtensions().add("isEclipse", isEclipse);
EclipseModel eclipseModel = project.getExtensions().getByType(EclipseModel.class);
EclipseProject eclipseProject = eclipseModel.getProject();

View file

@@ -22,7 +22,7 @@ public enum DockerBase {
// Chainguard based wolfi image with latest jdk
// This is usually updated via renovatebot
// spotless:off
WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:d74b1fda6b7fee2c90b410df258e005c049e0672fe16d79d00e58f14fb69f90b",
WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:c66fdafe581a6ab1668a962015de4ce4666a60ed601d24f019f03bb4aaab8eeb",
"-wolfi",
"apk"
),

View file

@@ -33,11 +33,15 @@ public abstract class OracleOpenJdkToolchainResolver extends AbstractCustomJavaT
String url(String os, String arch, String extension);
}
record ReleasedJdkBuild(JavaLanguageVersion languageVersion, String version, String buildNumber, String hash) implements JdkBuild {
record ReleaseJdkBuild(JavaLanguageVersion languageVersion, String host, String version, String buildNumber, String hash)
implements
JdkBuild {
@Override
public String url(String os, String arch, String extension) {
return "https://download.oracle.com/java/GA/jdk"
return "https://"
+ host
+ "/java/GA/jdk"
+ version
+ "/"
+ hash
@@ -111,7 +115,8 @@ public abstract class OracleOpenJdkToolchainResolver extends AbstractCustomJavaT
// package private so it can be replaced by tests
List<JdkBuild> builds = List.of(
getBundledJdkBuild(),
new EarlyAccessJdkBuild(JavaLanguageVersion.of(24)),
// release candidate of JDK 24
new ReleaseJdkBuild(JavaLanguageVersion.of(24), "download.java.net", "24", "36", "1f9ff9062db4449d8ca828c504ffae90"),
new EarlyAccessJdkBuild(JavaLanguageVersion.of(25))
);
@@ -125,7 +130,7 @@ public abstract class OracleOpenJdkToolchainResolver extends AbstractCustomJavaT
String baseVersion = jdkVersionMatcher.group(1) + (jdkVersionMatcher.group(2) != null ? (jdkVersionMatcher.group(2)) : "");
String build = jdkVersionMatcher.group(3);
String hash = jdkVersionMatcher.group(5);
return new ReleasedJdkBuild(bundledJdkMajorVersion, baseVersion, build, hash);
return new ReleaseJdkBuild(bundledJdkMajorVersion, "download.oracle.com", baseVersion, build, hash);
}
/**

View file

@@ -30,9 +30,10 @@ class OracleOpenJdkToolchainResolverSpec extends AbstractToolchainResolverSpec {
return null
}
}
toolChain.builds = toolChain.builds.findAll { it instanceof OracleOpenJdkToolchainResolver.EarlyAccessJdkBuild } + [
new OracleOpenJdkToolchainResolver.ReleasedJdkBuild(
toolChain.builds = toolChain.builds + [
new OracleOpenJdkToolchainResolver.ReleaseJdkBuild(
JavaLanguageVersion.of(20),
"download.oracle.com",
"20",
"36",
"bdc68b4b9cbc4ebcb30745c85038d91d"
@@ -52,16 +53,16 @@ class OracleOpenJdkToolchainResolverSpec extends AbstractToolchainResolverSpec {
[20, anyVendor(), LINUX, AARCH64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_linux-aarch64_bin.tar.gz"],
[20, anyVendor(), WINDOWS, X86_64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_windows-x64_bin.zip"],
// https://download.java.net/java/early_access/jdk23/23/GPL/openjdk-23-ea+23_macos-aarch64_bin.tar.gz
[24, ORACLE, MAC_OS, X86_64, "https://download.java.net/java/early_access/jdk24/29/GPL/openjdk-24-ea+29_macos-x64_bin.tar.gz"],
[24, ORACLE, MAC_OS, AARCH64, "https://download.java.net/java/early_access/jdk24/29/GPL/openjdk-24-ea+29_macos-aarch64_bin.tar.gz"],
[24, ORACLE, LINUX, X86_64, "https://download.java.net/java/early_access/jdk24/29/GPL/openjdk-24-ea+29_linux-x64_bin.tar.gz"],
[24, ORACLE, LINUX, AARCH64, "https://download.java.net/java/early_access/jdk24/29/GPL/openjdk-24-ea+29_linux-aarch64_bin.tar.gz"],
[24, ORACLE, WINDOWS, X86_64, "https://download.java.net/java/early_access/jdk24/29/GPL/openjdk-24-ea+29_windows-x64_bin.zip"],
[24, anyVendor(), MAC_OS, X86_64, "https://download.java.net/java/early_access/jdk24/29/GPL/openjdk-24-ea+29_macos-x64_bin.tar.gz"],
[24, anyVendor(), MAC_OS, AARCH64, "https://download.java.net/java/early_access/jdk24/29/GPL/openjdk-24-ea+29_macos-aarch64_bin.tar.gz"],
[24, anyVendor(), LINUX, X86_64, "https://download.java.net/java/early_access/jdk24/29/GPL/openjdk-24-ea+29_linux-x64_bin.tar.gz"],
[24, anyVendor(), LINUX, AARCH64, "https://download.java.net/java/early_access/jdk24/29/GPL/openjdk-24-ea+29_linux-aarch64_bin.tar.gz"],
[24, anyVendor(), WINDOWS, X86_64, "https://download.java.net/java/early_access/jdk24/29/GPL/openjdk-24-ea+29_windows-x64_bin.zip"]]
[24, ORACLE, MAC_OS, X86_64, "https://download.java.net/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_macos-x64_bin.tar.gz"],
[24, ORACLE, MAC_OS, AARCH64, "https://download.java.net/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_macos-aarch64_bin.tar.gz"],
[24, ORACLE, LINUX, X86_64, "https://download.java.net/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_linux-x64_bin.tar.gz"],
[24, ORACLE, LINUX, AARCH64, "https://download.java.net/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_linux-aarch64_bin.tar.gz"],
[24, ORACLE, WINDOWS, X86_64, "https://download.java.net/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_windows-x64_bin.zip"],
[24, anyVendor(), MAC_OS, X86_64, "https://download.java.net/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_macos-x64_bin.tar.gz"],
[24, anyVendor(), MAC_OS, AARCH64, "https://download.java.net/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_macos-aarch64_bin.tar.gz"],
[24, anyVendor(), LINUX, X86_64, "https://download.java.net/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_linux-x64_bin.tar.gz"],
[24, anyVendor(), LINUX, AARCH64, "https://download.java.net/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_linux-aarch64_bin.tar.gz"],
[24, anyVendor(), WINDOWS, X86_64, "https://download.java.net/java/GA/jdk24/1f9ff9062db4449d8ca828c504ffae90/36/GPL/openjdk-24_windows-x64_bin.zip"]]
}
@RestoreSystemProperties
@@ -85,16 +86,6 @@ class OracleOpenJdkToolchainResolverSpec extends AbstractToolchainResolverSpec {
where:
version | vendor | os | arch | expectedUrl
24 | ORACLE | MAC_OS | X86_64 | urlPrefix(24) + "42/GPL/openjdk-24-ea+42_macos-x64_bin.tar.gz"
24 | ORACLE | MAC_OS | AARCH64 | urlPrefix(24) + "42/GPL/openjdk-24-ea+42_macos-aarch64_bin.tar.gz"
24 | ORACLE | LINUX | X86_64 | urlPrefix(24) + "42/GPL/openjdk-24-ea+42_linux-x64_bin.tar.gz"
24 | ORACLE | LINUX | AARCH64 | urlPrefix(24) + "42/GPL/openjdk-24-ea+42_linux-aarch64_bin.tar.gz"
24 | ORACLE | WINDOWS | X86_64 | urlPrefix(24) + "42/GPL/openjdk-24-ea+42_windows-x64_bin.zip"
24 | anyVendor() | MAC_OS | X86_64 | urlPrefix(24) + "42/GPL/openjdk-24-ea+42_macos-x64_bin.tar.gz"
24 | anyVendor() | MAC_OS | AARCH64 | urlPrefix(24) + "42/GPL/openjdk-24-ea+42_macos-aarch64_bin.tar.gz"
24 | anyVendor() | LINUX | X86_64 | urlPrefix(24) + "42/GPL/openjdk-24-ea+42_linux-x64_bin.tar.gz"
24 | anyVendor() | LINUX | AARCH64 | urlPrefix(24) + "42/GPL/openjdk-24-ea+42_linux-aarch64_bin.tar.gz"
24 | anyVendor() | WINDOWS | X86_64 | urlPrefix(24) + "42/GPL/openjdk-24-ea+42_windows-x64_bin.zip"
25 | ORACLE | MAC_OS | X86_64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_macos-x64_bin.tar.gz"
25 | ORACLE | MAC_OS | AARCH64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_macos-aarch64_bin.tar.gz"
25 | ORACLE | LINUX | X86_64 | urlPrefix(25) + "13/GPL/openjdk-25-ea+13_linux-x64_bin.tar.gz"

View file

@@ -247,15 +247,6 @@ allprojects {
}
}
// injecting groovy property variables into all projects
project.ext {
// for ide hacks...
isEclipse = providers.systemProperty("eclipse.launcher").isPresent() || // Detects gradle launched from Eclipse's IDE
providers.systemProperty("eclipse.application").isPresent() || // Detects gradle launched from the Eclipse compiler server
gradle.startParameter.taskNames.contains('eclipse') || // Detects gradle launched from the command line to do eclipse stuff
gradle.startParameter.taskNames.contains('cleanEclipse')
}
ext.bwc_tests_enabled = bwc_tests_enabled
// eclipse configuration

View file

@@ -28,7 +28,7 @@ final class SystemJvmOptions {
static List<String> systemJvmOptions(Settings nodeSettings, final Map<String, String> sysprops) {
String distroType = sysprops.get("es.distribution.type");
boolean isHotspot = sysprops.getOrDefault("sun.management.compiler", "").contains("HotSpot");
boolean entitlementsExplicitlyEnabled = Booleans.parseBoolean(sysprops.getOrDefault("es.entitlements.enabled", "false"));
boolean entitlementsExplicitlyEnabled = Booleans.parseBoolean(sysprops.getOrDefault("es.entitlements.enabled", "true"));
// java 24+ only supports entitlements, but it may be enabled on earlier versions explicitly
boolean useEntitlements = RuntimeVersionFeature.isSecurityManagerAvailable() == false || entitlementsExplicitlyEnabled;
return Stream.of(

View file

@@ -0,0 +1,6 @@
pr: 121827
summary: Updates to allow using Cohere binary embedding response in semantic search
queries
area: Machine Learning
type: bug
issues: []

View file

@@ -0,0 +1,6 @@
pr: 122886
summary: Add support to VALUES aggregation for spatial types
area: ES|QL
type: bug
issues:
- 122413

View file

@@ -0,0 +1,10 @@
pr: 122960
summary: Deprecate Behavioral Analytics CRUD APIs
area: Search
type: deprecation
issues: [ ]
deprecation:
title: Deprecate Behavioral Analytics CRUD APIs
area: Search
details: Behavioral Analytics has been deprecated as of 9.0.0 and will be removed in a future release. The APIs will still work for now, but will emit warning headers indicating that the API has been deprecated.
impact: Behavioral Analytics has been deprecated as of 9.0.0 and will be removed in a future release.

View file

@@ -0,0 +1,5 @@
pr: 122999
summary: Store arrays offsets for ip fields natively with synthetic source
area: Mapping
type: enhancement
issues: []

View file

@@ -0,0 +1,6 @@
pr: 123346
summary: Reduce license checks in `LicensedWriteLoadForecaster`
area: CRUD
type: bug
issues:
- 123247

View file

@@ -0,0 +1,5 @@
pr: 123384
summary: Fixing serialization of `ScriptStats` `cache_evictions_history`
area: Stats
type: bug
issues: []

View file

@@ -0,0 +1,5 @@
pr: 123403
summary: Use ordered maps for `PipelineConfiguration` xcontent deserialization
area: Ingest Node
type: bug
issues: []

View file

@@ -2,6 +2,7 @@
[[delete-analytics-collection]]
=== Delete Analytics Collection
deprecated:[9.0.0]
beta::[]
++++
@@ -14,15 +15,6 @@ beta::[]
For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs].
--
////
[source,console]
----
PUT _application/analytics/my_analytics_collection
----
// TESTSETUP
////
Removes a <<behavioral-analytics-overview,Behavioral Analytics>> Collection and its associated data stream.
[[delete-analytics-collection-request]]
@@ -59,3 +51,4 @@ The following example deletes the Analytics Collection named `my_analytics_colle
----
DELETE _application/analytics/my_analytics_collection/
----
// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated]

View file

@@ -1,6 +1,7 @@
[[behavioral-analytics-apis]]
== Behavioral Analytics APIs
deprecated:[9.0.0]
beta::[]
++++

View file

@@ -2,6 +2,7 @@
[[list-analytics-collection]]
=== List Analytics Collections
deprecated:[9.0.0]
beta::[]
++++
@@ -14,22 +15,6 @@ beta::[]
For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs].
--
////
[source,console]
----
PUT _application/analytics/my_analytics_collection
PUT _application/analytics/my_analytics_collection2
----
// TESTSETUP
[source,console]
----
DELETE _application/analytics/my_analytics_collection
DELETE _application/analytics/my_analytics_collection2
----
// TEARDOWN
////
Returns information about <<behavioral-analytics-overview,Behavioral Analytics>> Collections.
[[list-analytics-collection-request]]
@@ -46,8 +31,9 @@ Requires the `manage_behavioral_analytics` cluster privilege.
==== {api-path-parms-title}
`<criteria>`::
(optional, string)
Criteria is used to find a matching analytics collection. This could be the name of the collection or a pattern to match multiple. If not specified, will return all analytics collections.
(optional, string) Criteria is used to find a matching analytics collection.
This could be the name of the collection or a pattern to match multiple.
If not specified, will return all analytics collections.
[[list-analytics-collection-response-codes]]
==== {api-response-codes-title}
@@ -66,6 +52,7 @@ The following example lists all configured Analytics Collections:
----
GET _application/analytics/
----
// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated]
A sample response:
@@ -91,6 +78,7 @@ The following example returns the Analytics Collection that matches `my_analytic
----
GET _application/analytics/my_analytics_collection
----
// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated]
A sample response:
@@ -111,6 +99,7 @@ The following example returns all Analytics Collections prefixed with `my`:
----
GET _application/analytics/my*
----
// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated]
A sample response:

View file

@@ -2,6 +2,7 @@
[[post-analytics-collection-event]]
=== Post Event to an Analytics Collection
deprecated:[9.0.0]
beta::[]
++++
@@ -14,20 +15,6 @@ beta::[]
For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs].
--
////
[source,console]
----
PUT _application/analytics/my_analytics_collection
----
// TESTSETUP
[source,console]
----
DELETE _application/analytics/my_analytics_collection
----
// TEARDOWN
////
Post an event to a <<behavioral-analytics-overview,Behavioral Analytics>> Collection.
[[post-analytics-collection-event-request]]
@@ -105,3 +92,4 @@ POST _application/analytics/my_analytics_collection/event/search_click
}
}
----
// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated]

View file

@@ -2,6 +2,7 @@
[[put-analytics-collection]]
=== Put Analytics Collection
deprecated:[9.0.0]
beta::[]
++++
@@ -14,14 +15,6 @@ beta::[]
For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs].
--
////
[source,console]
----
DELETE _application/analytics/my_analytics_collection
----
// TEARDOWN
////
Creates a <<behavioral-analytics-overview,Behavioral Analytics>> Collection.
[[put-analytics-collection-request]]
@@ -55,3 +48,4 @@ The following example creates a new Analytics Collection called `my_analytics_co
----
PUT _application/analytics/my_analytics_collection
----
// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated]

View file

@@ -16,6 +16,30 @@
"variadic" : false,
"returnType" : "boolean"
},
{
"params" : [
{
"name" : "field",
"type" : "cartesian_point",
"optional" : false,
"description" : ""
}
],
"variadic" : false,
"returnType" : "cartesian_point"
},
{
"params" : [
{
"name" : "field",
"type" : "cartesian_shape",
"optional" : false,
"description" : ""
}
],
"variadic" : false,
"returnType" : "cartesian_shape"
},
{
"params" : [
{
@@ -52,6 +76,30 @@
"variadic" : false,
"returnType" : "double"
},
{
"params" : [
{
"name" : "field",
"type" : "geo_point",
"optional" : false,
"description" : ""
}
],
"variadic" : false,
"returnType" : "geo_point"
},
{
"params" : [
{
"name" : "field",
"type" : "geo_shape",
"optional" : false,
"description" : ""
}
],
"variadic" : false,
"returnType" : "geo_shape"
},
{
"params" : [
{

View file

@@ -1,9 +1,12 @@
[[behavioral-analytics-api]]
=== Behavioral Analytics API overview
++++
<titleabbrev>API overview</titleabbrev>
++++
deprecated:[9.0.0]
This page outlines all the APIs available for behavioral analytics and links to their documentation.
[discrete]

View file

@@ -4,6 +4,8 @@
<titleabbrev>Set up CORs</titleabbrev>
++++
deprecated:[9.0.0]
Behavioral Analytics sends events directly to the {es} API.
This means that the browser makes requests to the {es} API directly.
{es} supports https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS[Cross-Origin Resource Sharing (CORS)^], but this feature is disabled by default.
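As a minimal sketch (the origin and header values below are illustrative, not a recommended configuration), CORS is controlled through the `http.cors.*` settings in `elasticsearch.yml`:

[source,yaml]
----
# Enable CORS on the HTTP layer; it is disabled by default.
http.cors.enabled: true
# Browser origins allowed to send analytics events (illustrative value).
http.cors.allow-origin: "https://my-website.example"
# Headers the browser may include on those cross-origin requests.
http.cors.allow-headers: X-Requested-With, Content-Type, Content-Length
----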

View file

@@ -4,6 +4,8 @@
<titleabbrev>Events reference</titleabbrev>
++++
deprecated:[9.0.0]
Behavioral Analytics logs events using the {ecs-ref}/ecs-reference.html[Elastic Common Schema^], including a custom field set for analytics events.
Refer to <<behavioral-analytics-event-reference-examples,examples>> of the full data objects that are logged.

View file

@@ -4,6 +4,8 @@
<titleabbrev>View events</titleabbrev>
++++
deprecated:[9.0.0]
[TIP]
====
Refer to <<behavioral-analytics-event-reference>> for a complete list of the fields logged by events.

View file

@@ -1,6 +1,9 @@
[[behavioral-analytics-overview]]
== Search analytics
deprecated:[9.0.0]
Behavioral Analytics is an analytics event collection platform.
Use these tools to analyze your users' searching and clicking behavior.
Leverage this information to improve the relevance of your search results and identify gaps in your content.

View file

@@ -4,6 +4,8 @@
<titleabbrev>Get started</titleabbrev>
++++
deprecated:[9.0.0]
You can manage your analytics in the {kib} UI.
Go to *Search > Behavioral Analytics* to get started.

View file

@@ -14,19 +14,17 @@ import com.sun.tools.attach.AgentLoadException;
import com.sun.tools.attach.AttachNotSupportedException;
import com.sun.tools.attach.VirtualMachine;
import org.elasticsearch.core.CheckedConsumer;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.entitlement.initialization.EntitlementInitialization;
import org.elasticsearch.entitlement.runtime.api.NotEntitledException;
import org.elasticsearch.entitlement.runtime.policy.Policy;
import org.elasticsearch.logging.LogManager;
import org.elasticsearch.logging.Logger;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Stream;
@@ -43,8 +41,11 @@ public class EntitlementBootstrap {
Path[] sharedRepoDirs,
Path configDir,
Path libDir,
Path pluginsDir,
Path logsDir,
Path tempDir
Path tempDir,
Path pidFile,
Set<Class<?>> suppressFailureLogClasses
) {
public BootstrapArgs {
requireNonNull(pluginPolicies);
@@ -58,8 +59,10 @@
requireNonNull(sharedRepoDirs);
requireNonNull(configDir);
requireNonNull(libDir);
requireNonNull(pluginsDir);
requireNonNull(logsDir);
requireNonNull(tempDir);
requireNonNull(suppressFailureLogClasses);
}
}
@@ -81,8 +84,11 @@
* @param sharedRepoDirs shared repository directories for Elasticsearch
* @param configDir the config directory for Elasticsearch
* @param libDir the lib directory for Elasticsearch
* @param pluginsDir the directory where plugins are installed for Elasticsearch
* @param tempDir the temp directory for Elasticsearch
* @param logsDir the log directory for Elasticsearch
* @param pidFile path to a pid file for Elasticsearch, or {@code null} if one was not specified
* @param suppressFailureLogClasses classes for which we do not need or want to log Entitlements failures
*/
public static void bootstrap(
Map<String, Policy> pluginPolicies,
@@ -93,8 +99,11 @@
Path[] sharedRepoDirs,
Path configDir,
Path libDir,
Path pluginsDir,
Path logsDir,
Path tempDir
Path tempDir,
Path pidFile,
Set<Class<?>> suppressFailureLogClasses
) {
logger.debug("Loading entitlement agent");
if (EntitlementBootstrap.bootstrapArgs != null) {
@@ -109,12 +118,14 @@
sharedRepoDirs,
configDir,
libDir,
pluginsDir,
logsDir,
tempDir
tempDir,
pidFile,
suppressFailureLogClasses
);
exportInitializationToAgent();
loadAgent(findAgentJar());
selfTest();
}
@SuppressForbidden(reason = "The VirtualMachine API is the only way to attach a java agent dynamically")
@@ -160,50 +171,5 @@
}
}
/**
* Attempt a few sensitive operations to ensure that some are permitted and some are forbidden.
* <p>
*
* This serves two purposes:
*
* <ol>
* <li>
* a smoke test to make sure the entitlements system is not completely broken, and
* </li>
* <li>
* an early test of certain important operations so they don't fail later on at an awkward time.
* </li>
* </ol>
*
* @throws IllegalStateException if the entitlements system can't prevent an unauthorized action of our choosing
*/
private static void selfTest() {
ensureCannotStartProcess(ProcessBuilder::start);
// Try again with reflection
ensureCannotStartProcess(EntitlementBootstrap::reflectiveStartProcess);
}
private static void ensureCannotStartProcess(CheckedConsumer<ProcessBuilder, ?> startProcess) {
try {
// The command doesn't matter; it doesn't even need to exist
startProcess.accept(new ProcessBuilder(""));
} catch (NotEntitledException e) {
logger.debug("Success: Entitlement protection correctly prevented process creation");
return;
} catch (Exception e) {
throw new IllegalStateException("Failed entitlement protection self-test", e);
}
throw new IllegalStateException("Entitlement protection self-test was incorrectly permitted");
}
private static void reflectiveStartProcess(ProcessBuilder pb) throws Exception {
try {
var start = ProcessBuilder.class.getMethod("start");
start.invoke(pb);
} catch (InvocationTargetException e) {
throw (Exception) e.getCause();
}
}
private static final Logger logger = LogManager.getLogger(EntitlementBootstrap.class);
}

View file

@@ -148,35 +148,11 @@ public class EntitlementInitialization {
);
List<Scope> serverScopes = new ArrayList<>();
List<FileData> serverModuleFileDatas = new ArrayList<>();
Collections.addAll(
serverScopes,
new Scope(
"org.elasticsearch.base",
List.of(
new CreateClassLoaderEntitlement(),
new FilesEntitlement(
List.of(
FileData.ofRelativePath(Path.of(""), SHARED_REPO, READ_WRITE),
FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE)
)
)
)
),
new Scope("org.elasticsearch.xcontent", List.of(new CreateClassLoaderEntitlement())),
new Scope(
"org.elasticsearch.server",
List.of(
new ExitVMEntitlement(),
new ReadStoreAttributesEntitlement(),
new CreateClassLoaderEntitlement(),
new InboundNetworkEntitlement(),
new OutboundNetworkEntitlement(),
new LoadNativeLibrariesEntitlement(),
new ManageThreadsEntitlement(),
new FilesEntitlement(
List.of(
serverModuleFileDatas,
// Base ES directories
FileData.ofPath(bootstrapArgs.tempDir(), READ_WRITE),
FileData.ofPath(bootstrapArgs.pluginsDir(), READ),
FileData.ofPath(bootstrapArgs.configDir(), READ),
FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE),
FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE),
@@ -199,10 +175,40 @@
// io stats on Linux
FileData.ofPath(Path.of("/proc/self/mountinfo"), READ).withPlatform(LINUX),
FileData.ofPath(Path.of("/proc/diskstats"), READ).withPlatform(LINUX)
);
if (bootstrapArgs.pidFile() != null) {
serverModuleFileDatas.add(FileData.ofPath(bootstrapArgs.pidFile(), READ_WRITE));
}
Collections.addAll(
serverScopes,
new Scope(
"org.elasticsearch.base",
List.of(
new CreateClassLoaderEntitlement(),
new FilesEntitlement(
List.of(
// TODO: what in es.base is accessing shared repo?
FileData.ofRelativePath(Path.of(""), SHARED_REPO, READ_WRITE),
FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE)
)
)
)
),
new Scope("org.elasticsearch.xcontent", List.of(new CreateClassLoaderEntitlement())),
new Scope(
"org.elasticsearch.server",
List.of(
new ExitVMEntitlement(),
new ReadStoreAttributesEntitlement(),
new CreateClassLoaderEntitlement(),
new InboundNetworkEntitlement(),
new OutboundNetworkEntitlement(),
new LoadNativeLibrariesEntitlement(),
new ManageThreadsEntitlement(),
new FilesEntitlement(serverModuleFileDatas)
)
),
new Scope("org.apache.httpcomponents.httpclient", List.of(new OutboundNetworkEntitlement())),
new Scope("io.netty.transport", List.of(new InboundNetworkEntitlement(), new OutboundNetworkEntitlement())),
new Scope(
@@ -211,11 +217,7 @@
new LoadNativeLibrariesEntitlement(),
new ManageThreadsEntitlement(),
new FilesEntitlement(
List.of(
FileData.ofPath(bootstrapArgs.configDir(), READ),
FileData.ofPath(bootstrapArgs.tempDir(), READ),
FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE)
)
List.of(FileData.ofPath(bootstrapArgs.configDir(), READ), FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE))
)
)
),
@@ -223,7 +225,10 @@
"org.apache.lucene.misc",
List.of(new FilesEntitlement(List.of(FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE))))
),
new Scope("org.apache.logging.log4j.core", List.of(new ManageThreadsEntitlement())),
new Scope(
"org.apache.logging.log4j.core",
List.of(new ManageThreadsEntitlement(), new FilesEntitlement(List.of(FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE))))
),
new Scope(
"org.elasticsearch.nativeaccess",
List.of(
@@ -256,7 +261,9 @@
new FilesEntitlement(
List.of(
FileData.ofPath(Path.of("/co/elastic/apm/agent/"), READ),
FileData.ofPath(Path.of("/agent/co/elastic/apm/agent/"), READ)
FileData.ofPath(Path.of("/agent/co/elastic/apm/agent/"), READ),
FileData.ofPath(Path.of("/proc/meminfo"), READ),
FileData.ofPath(Path.of("/sys/fs/cgroup/"), READ)
)
)
);
@@ -268,7 +275,8 @@
resolver,
AGENTS_PACKAGE_NAME,
ENTITLEMENTS_MODULE,
pathLookup
pathLookup,
bootstrapArgs.suppressFailureLogClasses()
);
}

View file

@@ -10,18 +10,27 @@
package org.elasticsearch.entitlement.runtime.policy;
import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode;
import org.elasticsearch.logging.LogManager;
import org.elasticsearch.logging.Logger;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.function.BiConsumer;
import static org.elasticsearch.core.PathUtils.getDefaultFileSystem;
public final class FileAccessTree {
private static final Logger logger = LogManager.getLogger(FileAccessTree.class);
private static final String FILE_SEPARATOR = getDefaultFileSystem().getSeparator();
private final String[] readPaths;
@@ -30,6 +39,27 @@
private FileAccessTree(FilesEntitlement filesEntitlement, PathLookup pathLookup) {
List<String> readPaths = new ArrayList<>();
List<String> writePaths = new ArrayList<>();
BiConsumer<Path, Mode> addPath = (path, mode) -> {
var normalized = normalizePath(path);
if (mode == Mode.READ_WRITE) {
writePaths.add(normalized);
}
readPaths.add(normalized);
};
BiConsumer<Path, Mode> addPathAndMaybeLink = (path, mode) -> {
addPath.accept(path, mode);
// also try to follow symlinks. Lucene does this and writes to the target path.
if (Files.exists(path)) {
try {
Path realPath = path.toRealPath();
if (realPath.equals(path) == false) {
addPath.accept(realPath, mode);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
};
for (FilesEntitlement.FileData fileData : filesEntitlement.filesData()) {
var platform = fileData.platform();
if (platform != null && platform.isCurrent() == false) {
@@ -38,18 +68,20 @@
var mode = fileData.mode();
var paths = fileData.resolvePaths(pathLookup);
paths.forEach(path -> {
var normalized = normalizePath(path);
if (mode == FilesEntitlement.Mode.READ_WRITE) {
writePaths.add(normalized);
if (path == null) {
// TODO: null paths shouldn't be allowed, but they can occur due to repo paths
return;
}
readPaths.add(normalized);
addPathAndMaybeLink.accept(path, mode);
});
}
// everything has access to the temp dir
String tempDir = normalizePath(pathLookup.tempDir());
readPaths.add(tempDir);
writePaths.add(tempDir);
// everything has access to the temp dir and the jdk
addPathAndMaybeLink.accept(pathLookup.tempDir(), Mode.READ_WRITE);
// TODO: watcher uses javax.activation which looks for known mime types configuration, should this be global or explicit in watcher?
Path jdk = Paths.get(System.getProperty("java.home"));
addPathAndMaybeLink.accept(jdk.resolve("conf"), Mode.READ);
readPaths.sort(PATH_ORDER);
writePaths.sort(PATH_ORDER);

View file

@@ -11,8 +11,6 @@ package org.elasticsearch.entitlement.runtime.policy;
import org.elasticsearch.core.Strings;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap;
import org.elasticsearch.entitlement.bridge.EntitlementChecker;
import org.elasticsearch.entitlement.instrumentation.InstrumentationService;
import org.elasticsearch.entitlement.runtime.api.NotEntitledException;
import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement;
@@ -115,6 +113,7 @@ public class PolicyManager {
private final Function<Class<?>, String> pluginResolver;
private final PathLookup pathLookup;
private final FileAccessTree defaultFileAccess;
private final Set<Class<?>> mutedClasses;
public static final String ALL_UNNAMED = "ALL-UNNAMED";
@@ -126,11 +125,12 @@
.stream()
.map(ModuleReference::descriptor)
.collect(Collectors.toUnmodifiableSet());
return ModuleLayer.boot()
.modules()
.stream()
.filter(m -> systemModulesDescriptors.contains(m.getDescriptor()))
.collect(Collectors.toUnmodifiableSet());
return Stream.concat(
// entitlements is a "system" module, we can do anything from it
Stream.of(PolicyManager.class.getModule()),
// anything in the boot layer is also part of the system
ModuleLayer.boot().modules().stream().filter(m -> systemModulesDescriptors.contains(m.getDescriptor()))
).collect(Collectors.toUnmodifiableSet());
}
/**
@@ -150,7 +150,8 @@
Function<Class<?>, String> pluginResolver,
String apmAgentPackageName,
Module entitlementsModule,
PathLookup pathLookup
PathLookup pathLookup,
Set<Class<?>> suppressFailureLogClasses
) {
this.serverEntitlements = buildScopeEntitlementsMap(requireNonNull(serverPolicy));
this.apmAgentEntitlements = apmAgentEntitlements;
@ -162,6 +163,7 @@ public class PolicyManager {
this.entitlementsModule = entitlementsModule;
this.pathLookup = requireNonNull(pathLookup);
this.defaultFileAccess = FileAccessTree.of(FilesEntitlement.EMPTY, pathLookup);
this.mutedClasses = suppressFailureLogClasses;
for (var e : serverEntitlements.entrySet()) {
validateEntitlementsPerModule(SERVER_COMPONENT_NAME, e.getKey(), e.getValue());
@@ -386,7 +388,7 @@
checkFlagEntitlement(classEntitlements, OutboundNetworkEntitlement.class, requestingClass, callerClass);
}
private static void checkFlagEntitlement(
private void checkFlagEntitlement(
ModuleEntitlements classEntitlements,
Class<? extends Entitlement> entitlementClass,
Class<?> requestingClass,
@@ -446,10 +448,10 @@
);
}
private static void notEntitled(String message, Class<?> callerClass) {
private void notEntitled(String message, Class<?> callerClass) {
var exception = new NotEntitledException(message);
// don't log self tests in EntitlementBootstrap
if (EntitlementBootstrap.class.equals(callerClass) == false) {
// Don't emit a log for muted classes, e.g. classes containing self tests
if (mutedClasses.contains(callerClass) == false) {
logger.warn(message, exception);
}
throw exception;
@@ -564,10 +566,6 @@
logger.debug("Entitlement trivially allowed from system module [{}]", requestingClass.getModule().getName());
return true;
}
if (EntitlementChecker.class.isAssignableFrom(requestingClass)) {
logger.debug("Entitlement trivially allowed for EntitlementChecker class");
return true;
}
logger.trace("Entitlement not trivially allowed");
return false;
}

View file

@@ -10,11 +10,15 @@
package org.elasticsearch.entitlement.runtime.policy;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement;
import org.elasticsearch.test.ESTestCase;
import org.junit.BeforeClass;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -23,6 +27,7 @@ import java.util.Map;
import static org.elasticsearch.core.PathUtils.getDefaultFileSystem;
import static org.hamcrest.Matchers.is;
@ESTestCase.WithoutSecurityManager
public class FileAccessTreeTests extends ESTestCase {
static Path root;
@@ -211,6 +216,45 @@
assertThat(tree.canRead(path("m/n")), is(true));
}
public void testJdkAccess() {
Path jdkDir = Paths.get(System.getProperty("java.home"));
var confDir = jdkDir.resolve("conf");
var tree = accessTree(FilesEntitlement.EMPTY);
assertThat(tree.canRead(confDir), is(true));
assertThat(tree.canWrite(confDir), is(false));
assertThat(tree.canRead(jdkDir), is(false));
}
@SuppressForbidden(reason = "don't care about the directory location in tests")
public void testFollowLinks() throws IOException {
Path baseSourceDir = Files.createTempDirectory("fileaccess_source");
Path source1Dir = baseSourceDir.resolve("source1");
Files.createDirectory(source1Dir);
Path source2Dir = baseSourceDir.resolve("source2");
Files.createDirectory(source2Dir);
Path baseTargetDir = Files.createTempDirectory("fileaccess_target");
Path readTarget = baseTargetDir.resolve("read_link");
Path writeTarget = baseTargetDir.resolve("write_link");
Files.createSymbolicLink(readTarget, source1Dir);
Files.createSymbolicLink(writeTarget, source2Dir);
var tree = accessTree(entitlement(readTarget.toString(), "read", writeTarget.toString(), "read_write"));
assertThat(tree.canRead(baseSourceDir), is(false));
assertThat(tree.canRead(baseTargetDir), is(false));
assertThat(tree.canRead(readTarget), is(true));
assertThat(tree.canWrite(readTarget), is(false));
assertThat(tree.canRead(source1Dir), is(true));
assertThat(tree.canWrite(source1Dir), is(false));
assertThat(tree.canRead(writeTarget), is(true));
assertThat(tree.canWrite(writeTarget), is(true));
assertThat(tree.canRead(source2Dir), is(true));
assertThat(tree.canWrite(source2Dir), is(true));
}
public void testTempDirAccess() {
var tree = FileAccessTree.of(FilesEntitlement.EMPTY, TEST_PATH_LOOKUP);
assertThat(tree.canRead(TEST_PATH_LOOKUP.tempDir()), is(true));

View file

@@ -87,7 +87,8 @@ public class PolicyManagerTests extends ESTestCase {
c -> "plugin1",
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
);
// Any class from the current module (unnamed) will do
@@ -111,7 +112,8 @@
c -> "plugin1",
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
);
// Any class from the current module (unnamed) will do
@@ -131,7 +133,8 @@
c -> "plugin1",
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
);
// Any class from the current module (unnamed) will do
@@ -156,7 +159,8 @@
c -> "plugin2",
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
);
// Any class from the current module (unnamed) will do
@@ -174,7 +178,8 @@
c -> null,
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
);
// Tests do not run modular, so we cannot use a server class.
@@ -204,7 +209,8 @@
c -> null,
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
);
// Tests do not run modular, so we cannot use a server class.
@@ -230,7 +236,8 @@
c -> "mock-plugin",
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
);
var layer = createLayerForJar(jar, "org.example.plugin");
@@ -249,7 +256,8 @@
c -> "plugin2",
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
);
// Any class from the current module (unnamed) will do
@@ -308,7 +316,8 @@
c -> c.getPackageName().startsWith(TEST_AGENTS_PACKAGE_NAME) ? null : "test",
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
);
ModuleEntitlements agentsEntitlements = policyManager.getEntitlements(TestAgent.class);
assertThat(agentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true));
@@ -336,7 +345,8 @@
c -> "test",
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
)
);
assertEquals(
@@ -353,7 +363,8 @@
c -> "test",
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
)
);
assertEquals(
@@ -387,7 +398,8 @@
c -> "plugin1",
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
)
);
assertEquals(
@@ -407,7 +419,8 @@
c -> "test", // Insist that the class is in a plugin
TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
);
ModuleEntitlements notAgentsEntitlements = policyManager.getEntitlements(TestAgent.class);
assertThat(notAgentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(false));
@@ -428,7 +441,8 @@
c -> "test",
agentsPackageName,
entitlementsModule,
TEST_PATH_LOOKUP
TEST_PATH_LOOKUP,
Set.of()
);
}

View file

@@ -1,2 +1,2 @@
org.elasticsearch.script.expression:
org.apache.lucene.expressions:
- create_class_loader

View file

@@ -257,9 +257,6 @@ tests:
- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT
method: test {yaml=snapshot.delete/10_basic/Delete a snapshot asynchronously}
issue: https://github.com/elastic/elasticsearch/issues/122102
- class: org.elasticsearch.search.SearchCancellationIT
method: testCancelFailedSearchWhenPartialResultDisallowed
issue: https://github.com/elastic/elasticsearch/issues/121719
- class: org.elasticsearch.datastreams.TSDBPassthroughIndexingIT
issue: https://github.com/elastic/elasticsearch/issues/121716
- class: org.elasticsearch.smoketest.SmokeTestMonitoringWithSecurityIT
@@ -298,8 +295,6 @@ tests:
- class: org.elasticsearch.search.basic.SearchWithRandomDisconnectsIT
method: testSearchWithRandomDisconnects
issue: https://github.com/elastic/elasticsearch/issues/122707
- class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT
issue: https://github.com/elastic/elasticsearch/issues/122810
- class: org.elasticsearch.snapshots.DedicatedClusterSnapshotRestoreIT
method: testRestoreShrinkIndex
issue: https://github.com/elastic/elasticsearch/issues/121717
@@ -318,9 +313,6 @@ tests:
- class: org.elasticsearch.repositories.gcs.GoogleCloudStorageServiceTests
method: testClientsAreNotSharedAcrossRepositories
issue: https://github.com/elastic/elasticsearch/issues/123090
- class: org.elasticsearch.xpack.esql.action.EnrichIT
method: testAvgDurationByArtist
issue: https://github.com/elastic/elasticsearch/issues/123093
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/troubleshooting/common-issues/disk-usage-exceeded/line_65}
issue: https://github.com/elastic/elasticsearch/issues/123094
@@ -345,6 +337,21 @@ tests:
- class: org.elasticsearch.action.admin.indices.diskusage.IndexDiskUsageAnalyzerTests
method: testCompletionField
issue: https://github.com/elastic/elasticsearch/issues/123269
- class: org.elasticsearch.index.mapper.IPSyntheticSourceNativeArrayIntegrationTests
method: testSynthesizeArray
issue: https://github.com/elastic/elasticsearch/issues/123417
- class: org.elasticsearch.index.mapper.IPSyntheticSourceNativeArrayIntegrationTests
method: testSynthesizeArrayRandom
issue: https://github.com/elastic/elasticsearch/issues/123418
- class: org.elasticsearch.index.mapper.IPSyntheticSourceNativeArrayIntegrationTests
method: testSynthesizeArrayIgnoreMalformed
issue: https://github.com/elastic/elasticsearch/issues/123419
- class: org.elasticsearch.packaging.test.DockerTests
method: test151MachineDependentHeapWithSizeOverride
issue: https://github.com/elastic/elasticsearch/issues/123437
- class: org.elasticsearch.xpack.esql.action.CrossClusterQueryWithPartialResultsIT
method: testOneRemoteClusterPartial
issue: https://github.com/elastic/elasticsearch/issues/123451
# Examples:
#

View file

@@ -6,6 +6,10 @@
},
"stability": "experimental",
"visibility": "public",
"deprecated": {
"version": "9.0.0",
"description": "Behavioral Analytics has been deprecated and will be removed in a future release."
},
"headers": {
"accept": [
"application/json"

View file

@@ -6,6 +6,10 @@
},
"stability": "experimental",
"visibility": "public",
"deprecated": {
"version": "9.0.0",
"description": "Behavioral Analytics has been deprecated and will be removed in a future release."
},
"headers": {
"accept": [
"application/json"

View file

@@ -6,6 +6,10 @@
},
"stability": "experimental",
"visibility": "public",
"deprecated": {
"version": "9.0.0",
"description": "Behavioral Analytics has been deprecated and will be removed in a future release."
},
"headers": {
"accept": [
"application/json"

View file

@@ -5,6 +5,10 @@
"description": "Creates a behavioral analytics collection."
},
"stability": "experimental",
"deprecated": {
"version": "9.0.0",
"description": "Behavioral Analytics has been deprecated and will be removed in a future release."
},
"visibility": "public",
"headers": {
"accept": [

View file

@@ -267,7 +267,7 @@ public class SearchCancellationIT extends AbstractSearchCancellationTestCase {
if (letOneShardProceed.compareAndSet(false, true)) {
// Let one shard continue.
} else {
safeAwait(shardTaskLatch); // Block the other shards.
safeAwait(shardTaskLatch, TimeValue.timeValueSeconds(30)); // Block the other shards.
}
});
}

View file

@@ -275,7 +275,7 @@ public class TransportVersions {
* Reference to the earliest compatible transport version to this version of the codebase.
* This should be the transport version used by the highest minor version of the previous major.
*/
public static final TransportVersion MINIMUM_COMPATIBLE = BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1;
public static final TransportVersion MINIMUM_COMPATIBLE = INITIAL_ELASTICSEARCH_8_19;
/**
* Reference to the minimum transport version that can be used with CCS.

View file

@@ -92,6 +92,7 @@ public class TransportGetDesiredBalanceAction extends TransportMasterNodeReadAct
return;
}
var clusterInfo = clusterInfoService.getClusterInfo();
writeLoadForecaster.refreshLicense();
listener.onResponse(
new DesiredBalanceResponse(
desiredBalanceShardsAllocator.getStats(),

View file

@@ -450,6 +450,7 @@ public class MetadataRolloverService {
);
}
writeLoadForecaster.refreshLicense();
metadataBuilder = writeLoadForecaster.withWriteLoadForecastForWriteIndex(dataStreamName, metadataBuilder);
metadataBuilder = withShardSizeForecastForWriteIndex(dataStreamName, metadataBuilder);

View file

@@ -29,9 +29,11 @@ import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.util.concurrent.RunOnce;
import org.elasticsearch.core.AbstractRefCounted;
import org.elasticsearch.core.Booleans;
import org.elasticsearch.core.CheckedConsumer;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap;
import org.elasticsearch.entitlement.runtime.api.NotEntitledException;
import org.elasticsearch.entitlement.runtime.policy.Policy;
import org.elasticsearch.entitlement.runtime.policy.PolicyParserUtils;
import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement;
@@ -54,6 +56,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.Permission;
@@ -117,9 +120,9 @@ class Elasticsearch {
final PrintStream out = getStdout();
final PrintStream err = getStderr();
final ServerArgs args;
final boolean entitlementsExplicitlyEnabled = Booleans.parseBoolean(System.getProperty("es.entitlements.enabled", "false"));
final boolean entitlementsEnabled = Booleans.parseBoolean(System.getProperty("es.entitlements.enabled", "true"));
// java 24+ only supports entitlements, but it may be enabled on earlier versions explicitly
final boolean useEntitlements = RuntimeVersionFeature.isSecurityManagerAvailable() == false || entitlementsExplicitlyEnabled;
final boolean useEntitlements = RuntimeVersionFeature.isSecurityManagerAvailable() == false || entitlementsEnabled;
try {
initSecurityProperties();
@@ -251,9 +254,13 @@
nodeEnv.repoDirs(),
nodeEnv.configDir(),
nodeEnv.libDir(),
nodeEnv.pluginsDir(),
nodeEnv.logsDir(),
nodeEnv.tmpDir()
nodeEnv.tmpDir(),
args.pidFile(),
Set.of(EntitlementSelfTester.class)
);
EntitlementSelfTester.entitlementSelfTest();
} else {
assert RuntimeVersionFeature.isSecurityManagerAvailable();
// no need to explicitly enable native access for legacy code
@@ -270,6 +277,36 @@
bootstrap.setPluginsLoader(pluginsLoader);
}
private static class EntitlementSelfTester {
// check entitlements were loaded correctly. note this must be outside the entitlements lib.
private static void entitlementSelfTest() {
ensureCannotStartProcess(ProcessBuilder::start);
// Try again with reflection
ensureCannotStartProcess(EntitlementSelfTester::reflectiveStartProcess);
}
private static void ensureCannotStartProcess(CheckedConsumer<ProcessBuilder, ?> startProcess) {
try {
// The command doesn't matter; it doesn't even need to exist
startProcess.accept(new ProcessBuilder(""));
} catch (NotEntitledException e) {
return;
} catch (Exception e) {
throw new IllegalStateException("Failed entitlement protection self-test", e);
}
throw new IllegalStateException("Entitlement protection self-test was incorrectly permitted");
}
private static void reflectiveStartProcess(ProcessBuilder pb) throws Exception {
try {
var start = ProcessBuilder.class.getMethod("start");
start.invoke(pb);
} catch (InvocationTargetException e) {
throw (Exception) e.getCause();
}
}
}
private static void ensureInitialized(Class<?>... classes) {
for (final var clazz : classes) {
try {

View file

@@ -19,7 +19,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.UpdateForV9;
import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.xcontent.ConstructingObjectParser;
@@ -373,8 +372,6 @@ public class ComposableIndexTemplate implements SimpleDiffable<ComposableIndexTe
private static final ParseField HIDDEN = new ParseField("hidden");
private static final ParseField ALLOW_CUSTOM_ROUTING = new ParseField("allow_custom_routing");
// Remove this after this PR gets backported
@UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT)
private static final ParseField FAILURE_STORE = new ParseField("failure_store");
public static final ConstructingObjectParser<DataStreamTemplate, Void> PARSER = new ConstructingObjectParser<>(

View file

@@ -70,6 +70,10 @@ public class NodeAllocationStatsAndWeightsCalculator {
ClusterInfo clusterInfo,
@Nullable DesiredBalance desiredBalance
) {
if (metadata.indices().isEmpty() == false) {
// must not use licensed features when just starting up
writeLoadForecaster.refreshLicense();
}
var weightFunction = new WeightFunction(shardBalanceFactor, indexBalanceFactor, writeLoadBalanceFactor, diskUsageBalanceFactor);
var avgShardsPerNode = WeightFunction.avgShardPerNode(metadata, routingNodes);
var avgWriteLoadPerNode = WeightFunction.avgWriteLoadPerNode(writeLoadForecaster, metadata, routingNodes);

View file

@@ -21,6 +21,8 @@ public interface WriteLoadForecaster {
OptionalDouble getForecastedWriteLoad(IndexMetadata indexMetadata);
void refreshLicense();
class DefaultWriteLoadForecaster implements WriteLoadForecaster {
@Override
public ProjectMetadata.Builder withWriteLoadForecastForWriteIndex(String dataStreamName, ProjectMetadata.Builder metadata) {
@@ -31,5 +33,8 @@
public OptionalDouble getForecastedWriteLoad(IndexMetadata indexMetadata) {
return OptionalDouble.empty();
}
@Override
public void refreshLicense() {}
}
}

View file

@@ -147,6 +147,11 @@ public class BalancedShardsAllocator implements ShardsAllocator {
@Override
public void allocate(RoutingAllocation allocation) {
if (allocation.metadata().indices().isEmpty() == false) {
// must not use licensed features when just starting up
writeLoadForecaster.refreshLicense();
}
assert allocation.ignoreDisable() == false;
if (allocation.routingNodes().size() == 0) {

View file

@@ -150,6 +150,7 @@ public class IndexVersions {
public static final IndexVersion TIMESTAMP_DOC_VALUES_SPARSE_INDEX = def(9_011_0_00, Version.LUCENE_10_1_0);
public static final IndexVersion TIME_SERIES_ID_DOC_VALUES_SPARSE_INDEX = def(9_012_0_00, Version.LUCENE_10_1_0);
public static final IndexVersion SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_KEYWORD = def(9_013_0_00, Version.LUCENE_10_1_0);
public static final IndexVersion SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_IP = def(9_014_0_00, Version.LUCENE_10_1_0);
/*
* STOP! READ THIS FIRST! No, really,
* ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _

View file

@@ -13,6 +13,8 @@ import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.util.BitUtil;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.IndexVersions;
import java.io.IOException;
import java.util.ArrayList;
@@ -23,9 +25,10 @@ import java.util.TreeMap;
public class FieldArrayContext {
private static final String OFFSETS_FIELD_NAME_SUFFIX = ".offsets";
private final Map<String, Offsets> offsetsPerField = new HashMap<>();
void recordOffset(String field, String value) {
void recordOffset(String field, Comparable<?> value) {
Offsets arrayOffsets = offsetsPerField.computeIfAbsent(field, k -> new Offsets());
int nextOffset = arrayOffsets.currentOffset++;
var offsets = arrayOffsets.valueToOffsets.computeIfAbsent(value, s -> new ArrayList<>(2));
@@ -79,13 +82,53 @@
return offsetToOrd;
}
static String getOffsetsFieldName(
MapperBuilderContext context,
Mapper.SourceKeepMode indexSourceKeepMode,
boolean hasDocValues,
boolean isStored,
FieldMapper.Builder fieldMapperBuilder,
IndexVersion indexCreatedVersion,
IndexVersion minSupportedVersionMain
) {
var sourceKeepMode = fieldMapperBuilder.sourceKeepMode.orElse(indexSourceKeepMode);
if (context.isSourceSynthetic()
&& sourceKeepMode == Mapper.SourceKeepMode.ARRAYS
&& hasDocValues
&& isStored == false
&& fieldMapperBuilder.copyTo.copyToFields().isEmpty()
&& fieldMapperBuilder.multiFieldsBuilder.hasMultiFields() == false
&& indexVersionSupportStoringArraysNatively(indexCreatedVersion, minSupportedVersionMain)) {
// Skip stored fields: we will be synthesizing from stored fields, so there is no point in keeping track of the offsets.
// Skip copy_to and multi fields: supporting those requires more work. However, copy_to usage is rare in metrics and
// logging use cases.
// Keep track of value offsets so that we can reconstruct arrays from doc values in the order specified during indexing
// (if the field is stored then there is no point in doing this).
return context.buildFullName(fieldMapperBuilder.leafName() + FieldArrayContext.OFFSETS_FIELD_NAME_SUFFIX);
} else {
return null;
}
}
private static boolean indexVersionSupportStoringArraysNatively(
IndexVersion indexCreatedVersion,
IndexVersion minSupportedVersionMain
) {
return indexCreatedVersion.onOrAfter(minSupportedVersionMain)
|| indexCreatedVersion.between(
IndexVersions.SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_KEYWORD_BACKPORT_8_X,
IndexVersions.UPGRADE_TO_LUCENE_10_0_0
);
}
private static class Offsets {
int currentOffset;
// Need to use a TreeMap here, so that we maintain the order in which each value (with offsets) was inserted
// (which is the same order in which the document gets parsed), so offsets are stored in the right order. This is
// also the order in which the values get stored in SortedSetDocValues.
final Map<String, List<Integer>> valueToOffsets = new TreeMap<>();
final Map<Comparable<?>, List<Integer>> valueToOffsets = new TreeMap<>();
final List<Integer> nullValueOffsets = new ArrayList<>(2);
}

View file

@@ -55,6 +55,7 @@ import java.util.Map;
import java.util.Objects;
import java.util.function.BiFunction;
import static org.elasticsearch.index.mapper.FieldArrayContext.getOffsetsFieldName;
import static org.elasticsearch.index.mapper.IpPrefixAutomatonUtil.buildIpPrefixAutomaton;
/**
@@ -92,8 +93,15 @@ public class IpFieldMapper extends FieldMapper {
private final boolean ignoreMalformedByDefault;
private final IndexVersion indexCreatedVersion;
private final ScriptCompiler scriptCompiler;
private final SourceKeepMode indexSourceKeepMode;
public Builder(String name, ScriptCompiler scriptCompiler, boolean ignoreMalformedByDefault, IndexVersion indexCreatedVersion) {
public Builder(
String name,
ScriptCompiler scriptCompiler,
boolean ignoreMalformedByDefault,
IndexVersion indexCreatedVersion,
SourceKeepMode indexSourceKeepMode
) {
super(name);
this.scriptCompiler = Objects.requireNonNull(scriptCompiler);
this.ignoreMalformedByDefault = ignoreMalformedByDefault;
@@ -114,6 +122,7 @@
);
}
});
this.indexSourceKeepMode = indexSourceKeepMode;
}
Builder nullValue(String nullValue) {
@@ -184,6 +193,16 @@
}
hasScript = script.get() != null;
onScriptError = onScriptErrorParam.getValue();
String offsetsFieldName = getOffsetsFieldName(
context,
indexSourceKeepMode,
hasDocValues.getValue(),
stored.getValue(),
this,
indexCreatedVersion,
IndexVersions.SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_IP
);
return new IpFieldMapper(
leafName(),
new IpFieldType(
@ -198,7 +217,8 @@ public class IpFieldMapper extends FieldMapper {
),
builderParams(this, context),
context.isSourceSynthetic(),
this
this,
offsetsFieldName
);
}
@@ -206,7 +226,7 @@
public static final TypeParser PARSER = createTypeParserWithLegacySupport((n, c) -> {
boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(c.getSettings());
return new Builder(n, c.scriptCompiler(), ignoreMalformedByDefault, c.indexVersionCreated());
return new Builder(n, c.scriptCompiler(), ignoreMalformedByDefault, c.indexVersionCreated(), c.getIndexSettings().sourceKeepMode());
});
public static final class IpFieldType extends SimpleMappedFieldType {
@ -501,13 +521,16 @@ public class IpFieldMapper extends FieldMapper {
private final Script script;
private final FieldValues<InetAddress> scriptValues;
private final ScriptCompiler scriptCompiler;
private final SourceKeepMode indexSourceKeepMode;
private final String offsetsFieldName;
private IpFieldMapper(
String simpleName,
MappedFieldType mappedFieldType,
BuilderParams builderParams,
boolean storeIgnored,
Builder builder
Builder builder,
String offsetsFieldName
) {
super(simpleName, mappedFieldType, builderParams);
this.ignoreMalformedByDefault = builder.ignoreMalformedByDefault;
@ -523,6 +546,8 @@ public class IpFieldMapper extends FieldMapper {
this.scriptCompiler = builder.scriptCompiler;
this.dimension = builder.dimension.getValue();
this.storeIgnored = storeIgnored;
this.indexSourceKeepMode = builder.indexSourceKeepMode;
this.offsetsFieldName = offsetsFieldName;
}
@Override
@ -561,6 +586,14 @@ public class IpFieldMapper extends FieldMapper {
if (address != null) {
indexValue(context, address);
}
if (offsetsFieldName != null && context.isImmediateParentAnArray() && context.canAddIgnoredField()) {
if (address != null) {
BytesRef sortableValue = new BytesRef(InetAddressPoint.encode(address));
context.getOffSetContext().recordOffset(offsetsFieldName, sortableValue);
} else {
context.getOffSetContext().recordNull(offsetsFieldName);
}
}
}
private void indexValue(DocumentParserContext context, InetAddress address) {
@ -593,7 +626,9 @@ public class IpFieldMapper extends FieldMapper {
@Override
public FieldMapper.Builder getMergeBuilder() {
return new Builder(leafName(), scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion).dimension(dimension).init(this);
return new Builder(leafName(), scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion, indexSourceKeepMode).dimension(
dimension
).init(this);
}
@Override
@ -610,11 +645,15 @@ public class IpFieldMapper extends FieldMapper {
if (hasDocValues) {
return new SyntheticSourceSupport.Native(() -> {
var layers = new ArrayList<CompositeSyntheticFieldLoader.Layer>();
if (offsetsFieldName != null) {
layers.add(
new SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer(fullPath(), offsetsFieldName, IpFieldMapper::convert)
);
} else {
layers.add(new SortedSetDocValuesSyntheticFieldLoaderLayer(fullPath()) {
@Override
protected BytesRef convert(BytesRef value) {
byte[] bytes = Arrays.copyOfRange(value.bytes, value.offset, value.offset + value.length);
return new BytesRef(NetworkAddress.format(InetAddressPoint.decode(bytes)));
return IpFieldMapper.convert(value);
}
@Override
@ -623,6 +662,7 @@ public class IpFieldMapper extends FieldMapper {
return value;
}
});
}
if (ignoreMalformed) {
layers.add(new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath()));
@ -633,4 +673,14 @@ public class IpFieldMapper extends FieldMapper {
return super.syntheticSourceSupport();
}
static BytesRef convert(BytesRef value) {
byte[] bytes = Arrays.copyOfRange(value.bytes, value.offset, value.offset + value.length);
return new BytesRef(NetworkAddress.format(InetAddressPoint.decode(bytes)));
}
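For reference, a small sketch of the encode/format round trip that convert relies on: addresses are stored in the 16-byte sortable form produced by InetAddressPoint.encode and turned back into strings with NetworkAddress.format. It assumes Lucene and the Elasticsearch core utilities are on the classpath:
import java.net.InetAddress;
import java.util.Arrays;

import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.network.NetworkAddress;

class IpDocValueRoundTripDemo {
    public static void main(String[] args) throws Exception {
        InetAddress address = InetAddress.getByName("192.168.1.1");
        // The sortable doc-value form: IPv4 is widened to a 16-byte IPv6-mapped representation.
        BytesRef sortable = new BytesRef(InetAddressPoint.encode(address));
        // What IpFieldMapper.convert does when synthesizing source:
        byte[] bytes = Arrays.copyOfRange(sortable.bytes, sortable.offset, sortable.offset + sortable.length);
        System.out.println(NetworkAddress.format(InetAddressPoint.decode(bytes))); // 192.168.1.1
    }
}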
@Override
public String getOffsetFieldName() {
return offsetsFieldName;
}
}

View file

@ -85,6 +85,7 @@ import static org.apache.lucene.index.IndexWriter.MAX_TERM_LENGTH;
import static org.elasticsearch.core.Strings.format;
import static org.elasticsearch.index.IndexSettings.IGNORE_ABOVE_SETTING;
import static org.elasticsearch.index.IndexSettings.USE_DOC_VALUES_SKIPPER;
import static org.elasticsearch.index.mapper.FieldArrayContext.getOffsetsFieldName;
/**
* A field mapper for keywords. This mapper accepts strings and indexes them as-is.
@ -95,7 +96,6 @@ public final class KeywordFieldMapper extends FieldMapper {
public static final String CONTENT_TYPE = "keyword";
private static final String HOST_NAME = "host.name";
public static final String OFFSETS_FIELD_NAME_SUFFIX = ".offsets";
public static class Defaults {
public static final FieldType FIELD_TYPE;
@ -439,26 +439,15 @@ public final class KeywordFieldMapper extends FieldMapper {
super.hasScript = script.get() != null;
super.onScriptError = onScriptError.getValue();
var sourceKeepMode = this.sourceKeepMode.orElse(indexSourceKeepMode);
String offsetsFieldName;
if (context.isSourceSynthetic()
&& sourceKeepMode == SourceKeepMode.ARRAYS
&& hasDocValues()
&& fieldtype.stored() == false
&& copyTo.copyToFields().isEmpty()
&& multiFieldsBuilder.hasMultiFields() == false
&& indexVersionSupportStoringArraysNatively()) {
// Skip stored, we will be synthesizing from stored fields, no point to keep track of the offsets
// Skip copy_to and multi fields, supporting that requires more work. However, copy_to usage is rare in metrics and
// logging use cases
// keep track of value offsets so that we can reconstruct arrays from doc values in order as was specified during indexing
// (if field is stored then there is no point of doing this)
offsetsFieldName = context.buildFullName(leafName() + OFFSETS_FIELD_NAME_SUFFIX);
} else {
offsetsFieldName = null;
}
String offsetsFieldName = getOffsetsFieldName(
context,
indexSourceKeepMode,
hasDocValues.getValue(),
stored.getValue(),
this,
indexCreatedVersion,
IndexVersions.SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_KEYWORD
);
return new KeywordFieldMapper(
leafName(),
fieldtype,
@ -472,14 +461,6 @@ public final class KeywordFieldMapper extends FieldMapper {
);
}
private boolean indexVersionSupportStoringArraysNatively() {
return indexCreatedVersion.onOrAfter(IndexVersions.SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_KEYWORD)
|| indexCreatedVersion.between(
IndexVersions.SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_KEYWORD_BACKPORT_8_X,
IndexVersions.UPGRADE_TO_LUCENE_10_0_0
);
}
private FieldType resolveFieldType(
final boolean useDocValuesSkipper,
final IndexVersion indexCreatedVersion,
@ -1127,7 +1108,7 @@ public final class KeywordFieldMapper extends FieldMapper {
}
boolean indexed = indexValue(context, value);
if (offsetsFieldName != null && context.isImmediateParentAnArray() && context.getRecordedSource() == false) {
if (offsetsFieldName != null && context.isImmediateParentAnArray() && context.canAddIgnoredField()) {
if (indexed) {
context.getOffSetContext().recordOffset(offsetsFieldName, value);
} else if (value == null) {

View file

@ -20,6 +20,7 @@ import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
import java.util.function.Function;
/**
* Load {@code _source} fields from {@link SortedSetDocValues} and associated {@link BinaryDocValues}. The former contains the unique values
@ -30,11 +31,29 @@ final class SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer implements Co
private final String name;
private final String offsetsFieldName;
private final Function<BytesRef, BytesRef> converter;
private DocValuesWithOffsetsLoader docValues;
/**
* @param name The name of the field to synthesize
* @param offsetsFieldName The related offset field used to correctly synthesize the field if it is a leaf array
*/
SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer(String name, String offsetsFieldName) {
this(name, offsetsFieldName, Function.identity());
}
/**
* @param name The name of the field to synthesize
* @param offsetsFieldName The related offset field used to correctly synthesize the field if it is a leaf array
* @param converter This field value loader layer synthesizes the values read from doc values as UTF-8 strings. If the doc
*                  values aren't serializable as UTF-8 strings, it is the responsibility of the converter to convert them
*                  into a format that can be serialized as a UTF-8 string. For example, IP field mapper doc values can't
*                  directly be serialized as UTF-8 strings.
*/
SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer(String name, String offsetsFieldName, Function<BytesRef, BytesRef> converter) {
this.name = Objects.requireNonNull(name);
this.offsetsFieldName = Objects.requireNonNull(offsetsFieldName);
this.converter = Objects.requireNonNull(converter);
}
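As a rough illustration of the converter contract (not code from this change): keyword doc values are already UTF-8, so the identity function works, while binary doc values need a decoding step. The hex decoding below is invented for the demo:
import java.nio.charset.StandardCharsets;
import java.util.function.Function;

import org.apache.lucene.util.BytesRef;

class ConverterContractDemo {
    public static void main(String[] args) {
        // Keyword doc values are already valid UTF-8, so the identity converter suffices:
        Function<BytesRef, BytesRef> keywordConverter = Function.identity();
        // Hypothetical converter for binary doc values that aren't printable as-is:
        // render them as a hex string, which is serializable as UTF-8.
        Function<BytesRef, BytesRef> hexConverter = value -> {
            StringBuilder sb = new StringBuilder();
            for (int i = value.offset; i < value.offset + value.length; i++) {
                sb.append(String.format("%02x", value.bytes[i] & 0xff));
            }
            return new BytesRef(sb.toString().getBytes(StandardCharsets.UTF_8));
        };
        BytesRef binary = new BytesRef(new byte[] { (byte) 0xC0, (byte) 0xA8, 0x01, 0x01 });
        System.out.println(hexConverter.apply(binary).utf8ToString());              // c0a80101
        System.out.println(keywordConverter.apply(new BytesRef("foo")).utf8ToString()); // foo
    }
}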
@Override
@ -47,7 +66,7 @@ final class SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer implements Co
SortedSetDocValues valueDocValues = DocValues.getSortedSet(leafReader, name);
SortedDocValues offsetDocValues = DocValues.getSorted(leafReader, offsetsFieldName);
return docValues = new DocValuesWithOffsetsLoader(valueDocValues, offsetDocValues);
return docValues = new DocValuesWithOffsetsLoader(valueDocValues, offsetDocValues, converter);
}
@Override
@ -78,15 +97,21 @@ final class SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer implements Co
static final class DocValuesWithOffsetsLoader implements DocValuesLoader {
private final SortedDocValues offsetDocValues;
private final SortedSetDocValues valueDocValues;
private final Function<BytesRef, BytesRef> converter;
private final ByteArrayStreamInput scratch = new ByteArrayStreamInput();
private boolean hasValue;
private boolean hasOffset;
private int[] offsetToOrd;
DocValuesWithOffsetsLoader(SortedSetDocValues valueDocValues, SortedDocValues offsetDocValues) {
DocValuesWithOffsetsLoader(
SortedSetDocValues valueDocValues,
SortedDocValues offsetDocValues,
Function<BytesRef, BytesRef> converter
) {
this.valueDocValues = valueDocValues;
this.offsetDocValues = offsetDocValues;
this.converter = converter;
}
@Override
@ -146,7 +171,7 @@ final class SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer implements Co
long ord = ords[offset];
BytesRef c = valueDocValues.lookupOrd(ord);
// This is keyword specific and needs to be updated once support is added for other field types:
c = converter.apply(c);
b.utf8Value(c.bytes, c.offset, c.length);
}
} else if (offsetToOrd != null) {
@ -158,6 +183,7 @@ final class SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer implements Co
} else {
for (int i = 0; i < valueDocValues.docValueCount(); i++) {
BytesRef c = valueDocValues.lookupOrd(valueDocValues.nextOrd());
c = converter.apply(c);
b.utf8Value(c.bytes, c.offset, c.length);
}
}

View file

@ -46,7 +46,7 @@ public final class PipelineConfiguration implements SimpleDiffable<PipelineConfi
static {
PARSER.declareString(Builder::setId, new ParseField("id"));
PARSER.declareField(
(parser, builder, aVoid) -> builder.setConfig(parser.map()),
(parser, builder, aVoid) -> builder.setConfig(parser.mapOrdered()),
new ParseField("config"),
ObjectParser.ValueType.OBJECT
);
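The switch from map() to mapOrdered() makes the parsed config preserve key order. A minimal sketch of the difference, assuming (as in the XContent helpers) that map() is backed by a HashMap and mapOrdered() by a LinkedHashMap:
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

class MapOrderingDemo {
    public static void main(String[] args) {
        Map<String, Integer> unordered = new HashMap<>();     // analogous to parser.map()
        Map<String, Integer> ordered = new LinkedHashMap<>(); // analogous to parser.mapOrdered()
        String[] keys = { "zeta", "alpha", "mid" };
        for (int i = 0; i < keys.length; i++) {
            unordered.put(keys[i], i);
            ordered.put(keys[i], i);
        }
        System.out.println(unordered.keySet()); // iteration order is unspecified and may not match insertion
        System.out.println(ordered.keySet());   // always [zeta, alpha, mid]: insertion order is preserved
    }
}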

View file

@ -28,6 +28,7 @@ import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.collect.Iterators.single;
import static org.elasticsearch.script.ScriptContextStats.Fields.CACHE_EVICTIONS_HISTORY;
import static org.elasticsearch.script.ScriptContextStats.Fields.COMPILATIONS_HISTORY;
import static org.elasticsearch.script.ScriptStats.Fields.CACHE_EVICTIONS;
import static org.elasticsearch.script.ScriptStats.Fields.COMPILATIONS;
@ -205,7 +206,7 @@ public record ScriptStats(
builder.endObject();
}
if (cacheEvictionsHistory != null && cacheEvictionsHistory.areTimingsEmpty() == false) {
builder.startObject(COMPILATIONS_HISTORY);
builder.startObject(CACHE_EVICTIONS_HISTORY);
cacheEvictionsHistory.toXContent(builder, params);
builder.endObject();
}

View file

@ -0,0 +1,91 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.network.NetworkAddress;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
public class IPSyntheticSourceNativeArrayIntegrationTests extends NativeArrayIntegrationTestCase {
@Override
protected String getFieldTypeName() {
return "ip";
}
@Override
protected String getRandomValue() {
return NetworkAddress.format(randomIp(true));
}
public void testSynthesizeArray() throws Exception {
var arrayValues = new Object[][] {
new Object[] { "192.168.1.4", "192.168.1.3", null, "192.168.1.2", null, "192.168.1.1" },
new Object[] { null, "192.168.1.2", null, "192.168.1.1" },
new Object[] { null },
new Object[] { null, null, null },
new Object[] { "192.168.1.3", "192.168.1.2", "192.168.1.1" } };
verifySyntheticArray(arrayValues);
}
public void testSynthesizeArrayIgnoreMalformed() throws Exception {
var mapping = jsonBuilder().startObject()
.startObject("properties")
.startObject("field")
.field("type", "ip")
.field("ignore_malformed", true)
.endObject()
.endObject()
.endObject();
// Note: values that would be ignored are added at the end of the arrays; this makes testing easier because
// ignored values are always synthesized after regular values:
var arrayValues = new Object[][] {
new Object[] { null, "192.168.1.1", "192.168.1.2", "192.168.1.3", "192.168.1.4", null, "malformed" },
new Object[] { "192.168.1.1", "192.168.1.2", "malformed" },
new Object[] { "192.168.1.1", "192.168.1.1", "malformed" },
new Object[] { null, null, null, "malformed" },
new Object[] { "192.168.1.3", "192.168.1.3", "192.168.1.1", "malformed" } };
verifySyntheticArray(arrayValues, mapping, "_id", "field._ignore_malformed");
}
public void testSynthesizeObjectArray() throws Exception {
List<List<Object[]>> documents = new ArrayList<>();
{
List<Object[]> document = new ArrayList<>();
document.add(new Object[] { "192.168.1.3", "192.168.1.2", "192.168.1.1" });
document.add(new Object[] { "192.168.1.110", "192.168.1.109", "192.168.1.111" });
document.add(new Object[] { "192.168.1.2", "192.168.1.2", "192.168.1.1" });
documents.add(document);
}
{
List<Object[]> document = new ArrayList<>();
document.add(new Object[] { "192.168.1.9", "192.168.1.7", "192.168.1.5" });
document.add(new Object[] { "192.168.1.2", "192.168.1.4", "192.168.1.6" });
document.add(new Object[] { "192.168.1.7", "192.168.1.6", "192.168.1.5" });
documents.add(document);
}
verifySyntheticObjectArray(documents);
}
public void testSynthesizeArrayInObjectField() throws Exception {
List<Object[]> documents = new ArrayList<>();
documents.add(new Object[] { "192.168.1.254", "192.168.1.253", "192.168.1.252" });
documents.add(new Object[] { "192.168.1.112", "192.168.1.113", "192.168.1.114" });
documents.add(new Object[] { "192.168.1.3", "192.168.1.2", "192.168.1.1" });
documents.add(new Object[] { "192.168.1.9", "192.168.1.7", "192.168.1.5" });
documents.add(new Object[] { "192.168.1.2", "192.168.1.4", "192.168.1.6" });
documents.add(new Object[] { "192.168.1.7", "192.168.1.6", "192.168.1.5" });
verifySyntheticArrayInObject(documents);
}
}

View file

@ -439,4 +439,9 @@ public class IpFieldMapperTests extends MapperTestCase {
protected Function<Object, Object> loadBlockExpected() {
return v -> InetAddresses.toAddrString(InetAddressPoint.decode(BytesRef.deepCopyOf((BytesRef) v).bytes));
}
@Override
protected String randomSyntheticSourceKeep() {
return "all";
}
}

View file

@ -349,16 +349,24 @@ public class IpFieldTypeTests extends FieldTypeTestCase {
}
public void testFetchSourceValue() throws IOException {
MappedFieldType mapper = new IpFieldMapper.Builder("field", ScriptCompiler.NONE, true, IndexVersion.current()).build(
MapperBuilderContext.root(false, false)
).fieldType();
MappedFieldType mapper = new IpFieldMapper.Builder(
"field",
ScriptCompiler.NONE,
true,
IndexVersion.current(),
Mapper.SourceKeepMode.NONE
).build(MapperBuilderContext.root(false, false)).fieldType();
assertEquals(List.of("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8::2:1"));
assertEquals(List.of("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8:0:0:0:0:2:1"));
assertEquals(List.of("::1"), fetchSourceValue(mapper, "0:0:0:0:0:0:0:1"));
MappedFieldType nullValueMapper = new IpFieldMapper.Builder("field", ScriptCompiler.NONE, true, IndexVersion.current()).nullValue(
"2001:db8:0:0:0:0:2:7"
).build(MapperBuilderContext.root(false, false)).fieldType();
MappedFieldType nullValueMapper = new IpFieldMapper.Builder(
"field",
ScriptCompiler.NONE,
true,
IndexVersion.current(),
Mapper.SourceKeepMode.NONE
).nullValue("2001:db8:0:0:0:0:2:7").build(MapperBuilderContext.root(false, false)).fieldType();
assertEquals(List.of("2001:db8::2:7"), fetchSourceValue(nullValueMapper, null));
}
}

View file

@ -0,0 +1,41 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.network.NetworkAddress;
public class IpOffsetDocValuesLoaderTests extends OffsetDocValuesLoaderTestCase {
public void testOffsetArray() throws Exception {
verifyOffsets("{\"field\":[\"192.168.1.1\",\"192.168.1.3\",\"192.168.1.2\",\"192.168.1.1\",\"192.168.1.9\",\"192.168.1.3\"]}");
verifyOffsets("{\"field\":[\"192.168.1.4\",null,\"192.168.1.3\",\"192.168.1.2\",null,\"192.168.1.1\"]}");
}
public void testOffsetNestedArray() throws Exception {
verifyOffsets(
"{\"field\":[\"192.168.1.2\",[\"192.168.1.1\"],[\"192.168.1.0\"],null,\"192.168.1.0\"]}",
"{\"field\":[\"192.168.1.2\",\"192.168.1.1\",\"192.168.1.0\",null,\"192.168.1.0\"]}"
);
verifyOffsets(
"{\"field\":[\"192.168.1.6\",[\"192.168.1.5\", [\"192.168.1.4\"]],[\"192.168.1.3\", [\"192.168.1.2\"]],null,\"192.168.1.1\"]}",
"{\"field\":[\"192.168.1.6\",\"192.168.1.5\",\"192.168.1.4\",\"192.168.1.3\",\"192.168.1.2\",null,\"192.168.1.1\"]}"
);
}
@Override
protected String getFieldTypeName() {
return "ip";
}
@Override
protected String randomValue() {
return NetworkAddress.format(randomIp(true));
}
}

View file

@ -9,145 +9,7 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DirectoryReader;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer.DocValuesWithOffsetsLoader;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.nullValue;
public class KeywordOffsetDocValuesLoaderTests extends MapperServiceTestCase {
@Override
protected Settings getIndexSettings() {
return Settings.builder()
.put("index.mapping.source.mode", "synthetic")
.put("index.mapping.synthetic_source_keep", "arrays")
.build();
}
public void testOffsetArrayNoDocValues() throws Exception {
String mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "keyword",
"doc_values": false
}
}
}
}
""";
try (var mapperService = createMapperService(mapping)) {
var fieldMapper = mapperService.mappingLookup().getMapper("field");
assertThat(fieldMapper.getOffsetFieldName(), nullValue());
}
}
public void testOffsetArrayStored() throws Exception {
String mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "keyword",
"store": true
}
}
}
}
""";
try (var mapperService = createMapperService(mapping)) {
var fieldMapper = mapperService.mappingLookup().getMapper("field");
assertThat(fieldMapper.getOffsetFieldName(), nullValue());
}
}
public void testOffsetMultiFields() throws Exception {
String mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "keyword",
"fields": {
"sub": {
"type": "text"
}
}
}
}
}
}
""";
try (var mapperService = createMapperService(mapping)) {
var fieldMapper = mapperService.mappingLookup().getMapper("field");
assertThat(fieldMapper.getOffsetFieldName(), nullValue());
}
}
public void testOffsetArrayNoSyntheticSource() throws Exception {
String mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "keyword"
}
}
}
}
""";
try (var mapperService = createMapperService(Settings.EMPTY, mapping)) {
var fieldMapper = mapperService.mappingLookup().getMapper("field");
assertThat(fieldMapper.getOffsetFieldName(), nullValue());
}
}
public void testOffsetArrayNoSourceArrayKeep() throws Exception {
var settingsBuilder = Settings.builder().put("index.mapping.source.mode", "synthetic");
String mapping;
if (randomBoolean()) {
mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "keyword",
"synthetic_source_keep": "{{synthetic_source_keep}}"
}
}
}
}
""".replace("{{synthetic_source_keep}}", randomBoolean() ? "none" : "all");
} else {
mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "keyword"
}
}
}
}
""";
if (randomBoolean()) {
settingsBuilder.put("index.mapping.synthetic_source_keep", "none");
}
}
try (var mapperService = createMapperService(settingsBuilder.build(), mapping)) {
var fieldMapper = mapperService.mappingLookup().getMapper("field");
assertThat(fieldMapper.getOffsetFieldName(), nullValue());
}
}
public class KeywordOffsetDocValuesLoaderTests extends OffsetDocValuesLoaderTestCase {
public void testOffsetArray() throws Exception {
verifyOffsets("{\"field\":[\"z\",\"x\",\"y\",\"c\",\"b\",\"a\"]}");
@ -162,76 +24,13 @@ public class KeywordOffsetDocValuesLoaderTests extends MapperServiceTestCase {
);
}
public void testOffsetEmptyArray() throws Exception {
verifyOffsets("{\"field\":[]}");
@Override
protected String getFieldTypeName() {
return "keyword";
}
public void testOffsetArrayWithNulls() throws Exception {
verifyOffsets("{\"field\":[null,null,null]}");
}
public void testOffsetArrayRandom() throws Exception {
StringBuilder values = new StringBuilder();
int numValues = randomIntBetween(0, 256);
for (int i = 0; i < numValues; i++) {
if (randomInt(10) == 1) {
values.append("null");
} else {
values.append('"').append(randomAlphanumericOfLength(2)).append('"');
}
if (i != (numValues - 1)) {
values.append(',');
@Override
protected String randomValue() {
return randomAlphanumericOfLength(2);
}
}
verifyOffsets("{\"field\":[" + values + "]}");
}
private void verifyOffsets(String source) throws IOException {
verifyOffsets(source, source);
}
private void verifyOffsets(String source, String expectedSource) throws IOException {
String mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "keyword"
}
}
}
}
""";
verifyOffsets(mapping, source, expectedSource);
}
private void verifyOffsets(String mapping, String source, String expectedSource) throws IOException {
try (var mapperService = createMapperService(mapping)) {
var mapper = mapperService.documentMapper();
try (var directory = newDirectory()) {
var iw = indexWriterForSyntheticSource(directory);
var doc = mapper.parse(new SourceToParse("_id", new BytesArray(source), XContentType.JSON));
doc.updateSeqID(0, 0);
doc.version().setLongValue(0);
iw.addDocuments(doc.docs());
iw.close();
try (var indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) {
var layer = new SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer("field", "field.offsets");
var leafReader = indexReader.leaves().getFirst().reader();
var loader = (DocValuesWithOffsetsLoader) layer.docValuesLoader(leafReader, new int[] { 0 });
assertTrue(loader.advanceToDoc(0));
assertTrue(loader.count() > 0);
XContentBuilder builder = jsonBuilder().startObject();
builder.startArray("field");
loader.write(builder);
builder.endArray().endObject();
var actual = Strings.toString(builder);
assertEquals(expectedSource, actual);
}
}
}
}
}

View file

@ -9,38 +9,24 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReader;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.hamcrest.Matchers;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.nullValue;
public class KeywordSyntheticSourceNativeArrayIntegrationTests extends ESSingleNodeTestCase {
public class KeywordSyntheticSourceNativeArrayIntegrationTests extends NativeArrayIntegrationTestCase {
@Override
protected String getFieldTypeName() {
return "keyword";
}
@Override
protected String getRandomValue() {
return RandomStrings.randomAsciiOfLength(random(), 8);
}
public void testSynthesizeArray() throws Exception {
var arrayValues = new Object[][] {
@ -52,16 +38,6 @@ public class KeywordSyntheticSourceNativeArrayIntegrationTests extends ESSingleN
verifySyntheticArray(arrayValues);
}
public void testSynthesizeEmptyArray() throws Exception {
var arrayValues = new Object[][] { new Object[] {} };
verifySyntheticArray(arrayValues);
}
public void testSynthesizeArrayRandom() throws Exception {
var arrayValues = new Object[][] { generateRandomStringArray(64, 8, false, true) };
verifySyntheticArray(arrayValues);
}
public void testSynthesizeArrayIgnoreAbove() throws Exception {
var mapping = jsonBuilder().startObject()
.startObject("properties")
@ -79,7 +55,7 @@ public class KeywordSyntheticSourceNativeArrayIntegrationTests extends ESSingleN
new Object[] { "123", "1234", "12345" },
new Object[] { null, null, null, "blabla" },
new Object[] { "1", "2", "3", "blabla" } };
verifySyntheticArray(arrayValues, mapping, 4, "_id", "field._original");
verifySyntheticArray(arrayValues, mapping, "_id", "field._original");
}
public void testSynthesizeObjectArray() throws Exception {
@ -112,237 +88,4 @@ public class KeywordSyntheticSourceNativeArrayIntegrationTests extends ESSingleN
verifySyntheticArrayInObject(documents);
}
public void testSynthesizeArrayInObjectFieldRandom() throws Exception {
List<Object[]> documents = new ArrayList<>();
int numDocs = randomIntBetween(8, 256);
for (int i = 0; i < numDocs; i++) {
documents.add(generateRandomStringArray(64, 8, false, true));
}
verifySyntheticArrayInObject(documents);
}
private void verifySyntheticArray(Object[][] arrays) throws IOException {
var mapping = jsonBuilder().startObject()
.startObject("properties")
.startObject("field")
.field("type", "keyword")
.endObject()
.endObject()
.endObject();
verifySyntheticArray(arrays, mapping, null, "_id");
}
private void verifySyntheticArray(Object[][] arrays, XContentBuilder mapping, Integer ignoreAbove, String... expectedStoredFields)
throws IOException {
var indexService = createIndex(
"test-index",
Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(),
mapping
);
for (int i = 0; i < arrays.length; i++) {
var array = arrays[i];
var indexRequest = new IndexRequest("test-index");
indexRequest.id("my-id-" + i);
var source = jsonBuilder().startObject();
if (array != null) {
source.startArray("field");
for (Object arrayValue : array) {
source.value(arrayValue);
}
source.endArray();
} else {
source.field("field").nullValue();
}
indexRequest.source(source.endObject());
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
client().index(indexRequest).actionGet();
var searchRequest = new SearchRequest("test-index");
searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i));
var searchResponse = client().search(searchRequest).actionGet();
try {
var hit = searchResponse.getHits().getHits()[0];
assertThat(hit.getId(), equalTo("my-id-" + i));
var sourceAsMap = hit.getSourceAsMap();
assertThat(sourceAsMap, hasKey("field"));
var actualArray = (List<?>) sourceAsMap.get("field");
if (array == null) {
assertThat(actualArray, nullValue());
} else if (array.length == 0) {
assertThat(actualArray, empty());
} else {
assertThat(actualArray, Matchers.contains(array));
}
} finally {
searchResponse.decRef();
}
}
try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) {
var reader = searcher.getDirectoryReader();
for (int i = 0; i < arrays.length; i++) {
var document = reader.storedFields().document(i);
// Verify that there is no ignored source:
Set<String> storedFieldNames = new LinkedHashSet<>(document.getFields().stream().map(IndexableField::name).toList());
if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) {
assertThat(storedFieldNames, contains(expectedStoredFields));
} else {
String[] copyExpectedStoredFields = new String[expectedStoredFields.length + 1];
System.arraycopy(expectedStoredFields, 0, copyExpectedStoredFields, 0, expectedStoredFields.length);
copyExpectedStoredFields[copyExpectedStoredFields.length - 1] = "_recovery_source";
assertThat(storedFieldNames, containsInAnyOrder(copyExpectedStoredFields));
}
}
var fieldInfo = FieldInfos.getMergedFieldInfos(reader).fieldInfo("field.offsets");
assertThat(fieldInfo.getDocValuesType(), equalTo(DocValuesType.SORTED));
}
}
private void verifySyntheticObjectArray(List<List<Object[]>> documents) throws IOException {
var indexService = createIndex(
"test-index",
Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(),
jsonBuilder().startObject()
.startObject("properties")
.startObject("object")
.startObject("properties")
.startObject("field")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
);
for (int i = 0; i < documents.size(); i++) {
var document = documents.get(i);
var indexRequest = new IndexRequest("test-index");
indexRequest.id("my-id-" + i);
var source = jsonBuilder().startObject();
source.startArray("object");
for (Object[] arrayValue : document) {
source.startObject();
source.array("field", arrayValue);
source.endObject();
}
source.endArray();
indexRequest.source(source.endObject());
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
client().index(indexRequest).actionGet();
var searchRequest = new SearchRequest("test-index");
searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i));
var searchResponse = client().search(searchRequest).actionGet();
try {
var hit = searchResponse.getHits().getHits()[0];
assertThat(hit.getId(), equalTo("my-id-" + i));
var sourceAsMap = hit.getSourceAsMap();
var objectArray = (List<?>) sourceAsMap.get("object");
for (int j = 0; j < document.size(); j++) {
var expected = document.get(j);
List<?> actual = (List<?>) ((Map<?, ?>) objectArray.get(j)).get("field");
assertThat(actual, Matchers.contains(expected));
}
} finally {
searchResponse.decRef();
}
}
indexService.getShard(0).forceMerge(new ForceMergeRequest("test-index").maxNumSegments(1));
try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) {
var reader = searcher.getDirectoryReader();
for (int i = 0; i < documents.size(); i++) {
var document = reader.storedFields().document(i);
// Verify that there is ignored source because of leaf array being wrapped by object array:
List<String> storedFieldNames = document.getFields().stream().map(IndexableField::name).toList();
if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) {
assertThat(storedFieldNames, contains("_id", "_ignored_source"));
} else {
assertThat(storedFieldNames, containsInAnyOrder("_id", "_ignored_source", "_recovery_source"));
}
// Verify that there is no offset field:
LeafReader leafReader = reader.leaves().get(0).reader();
for (FieldInfo fieldInfo : leafReader.getFieldInfos()) {
String name = fieldInfo.getName();
assertFalse("expected no field that contains [offsets] in name, but found [" + name + "]", name.contains("offsets"));
}
var binaryDocValues = leafReader.getBinaryDocValues("object.field.offsets");
assertThat(binaryDocValues, nullValue());
}
}
}
private void verifySyntheticArrayInObject(List<Object[]> documents) throws IOException {
var indexService = createIndex(
"test-index",
Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(),
jsonBuilder().startObject()
.startObject("properties")
.startObject("object")
.startObject("properties")
.startObject("field")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
);
for (int i = 0; i < documents.size(); i++) {
var arrayValue = documents.get(i);
var indexRequest = new IndexRequest("test-index");
indexRequest.id("my-id-" + i);
var source = jsonBuilder().startObject();
source.startObject("object");
source.array("field", arrayValue);
source.endObject();
indexRequest.source(source.endObject());
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
client().index(indexRequest).actionGet();
var searchRequest = new SearchRequest("test-index");
searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i));
var searchResponse = client().search(searchRequest).actionGet();
try {
var hit = searchResponse.getHits().getHits()[0];
assertThat(hit.getId(), equalTo("my-id-" + i));
var sourceAsMap = hit.getSourceAsMap();
var objectArray = (Map<?, ?>) sourceAsMap.get("object");
List<?> actual = (List<?>) objectArray.get("field");
if (arrayValue == null) {
assertThat(actual, nullValue());
} else if (arrayValue.length == 0) {
assertThat(actual, empty());
} else {
assertThat(actual, Matchers.contains(arrayValue));
}
} finally {
searchResponse.decRef();
}
}
indexService.getShard(0).forceMerge(new ForceMergeRequest("test-index").maxNumSegments(1));
try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) {
var reader = searcher.getDirectoryReader();
for (int i = 0; i < documents.size(); i++) {
var document = reader.storedFields().document(i);
// Verify that there is no ignored source:
Set<String> storedFieldNames = new LinkedHashSet<>(document.getFields().stream().map(IndexableField::name).toList());
if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) {
assertThat(storedFieldNames, contains("_id"));
} else {
assertThat(storedFieldNames, containsInAnyOrder("_id", "_recovery_source"));
}
}
var fieldInfo = FieldInfos.getMergedFieldInfos(reader).fieldInfo("object.field.offsets");
assertThat(fieldInfo.getDocValuesType(), equalTo(DocValuesType.SORTED));
}
}
}

View file

@ -0,0 +1,299 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReader;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.nullValue;
public abstract class NativeArrayIntegrationTestCase extends ESSingleNodeTestCase {
public void testSynthesizeEmptyArray() throws Exception {
var arrayValues = new Object[][] { new Object[] {} };
verifySyntheticArray(arrayValues);
}
public void testSynthesizeArrayRandom() throws Exception {
var arrayValues = new Object[randomInt(64)];
for (int j = 0; j < arrayValues.length; j++) {
arrayValues[j] = getRandomValue();
}
verifySyntheticArray(new Object[][] { arrayValues });
}
public void testSynthesizeArrayInObjectFieldRandom() throws Exception {
List<Object[]> documents = new ArrayList<>();
int numDocs = randomIntBetween(8, 256);
for (int i = 0; i < numDocs; i++) {
Object[] document = new Object[randomInt(64)];
for (int j = 0; j < document.length; j++) {
document[j] = getRandomValue();
}
documents.add(document);
}
verifySyntheticArrayInObject(documents);
}
protected abstract String getFieldTypeName();
protected abstract String getRandomValue();
protected void verifySyntheticArray(Object[][] arrays) throws IOException {
var mapping = jsonBuilder().startObject()
.startObject("properties")
.startObject("field")
.field("type", getFieldTypeName())
.endObject()
.endObject()
.endObject();
verifySyntheticArray(arrays, mapping, "_id");
}
protected void verifySyntheticArray(Object[][] arrays, XContentBuilder mapping, String... expectedStoredFields) throws IOException {
var indexService = createIndex(
"test-index",
Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(),
mapping
);
for (int i = 0; i < arrays.length; i++) {
var array = arrays[i];
var indexRequest = new IndexRequest("test-index");
indexRequest.id("my-id-" + i);
var source = jsonBuilder().startObject();
if (array != null) {
source.startArray("field");
for (Object arrayValue : array) {
source.value(arrayValue);
}
source.endArray();
} else {
source.field("field").nullValue();
}
indexRequest.source(source.endObject());
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
client().index(indexRequest).actionGet();
var searchRequest = new SearchRequest("test-index");
searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i));
var searchResponse = client().search(searchRequest).actionGet();
try {
var hit = searchResponse.getHits().getHits()[0];
assertThat(hit.getId(), equalTo("my-id-" + i));
var sourceAsMap = hit.getSourceAsMap();
assertThat(sourceAsMap, hasKey("field"));
var actualArray = (List<?>) sourceAsMap.get("field");
if (array == null) {
assertThat(actualArray, nullValue());
} else if (array.length == 0) {
assertThat(actualArray, empty());
} else {
assertThat(actualArray, Matchers.contains(array));
}
} finally {
searchResponse.decRef();
}
}
try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) {
var reader = searcher.getDirectoryReader();
for (int i = 0; i < arrays.length; i++) {
var document = reader.storedFields().document(i);
// Verify that there is no ignored source:
Set<String> storedFieldNames = new LinkedHashSet<>(document.getFields().stream().map(IndexableField::name).toList());
if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) {
assertThat(storedFieldNames, contains(expectedStoredFields));
} else {
var copyExpectedStoredFields = new String[expectedStoredFields.length + 1];
System.arraycopy(expectedStoredFields, 0, copyExpectedStoredFields, 0, expectedStoredFields.length);
copyExpectedStoredFields[copyExpectedStoredFields.length - 1] = "_recovery_source";
assertThat(storedFieldNames, containsInAnyOrder(copyExpectedStoredFields));
}
}
var fieldInfo = FieldInfos.getMergedFieldInfos(reader).fieldInfo("field.offsets");
assertThat(fieldInfo.getDocValuesType(), equalTo(DocValuesType.SORTED));
}
}
protected void verifySyntheticObjectArray(List<List<Object[]>> documents) throws IOException {
var indexService = createIndex(
"test-index",
Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(),
jsonBuilder().startObject()
.startObject("properties")
.startObject("object")
.startObject("properties")
.startObject("field")
.field("type", getFieldTypeName())
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
);
for (int i = 0; i < documents.size(); i++) {
var document = documents.get(i);
var indexRequest = new IndexRequest("test-index");
indexRequest.id("my-id-" + i);
var source = jsonBuilder().startObject();
source.startArray("object");
for (Object[] arrayValue : document) {
source.startObject();
source.array("field", arrayValue);
source.endObject();
}
source.endArray();
indexRequest.source(source.endObject());
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
client().index(indexRequest).actionGet();
var searchRequest = new SearchRequest("test-index");
searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i));
var searchResponse = client().search(searchRequest).actionGet();
try {
var hit = searchResponse.getHits().getHits()[0];
assertThat(hit.getId(), equalTo("my-id-" + i));
var sourceAsMap = hit.getSourceAsMap();
var objectArray = (List<?>) sourceAsMap.get("object");
for (int j = 0; j < document.size(); j++) {
var expected = document.get(j);
List<?> actual = (List<?>) ((Map<?, ?>) objectArray.get(j)).get("field");
assertThat(actual, Matchers.contains(expected));
}
} finally {
searchResponse.decRef();
}
}
indexService.getShard(0).forceMerge(new ForceMergeRequest("test-index").maxNumSegments(1));
try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) {
var reader = searcher.getDirectoryReader();
for (int i = 0; i < documents.size(); i++) {
var document = reader.storedFields().document(i);
// Verify that there is ignored source because the leaf array is wrapped by an object array:
List<String> storedFieldNames = document.getFields().stream().map(IndexableField::name).toList();
if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) {
assertThat(storedFieldNames, contains("_id", "_ignored_source"));
} else {
assertThat(storedFieldNames, containsInAnyOrder("_id", "_ignored_source", "_recovery_source"));
}
// Verify that there is no offset field:
LeafReader leafReader = reader.leaves().get(0).reader();
for (FieldInfo fieldInfo : leafReader.getFieldInfos()) {
String name = fieldInfo.getName();
assertFalse("expected no field that contains [offsets] in name, but found [" + name + "]", name.contains("offsets"));
}
var binaryDocValues = leafReader.getBinaryDocValues("object.field.offsets");
assertThat(binaryDocValues, nullValue());
}
}
}
protected void verifySyntheticArrayInObject(List<Object[]> documents) throws IOException {
var indexService = createIndex(
"test-index",
Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(),
jsonBuilder().startObject()
.startObject("properties")
.startObject("object")
.startObject("properties")
.startObject("field")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
);
for (int i = 0; i < documents.size(); i++) {
var arrayValue = documents.get(i);
var indexRequest = new IndexRequest("test-index");
indexRequest.id("my-id-" + i);
var source = jsonBuilder().startObject();
source.startObject("object");
source.array("field", arrayValue);
source.endObject();
indexRequest.source(source.endObject());
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
client().index(indexRequest).actionGet();
var searchRequest = new SearchRequest("test-index");
searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i));
var searchResponse = client().search(searchRequest).actionGet();
try {
var hit = searchResponse.getHits().getHits()[0];
assertThat(hit.getId(), equalTo("my-id-" + i));
var sourceAsMap = hit.getSourceAsMap();
var objectArray = (Map<?, ?>) sourceAsMap.get("object");
List<?> actual = (List<?>) objectArray.get("field");
if (arrayValue == null) {
assertThat(actual, nullValue());
} else if (arrayValue.length == 0) {
assertThat(actual, empty());
} else {
assertThat(actual, Matchers.contains(arrayValue));
}
} finally {
searchResponse.decRef();
}
}
indexService.getShard(0).forceMerge(new ForceMergeRequest("test-index").maxNumSegments(1));
try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) {
var reader = searcher.getDirectoryReader();
for (int i = 0; i < documents.size(); i++) {
var document = reader.storedFields().document(i);
// Verify that there is no ignored source:
Set<String> storedFieldNames = new LinkedHashSet<>(document.getFields().stream().map(IndexableField::name).toList());
if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) {
assertThat(storedFieldNames, contains("_id"));
} else {
assertThat(storedFieldNames, containsInAnyOrder("_id", "_recovery_source"));
}
}
var fieldInfo = FieldInfos.getMergedFieldInfos(reader).fieldInfo("object.field.offsets");
assertThat(fieldInfo.getDocValuesType(), equalTo(DocValuesType.SORTED));
}
}
}

View file

@ -0,0 +1,230 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DirectoryReader;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.nullValue;
public abstract class OffsetDocValuesLoaderTestCase extends MapperServiceTestCase {
@Override
protected Settings getIndexSettings() {
return Settings.builder()
.put("index.mapping.source.mode", "synthetic")
.put("index.mapping.synthetic_source_keep", "arrays")
.build();
}
public void testOffsetArrayNoDocValues() throws Exception {
String mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "{{type}}",
"doc_values": false
}
}
}
}
""".replace("{{type}}", getFieldTypeName());
try (var mapperService = createMapperService(mapping)) {
var fieldMapper = mapperService.mappingLookup().getMapper("field");
assertThat(fieldMapper.getOffsetFieldName(), nullValue());
}
}
public void testOffsetArrayStored() throws Exception {
String mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "{{type}}",
"store": true
}
}
}
}
""".replace("{{type}}", getFieldTypeName());
try (var mapperService = createMapperService(mapping)) {
var fieldMapper = mapperService.mappingLookup().getMapper("field");
assertThat(fieldMapper.getOffsetFieldName(), nullValue());
}
}
public void testOffsetMultiFields() throws Exception {
String mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "{{type}}",
"fields": {
"sub": {
"type": "text"
}
}
}
}
}
}
""".replace("{{type}}", getFieldTypeName());
try (var mapperService = createMapperService(mapping)) {
var fieldMapper = mapperService.mappingLookup().getMapper("field");
assertThat(fieldMapper.getOffsetFieldName(), nullValue());
}
}
public void testOffsetArrayNoSyntheticSource() throws Exception {
String mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "{{type}}"
}
}
}
}
""".replace("{{type}}", getFieldTypeName());
try (var mapperService = createMapperService(Settings.EMPTY, mapping)) {
var fieldMapper = mapperService.mappingLookup().getMapper("field");
assertThat(fieldMapper.getOffsetFieldName(), nullValue());
}
}
public void testOffsetArrayNoSourceArrayKeep() throws Exception {
var settingsBuilder = Settings.builder().put("index.mapping.source.mode", "synthetic");
String mapping;
if (randomBoolean()) {
mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "{{type}}",
"synthetic_source_keep": "{{synthetic_source_keep}}"
}
}
}
}
""".replace("{{synthetic_source_keep}}", randomBoolean() ? "none" : "all").replace("{{type}}", getFieldTypeName());
} else {
mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "{{type}}"
}
}
}
}
""".replace("{{type}}", getFieldTypeName());
if (randomBoolean()) {
settingsBuilder.put("index.mapping.synthetic_source_keep", "none");
}
}
try (var mapperService = createMapperService(settingsBuilder.build(), mapping)) {
var fieldMapper = mapperService.mappingLookup().getMapper("field");
assertThat(fieldMapper.getOffsetFieldName(), nullValue());
}
}
public void testOffsetEmptyArray() throws Exception {
verifyOffsets("{\"field\":[]}");
}
public void testOffsetArrayWithNulls() throws Exception {
verifyOffsets("{\"field\":[null,null,null]}");
verifyOffsets("{\"field\":[null,[null],null]}", "{\"field\":[null,null,null]}");
}
public void testOffsetArrayRandom() throws Exception {
StringBuilder values = new StringBuilder();
int numValues = randomIntBetween(0, 256);
for (int i = 0; i < numValues; i++) {
if (randomInt(10) == 1) {
values.append("null");
} else {
String randomValue = randomValue();
values.append('"').append(randomValue).append('"');
}
if (i != (numValues - 1)) {
values.append(',');
}
}
verifyOffsets("{\"field\":[" + values + "]}");
}
protected abstract String getFieldTypeName();
protected abstract String randomValue();
protected void verifyOffsets(String source) throws IOException {
verifyOffsets(source, source);
}
protected void verifyOffsets(String source, String expectedSource) throws IOException {
String mapping = """
{
"_doc": {
"properties": {
"field": {
"type": "{{type}}"
}
}
}
}
""".replace("{{type}}", getFieldTypeName());
verifyOffsets(mapping, source, expectedSource);
}
private void verifyOffsets(String mapping, String source, String expectedSource) throws IOException {
try (var mapperService = createMapperService(mapping)) {
var mapper = mapperService.documentMapper();
try (var directory = newDirectory()) {
var iw = indexWriterForSyntheticSource(directory);
var doc = mapper.parse(new SourceToParse("_id", new BytesArray(source), XContentType.JSON));
doc.updateSeqID(0, 0);
doc.version().setLongValue(0);
iw.addDocuments(doc.docs());
iw.close();
try (var indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) {
FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field");
var syntheticSourceLoader = fieldMapper.syntheticFieldLoader();
var leafReader = indexReader.leaves().getFirst().reader();
var docValueLoader = syntheticSourceLoader.docValuesLoader(leafReader, new int[] { 0 });
assertTrue(docValueLoader.advanceToDoc(0));
assertTrue(syntheticSourceLoader.hasValue());
XContentBuilder builder = jsonBuilder().startObject();
syntheticSourceLoader.write(builder);
builder.endObject();
var actual = Strings.toString(builder);
assertEquals(expectedSource, actual);
}
}
}
}
}

View file

@ -28,9 +28,11 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import static org.hamcrest.Matchers.anEmptyMap;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.sameInstance;
@ -143,6 +145,41 @@ public class PipelineConfigurationTests extends AbstractXContentTestCase<Pipelin
}
}
@SuppressWarnings("unchecked")
public void testMapKeyOrderingRoundTrip() throws IOException {
// make up two random keys
String key1 = randomAlphaOfLength(10);
String key2 = randomValueOtherThan(key1, () -> randomAlphaOfLength(10));
// stick them as mappings onto themselves in the _meta of a pipeline configuration
// this happens to use the _meta as a convenient map to test that the ordering of the key sets is the same
String configJson = Strings.format("""
{"description": "blah", "_meta" : {"foo": "bar", "%s": "%s", "%s": "%s"}}""", key1, key1, key2, key2);
PipelineConfiguration configuration = new PipelineConfiguration(
"1",
new BytesArray(configJson.getBytes(StandardCharsets.UTF_8)),
XContentType.JSON
);
// serialize it to bytes
XContentType xContentType = randomFrom(XContentType.values());
final BytesReference bytes;
try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
configuration.toXContent(builder, ToXContent.EMPTY_PARAMS);
bytes = BytesReference.bytes(builder);
}
// deserialize it back
ContextParser<Void, PipelineConfiguration> parser = PipelineConfiguration.getParser();
XContentParser xContentParser = xContentType.xContent()
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, bytes.streamInput());
PipelineConfiguration parsed = parser.parse(xContentParser, null);
// make sure the _meta key sets are in the same order
Set<String> keys1 = ((Map<String, Object>) configuration.getConfig().get("_meta")).keySet();
Set<String> keys2 = ((Map<String, Object>) parsed.getConfig().get("_meta")).keySet();
assertThat(keys1, contains(keys2.toArray(new String[0])));
}
@Override
protected PipelineConfiguration createTestInstance() {
BytesArray config;

View file

@ -78,6 +78,37 @@ public class ScriptStatsTests extends ESTestCase {
assertThat(Strings.toString(builder), equalTo(expected));
}
public void testXContentChunkedHistory() throws Exception {
ScriptStats stats = new ScriptStats(5, 6, 7, new TimeSeries(10, 20, 30, 40), new TimeSeries(100, 200, 300, 400));
final XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
builder.startObject();
for (var it = stats.toXContentChunked(ToXContent.EMPTY_PARAMS); it.hasNext();) {
it.next().toXContent(builder, ToXContent.EMPTY_PARAMS);
}
builder.endObject();
String expected = """
{
"script" : {
"compilations" : 5,
"cache_evictions" : 6,
"compilation_limit_triggered" : 7,
"compilations_history" : {
"5m" : 10,
"15m" : 20,
"24h" : 30
},
"cache_evictions_history" : {
"5m" : 100,
"15m" : 200,
"24h" : 300
},
"contexts" : [ ]
}
}""";
assertThat(Strings.toString(builder), equalTo(expected));
}
public void testSerializeEmptyTimeSeries() throws IOException {
ScriptContextStats stats = new ScriptContextStats("c", 3333, new TimeSeries(1111), new TimeSeries(2222));

View file

@ -90,6 +90,9 @@ public abstract class ESAllocationTestCase extends ESTestCase {
public OptionalDouble getForecastedWriteLoad(IndexMetadata indexMetadata) {
return indexMetadata.getForecastedWriteLoad();
}
@Override
public void refreshLicense() {}
};
public static MockAllocationService createAllocationService() {

View file

@ -65,7 +65,7 @@ import org.elasticsearch.core.IOUtils;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.PathUtils;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.UpdateForV9;
import org.elasticsearch.core.UpdateForV10;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.health.node.selection.HealthNode;
import org.elasticsearch.index.IndexSettings;
@ -2024,11 +2024,12 @@ public abstract class ESRestTestCase extends ESTestCase {
}
/**
* Deprecation message emitted since 7.12.0 for the rest of the 7.x series. Can be removed in v9 since it is not
* emitted in v8. Note that this message is also permitted in certain YAML test cases, it can be removed there too.
* See https://github.com/elastic/elasticsearch/issues/66419 for more details.
* Deprecation message emitted since 7.12.0 for the rest of the 7.x series. Can be removed in v10 since it is not
* emitted in v8 or v9 and N-2 versions are now supported.
* Note that this message is also permitted in certain YAML test cases, it can be removed there too.
* See https://github.com/elastic/elasticsearch/issues/66419 and https://github.com/elastic/elasticsearch/pull/119594 for more details.
*/
@UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT)
@UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION)
private static final String WAIT_FOR_ACTIVE_SHARDS_DEFAULT_DEPRECATION_MESSAGE = "the default value for the ?wait_for_active_shards "
+ "parameter will change from '0' to 'index-setting' in version 8; specify '?wait_for_active_shards=index-setting' "
+ "to adopt the future default behaviour, or '?wait_for_active_shards=0' to preserve today's behaviour";

View file

@@ -102,6 +102,8 @@ tasks.named("yamlRestCompatTestTransform").configure({ task ->
task.skipTest("esql/190_lookup_join/alias-pattern-multiple", "LOOKUP JOIN does not support index aliases for now")
task.skipTest("esql/190_lookup_join/alias-pattern-single", "LOOKUP JOIN does not support index aliases for now")
task.skipTest("esql/180_match_operator/match with disjunctions", "Disjunctions in full text functions work now")
task.skipTest("esql/130_spatial/values unsupported for geo_point", "Spatial types are now supported in VALUES aggregation")
task.skipTest("esql/130_spatial/values unsupported for geo_point status code", "Spatial types are now supported in VALUES aggregation")
// Expected deprecation warning for compat yaml tests:
task.addAllowedWarningRegex(".*rollup functionality will be removed in Elasticsearch.*")
task.skipTest("esql/40_tsdb/from doc with aggregate_metric_double", "TODO: support for subset of metric fields")

View file

@@ -55,7 +55,8 @@ public record TextEmbeddingBitResults(List<TextEmbeddingByteResults.Embedding> e
if (embeddings.isEmpty()) {
throw new IllegalStateException("Embeddings list is empty");
}
return embeddings.getFirst().values().length;
// bit embeddings are encoded as bytes so convert this to bits
return Byte.SIZE * embeddings.getFirst().values().length;
}
@Override
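
For context on the fix above: a bit embedding stores eight one-bit dimensions per byte, so the size in bits is simply Byte.SIZE times the stored byte count. A minimal sketch of that arithmetic (the array name and length are illustrative, not taken from this change):

byte[] packedEmbedding = new byte[16];                    // 16 stored bytes (illustrative)
int bitDimensions = Byte.SIZE * packedEmbedding.length;   // 8 bits/byte * 16 bytes = 128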

View file

@@ -310,6 +310,9 @@
"upgrade_status": {
"type": "keyword"
},
"upgrade_attempts": {
"type": "date"
},
"user_provided_metadata": {
"type": "object",
"enabled": false

View file

@@ -37,3 +37,11 @@ testClusters.configureEach {
artifacts {
restXpackTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test"))
}
tasks.named("yamlRestCompatTestTransform").configure(
{ task ->
// Behavioral Analytics is deprecated with 9.0.0.
task.addAllowedWarning("Behavioral Analytics is deprecated and will be removed in a future release.")
}
)

View file

@@ -27,6 +27,8 @@ teardown:
---
"xpack usage includes Enterprise Search":
- requires:
test_runner_features: [ allowed_warnings ]
- do:
xpack.usage: { }
@@ -79,6 +81,8 @@ teardown:
query: "{{query_string}}"
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.put_behavioral_analytics:
name: test-analytics-collection
@@ -113,6 +117,8 @@ teardown:
}
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.delete_behavioral_analytics:
name: test-analytics-collection

View file

@@ -1,25 +1,39 @@
setup:
- requires:
test_runner_features: [ allowed_warnings ]
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.put_behavioral_analytics:
name: my-test-analytics-collection
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.put_behavioral_analytics:
name: my-test-analytics-collection2
---
teardown:
- requires:
test_runner_features: [ allowed_warnings ]
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.delete_behavioral_analytics:
name: my-test-analytics-collection
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.delete_behavioral_analytics:
name: my-test-analytics-collection2
---
"Get Analytics Collection for a particular collection":
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.get_behavioral_analytics:
name: my-test-analytics-collection
@@ -34,6 +48,8 @@ teardown:
---
"Get Analytics Collection list":
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.get_behavioral_analytics:
name:
@@ -56,6 +72,8 @@ teardown:
"Get Analytics Collection - Resource does not exist":
- do:
catch: "missing"
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.get_behavioral_analytics:
name: test-nonexistent-analytics-collection

View file

@@ -1,11 +1,19 @@
teardown:
- requires:
test_runner_features: [ allowed_warnings ]
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.delete_behavioral_analytics:
name: test-analytics-collection
---
"Create Analytics Collection":
- requires:
test_runner_features: [ allowed_warnings ]
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.put_behavioral_analytics:
name: test-analytics-collection
@@ -14,7 +22,11 @@ teardown:
---
"Create Analytics Collection - analytics collection already exists":
- requires:
test_runner_features: [ allowed_warnings ]
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.put_behavioral_analytics:
name: test-analytics-collection
@@ -22,6 +34,8 @@ teardown:
- do:
catch: bad_request
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.put_behavioral_analytics:
name: test-analytics-collection

View file

@@ -1,18 +1,30 @@
setup:
- requires:
test_runner_features: [ allowed_warnings ]
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.put_behavioral_analytics:
name: my-test-analytics-collection
---
teardown:
- requires:
test_runner_features: [ allowed_warnings ]
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.delete_behavioral_analytics:
name: my-test-analytics-collection
ignore: 404
---
"Delete Analytics Collection":
- requires:
test_runner_features: [ allowed_warnings ]
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.delete_behavioral_analytics:
name: my-test-analytics-collection
@@ -20,13 +32,19 @@ teardown:
- do:
catch: "missing"
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.get_behavioral_analytics:
name: my-test-analytics-collection
---
"Delete Analytics Collection - Analytics Collection does not exist":
- requires:
test_runner_features: [ allowed_warnings ]
- do:
catch: "missing"
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.delete_behavioral_analytics:
name: test-nonexistent-analytics-collection

View file

@@ -1,11 +1,19 @@
setup:
- requires:
test_runner_features: [ allowed_warnings ]
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.put_behavioral_analytics:
name: my-test-analytics-collection
---
teardown:
- requires:
test_runner_features: [ allowed_warnings ]
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.delete_behavioral_analytics:
name: my-test-analytics-collection
@@ -13,11 +21,13 @@ teardown:
# Page view event tests #########################################
---
"Post page_view analytics event":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "page_view"
@@ -31,12 +41,14 @@ teardown:
---
"Post page_view analytics event - Missing page.url":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
catch: "bad_request"
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "page_view"
@@ -49,11 +61,13 @@ teardown:
---
"Post page_view analytics event - With document":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "page_view"
@@ -70,11 +84,13 @@ teardown:
---
"Post page_view analytics event - With page title":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "page_view"
@@ -89,11 +105,13 @@ teardown:
---
"Post page_view analytics event - With referrer":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "page_view"
@@ -108,14 +126,16 @@ teardown:
---
"Post page_view analytics event - debug and session information":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers:
X-Forwarded-For: 192.23.12.12
User-Agent: Mozilla/5.0
Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "page_view"
@@ -152,11 +172,13 @@ teardown:
# Search event tests ############################################
---
"Post search analytics event":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search"
@@ -170,12 +192,14 @@ teardown:
---
"Post search analytics event Missing search query":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
catch: "bad_request"
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search"
@@ -188,11 +212,13 @@ teardown:
---
"Post search analytics event - With sort order":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search"
@@ -208,11 +234,13 @@ teardown:
---
"Post search analytics event - With sort name and direction":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search"
@@ -229,11 +257,13 @@ teardown:
---
"Post search analytics event - With pagination":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search"
@@ -250,11 +280,13 @@ teardown:
---
"Post search analytics event - With search application":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search"
@@ -269,10 +301,12 @@ teardown:
---
"Post search analytics event - With search results":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
@@ -302,11 +336,13 @@ teardown:
---
"Post search analytics event - With filters":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search"
@@ -324,14 +360,16 @@ teardown:
---
"Post search analytics event - debug and session information":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers:
X-Forwarded-For: 192.23.12.12
User-Agent: Mozilla/5.0
Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search"
@@ -393,11 +431,13 @@ teardown:
# Search click event tests #######################################
---
"Post search_click analytics event":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search_click"
@@ -415,14 +455,16 @@ teardown:
---
"Post search_click analytics event - debug and session information":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers:
X-Forwarded-For: 192.23.12.12
User-Agent: Mozilla/5.0
Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search_click"
@@ -455,11 +497,13 @@ teardown:
---
"Post search_click analytics event - Page Only":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search_click"
@@ -475,11 +519,13 @@ teardown:
---
"Post search_click analytics event - Document Only":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search_click"
@@ -496,12 +542,14 @@ teardown:
---
"Post search_click analytics event Missing search query":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
catch: "bad_request"
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search_click"
@@ -516,12 +564,14 @@ teardown:
---
"Post search_click analytics event Missing page url and document":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
catch: "bad_request"
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "search_click"
@@ -537,12 +587,14 @@ teardown:
# Generic errors tests ###############################################
---
"Post analytics event - Analytics collection does not exist":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
catch: "missing"
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: test-nonexistent-analytics-collection
event_type: "page_view"
@@ -556,12 +608,14 @@ teardown:
---
"Post analytics event - Event type does not exist":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
catch: "bad_request"
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "nonexistent-event-type"
@@ -577,12 +631,14 @@ teardown:
---
"Post page_view analytics event - Missing session.id":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
catch: "bad_request"
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "page_view"
@@ -595,12 +651,14 @@ teardown:
---
"Post page_view analytics event - Missing user.id":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
catch: "bad_request"
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "page_view"
@@ -613,12 +671,14 @@ teardown:
---
"Post analytics event - Unknown event field":
- skip:
features: headers
- requires:
test_runner_features: [ allowed_warnings, headers ]
- do:
catch: "bad_request"
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.post_behavioral_analytics_event:
collection_name: my-test-analytics-collection
event_type: "nonexistent-event-type"

View file

@@ -1,4 +1,6 @@
setup:
- requires:
test_runner_features: [ allowed_warnings ]
- do:
indices.create:
index: test-search-index1
@@ -27,6 +29,8 @@ setup:
number_of_replicas: 0
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.put:
name: test-search-application
body:
@@ -51,6 +55,8 @@ setup:
type: string
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.put:
name: test-search-application-1
body:
@@ -110,10 +116,14 @@ setup:
refresh: true
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.put_behavioral_analytics:
name: my-test-analytics-collection
---
teardown:
- requires:
test_runner_features: [ allowed_warnings ]
- do:
search_application.delete:
name: test-search-application
@@ -145,13 +155,15 @@ teardown:
ignore: 404
- do:
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.delete_behavioral_analytics:
name: my-test-analytics-collection
---
"Query Search Application with API key":
- skip:
features: headers
- requires:
test_runner_features: [ headers, allowed_warnings ]
- do:
headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user
@@ -281,6 +293,8 @@ teardown:
catch: forbidden
headers:
Authorization: ApiKey ${api_key_encoded}
allowed_warnings:
- "Behavioral Analytics is deprecated and will be removed in a future release."
search_application.get_behavioral_analytics:
name:
- match: { status: 403 }

View file

@@ -233,6 +233,10 @@ public class EnterpriseSearch extends Plugin implements ActionPlugin, SystemInde
private final boolean enabled;
// NOTE: Behavioral Analytics is deprecated in 9.0 but not 8.x.
public static final String BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE =
"Behavioral Analytics is deprecated and will be removed in a future release.";
public EnterpriseSearch(Settings settings) {
this.enabled = XPackSettings.ENTERPRISE_SEARCH_ENABLED.get(settings);
}

View file

@@ -28,7 +28,9 @@ import static org.elasticsearch.xpack.application.analytics.AnalyticsConstants.E
/**
* The {@link AnalyticsCollection} model.
* @deprecated in 9.0
*/
@Deprecated
public class AnalyticsCollection implements Writeable, ToXContentObject {
private static final ObjectParser<AnalyticsCollection, String> PARSER = ObjectParser.fromBuilder(

View file

@@ -28,7 +28,9 @@ import static org.elasticsearch.xpack.application.analytics.AnalyticsConstants.E
/**
* A service that allows the resolution of {@link AnalyticsCollection} by name.
* @deprecated in 9.0
*/
@Deprecated
public class AnalyticsCollectionResolver {
private final IndexNameExpressionResolver indexNameExpressionResolver;

View file

@@ -33,7 +33,9 @@ import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN;
* Until we have more specific need the {@link AnalyticsCollection} is just another representation
* of a {@link org.elasticsearch.cluster.metadata.DataStream}.
* As a consequence, this service is mostly a facade for the data stream API.
* @deprecated in 9.0
*/
@Deprecated
public class AnalyticsCollectionService {
private static final Logger logger = LogManager.getLogger(AnalyticsCollectionService.class);
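
The Javadoc above frames this service as a facade: an analytics collection exists only as a backing data stream. A rough sketch of such a name mapping, assuming each collection is backed by one event data stream whose name is derived from the collection name (the prefix below is an assumption, not taken from this diff):

static String eventDataStreamName(String collectionName) {
    // Assumed naming scheme: one event data stream per analytics collection.
    return "behavioral_analytics-events-" + collectionName;
}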

View file

@@ -7,6 +7,10 @@
package org.elasticsearch.xpack.application.analytics;
/**
* @deprecated in 9.0
*/
@Deprecated
public class AnalyticsConstants {
private AnalyticsConstants() {}

View file

@@ -17,7 +17,9 @@ import java.util.Objects;
/**
* Event emitter will index Analytics events submitted through a {@link PostAnalyticsEventAction.Request} request.
* @deprecated in 9.0
*/
@Deprecated
public class AnalyticsEventIngestService {
private final AnalyticsCollectionResolver collectionResolver;

View file

@@ -31,6 +31,10 @@ import static org.elasticsearch.xpack.application.analytics.AnalyticsConstants.R
import static org.elasticsearch.xpack.application.analytics.AnalyticsConstants.TEMPLATE_VERSION_VARIABLE;
import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN;
/**
* @deprecated in 9.0
*/
@Deprecated
public class AnalyticsTemplateRegistry extends IndexTemplateRegistry {
// This number must be incremented when we make changes to built-in templates.

View file

@@ -24,6 +24,10 @@ import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
* @deprecated in 9.0
*/
@Deprecated
public class DeleteAnalyticsCollectionAction {
public static final String NAME = "cluster:admin/xpack/application/analytics/delete";

View file

@@ -25,6 +25,10 @@ import java.util.Arrays;
import java.util.List;
import java.util.Objects;
/**
* @deprecated in 9.0
*/
@Deprecated
public class GetAnalyticsCollectionAction {
public static final String NAME = "cluster:admin/xpack/application/analytics/get";

View file

@@ -36,6 +36,10 @@ import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
/**
* @deprecated in 9.0
*/
@Deprecated
public class PostAnalyticsEventAction {
public static final String NAME = "cluster:admin/xpack/application/analytics/post_event";

View file

@@ -23,6 +23,10 @@ import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
* @deprecated in 9.0
*/
@Deprecated
public class PutAnalyticsCollectionAction {
public static final String NAME = "cluster:admin/xpack/application/analytics/put";

View file

@@ -23,6 +23,10 @@ import java.util.List;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
/**
* @deprecated in 9.0
*/
@Deprecated
@ServerlessScope(Scope.PUBLIC)
public class RestDeleteAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler {
public RestDeleteAnalyticsCollectionAction(XPackLicenseState licenseState) {

View file

@@ -23,6 +23,10 @@ import java.util.List;
import static org.elasticsearch.rest.RestRequest.Method.GET;
/**
* @deprecated in 9.0
*/
@Deprecated
@ServerlessScope(Scope.PUBLIC)
public class RestGetAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler {
public RestGetAnalyticsCollectionAction(XPackLicenseState licenseState) {

View file

@@ -29,6 +29,10 @@ import java.util.Map;
import static org.elasticsearch.rest.RestRequest.Method.POST;
/**
* @deprecated in 9.0
*/
@Deprecated
@ServerlessScope(Scope.PUBLIC)
public class RestPostAnalyticsEventAction extends EnterpriseSearchBaseRestHandler {
public RestPostAnalyticsEventAction(XPackLicenseState licenseState) {

View file

@@ -24,6 +24,10 @@ import java.util.List;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
/**
* @deprecated in 9.0
*/
@Deprecated
@ServerlessScope(Scope.PUBLIC)
public class RestPutAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler {
public RestPutAnalyticsCollectionAction(XPackLicenseState licenseState) {

View file

@@ -15,6 +15,8 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.injection.guice.Inject;
import org.elasticsearch.tasks.Task;
@@ -22,6 +24,13 @@ import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService;
import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_API_ENDPOINT;
import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE;
/**
* @deprecated in 9.0
*/
@Deprecated
public class TransportDeleteAnalyticsCollectionAction extends AcknowledgedTransportMasterNodeAction<
DeleteAnalyticsCollectionAction.Request> {
@@ -59,6 +68,8 @@ public class TransportDeleteAnalyticsCollectionAction extends AcknowledgedTransp
ClusterState state,
ActionListener<AcknowledgedResponse> listener
) {
DeprecationLogger.getLogger(TransportDeleteAnalyticsCollectionAction.class)
.warn(DeprecationCategory.API, BEHAVIORAL_ANALYTICS_API_ENDPOINT, BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE);
analyticsCollectionService.deleteAnalyticsCollection(state, request, listener.map(v -> AcknowledgedResponse.TRUE));
}
}
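
The same deprecation-logging pattern recurs in the get, post-event, and put transport actions that follow: a category, a key identifying the warning, and the message surfaced to clients. Distilled into a standalone sketch using the constants already shown in this diff (the helper name is illustrative):

private static final DeprecationLogger DEPRECATION_LOGGER =
    DeprecationLogger.getLogger(TransportDeleteAnalyticsCollectionAction.class);

static void warnBehavioralAnalyticsDeprecated() {
    DEPRECATION_LOGGER.warn(
        DeprecationCategory.API,                  // the deprecated surface is an API endpoint
        BEHAVIORAL_ANALYTICS_API_ENDPOINT,        // key identifying this warning
        BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE  // message emitted to clients
    );
}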

View file

@@ -13,6 +13,8 @@ import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.injection.guice.Inject;
import org.elasticsearch.tasks.Task;
@@ -20,6 +22,13 @@ import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService;
import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_API_ENDPOINT;
import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE;
/**
* @deprecated in 9.0
*/
@Deprecated
public class TransportGetAnalyticsCollectionAction extends TransportMasterNodeReadAction<
GetAnalyticsCollectionAction.Request,
GetAnalyticsCollectionAction.Response> {
@@ -54,6 +63,8 @@ public class TransportGetAnalyticsCollectionAction extends TransportMasterNodeRe
ClusterState state,
ActionListener<GetAnalyticsCollectionAction.Response> listener
) {
DeprecationLogger.getLogger(TransportGetAnalyticsCollectionAction.class)
.warn(DeprecationCategory.API, BEHAVIORAL_ANALYTICS_API_ENDPOINT, BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE);
analyticsCollectionService.getAnalyticsCollection(state, request, listener);
}

View file

@@ -10,17 +10,25 @@ package org.elasticsearch.xpack.application.analytics.action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.injection.guice.Inject;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.application.analytics.AnalyticsEventIngestService;
import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_API_ENDPOINT;
import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE;
/**
* Transport implementation for the {@link PostAnalyticsEventAction}.
* It executes the {@link AnalyticsEventIngestService#addEvent} method if the XPack license is valid; otherwise it calls
* the listener's onFailure method with the appropriate exception.
* @deprecated in 9.0
*/
@Deprecated
public class TransportPostAnalyticsEventAction extends HandledTransportAction<
PostAnalyticsEventAction.Request,
PostAnalyticsEventAction.Response> {
@@ -31,7 +39,8 @@ public class TransportPostAnalyticsEventAction extends HandledTransportAction<
public TransportPostAnalyticsEventAction(
TransportService transportService,
ActionFilters actionFilters,
AnalyticsEventIngestService eventEmitterService
AnalyticsEventIngestService eventEmitterService,
ClusterService clusterService
) {
super(
PostAnalyticsEventAction.NAME,
@@ -49,6 +58,8 @@ public class TransportPostAnalyticsEventAction extends HandledTransportAction<
PostAnalyticsEventAction.Request request,
ActionListener<PostAnalyticsEventAction.Response> listener
) {
DeprecationLogger.getLogger(TransportPostAnalyticsEventAction.class)
.warn(DeprecationCategory.API, BEHAVIORAL_ANALYTICS_API_ENDPOINT, BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE);
this.eventEmitterService.addEvent(request, listener);
}
}
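
The class Javadoc states that the event is ingested only when the XPack license permits it, and that the listener fails otherwise. A hedged sketch of that gate; licenseState, LICENSED_FEATURE, and the use of LicenseUtils.newComplianceException are assumptions for illustration, not shown in this diff:

void executeIfLicensed(PostAnalyticsEventAction.Request request,
                       ActionListener<PostAnalyticsEventAction.Response> listener) {
    if (licenseState.isAllowed(LICENSED_FEATURE)) {  // assumed license check
        eventEmitterService.addEvent(request, listener);
    } else {
        listener.onFailure(LicenseUtils.newComplianceException("behavioral_analytics"));
    }
}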

View file

@@ -14,13 +14,23 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.features.FeatureService;
import org.elasticsearch.injection.guice.Inject;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService;
import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_API_ENDPOINT;
import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE;
/**
* @deprecated in 9.0
*/
@Deprecated
public class TransportPutAnalyticsCollectionAction extends TransportMasterNodeAction<
PutAnalyticsCollectionAction.Request,
PutAnalyticsCollectionAction.Response> {
@@ -33,7 +43,8 @@ public class TransportPutAnalyticsCollectionAction extends TransportMasterNodeAc
ClusterService clusterService,
ThreadPool threadPool,
ActionFilters actionFilters,
AnalyticsCollectionService analyticsCollectionService
AnalyticsCollectionService analyticsCollectionService,
FeatureService featureService
) {
super(
PutAnalyticsCollectionAction.NAME,
@@ -60,6 +71,8 @@ public class TransportPutAnalyticsCollectionAction extends TransportMasterNodeAc
ClusterState state,
ActionListener<PutAnalyticsCollectionAction.Response> listener
) {
DeprecationLogger.getLogger(TransportPutAnalyticsCollectionAction.class)
.warn(DeprecationCategory.API, BEHAVIORAL_ANALYTICS_API_ENDPOINT, BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE);
analyticsCollectionService.putAnalyticsCollection(state, request, listener);
}

View file

@@ -31,7 +31,9 @@ import static org.elasticsearch.xpack.application.analytics.AnalyticsConstants.E
/**
* This class represents Analytics events object meant to be emitted to the event queue.
* @deprecated in 9.0
*/
@Deprecated
public class AnalyticsEvent implements Writeable, ToXContentObject {
public static final ParseField TIMESTAMP_FIELD = new ParseField("@timestamp");

View file

@@ -29,7 +29,9 @@ import static org.elasticsearch.xpack.application.analytics.event.AnalyticsEvent
/**
* A utility class for parsing {@link AnalyticsEvent} objects from payloads (such as HTTP POST request bodies) or input streams.
* @deprecated in 9.0
*/
@Deprecated
public class AnalyticsEventFactory {
public static final AnalyticsEventFactory INSTANCE = new AnalyticsEventFactory();

View file

@@ -22,6 +22,10 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.P
import static org.elasticsearch.xpack.application.analytics.event.parser.field.SessionAnalyticsEventField.SESSION_FIELD;
import static org.elasticsearch.xpack.application.analytics.event.parser.field.UserAnalyticsEventField.USER_FIELD;
/**
* @deprecated in 9.0
*/
@Deprecated
public class PageViewAnalyticsEvent {
private static final ObjectParser<AnalyticsEvent.Builder, AnalyticsEvent.Context> PARSER = ObjectParser.fromBuilder(
"page_view_event",

View file

@@ -20,6 +20,10 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.S
import static org.elasticsearch.xpack.application.analytics.event.parser.field.SessionAnalyticsEventField.SESSION_FIELD;
import static org.elasticsearch.xpack.application.analytics.event.parser.field.UserAnalyticsEventField.USER_FIELD;
/**
* @deprecated in 9.0
*/
@Deprecated
public class SearchAnalyticsEvent {
private static final ObjectParser<AnalyticsEvent.Builder, AnalyticsEvent.Context> PARSER = ObjectParser.fromBuilder(
"search_event",

View file

@@ -24,6 +24,10 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.S
import static org.elasticsearch.xpack.application.analytics.event.parser.field.SessionAnalyticsEventField.SESSION_FIELD;
import static org.elasticsearch.xpack.application.analytics.event.parser.field.UserAnalyticsEventField.USER_FIELD;
/**
* @deprecated in 9.0
*/
@Deprecated
public class SearchClickAnalyticsEvent {
private static final ObjectParser<AnalyticsEvent.Builder, AnalyticsEvent.Context> PARSER = ObjectParser.fromBuilder(

View file

@@ -18,6 +18,10 @@ import java.util.Map;
import static org.elasticsearch.common.Strings.requireNonBlank;
/**
* @deprecated in 9.0
*/
@Deprecated
public class DocumentAnalyticsEventField {
public static final ParseField DOCUMENT_FIELD = new ParseField("document");

Some files were not shown because too many files have changed in this diff.