Mirror of https://github.com/elastic/elasticsearch.git (synced 2025-04-25 07:37:19 -04:00)

Commit f7791a0f5f: Merge main into multi-project

235 changed files with 6205 additions and 1372 deletions

.gitignore (vendored): 3 additions

@@ -69,3 +69,6 @@ testfixtures_shared/
 # Generated
 checkstyle_ide.xml
 x-pack/plugin/esql/src/main/generated-src/generated/
+
+# JEnv
+.java-version

@@ -31,11 +31,15 @@ import org.gradle.api.tasks.TaskProvider;
 import org.gradle.api.tasks.bundling.Jar;
 import org.gradle.api.tasks.javadoc.Javadoc;
 import org.gradle.external.javadoc.CoreJavadocOptions;
+import org.gradle.jvm.toolchain.JavaLanguageVersion;
+import org.gradle.jvm.toolchain.JavaToolchainService;
 import org.gradle.language.base.plugins.LifecycleBasePlugin;
 
 import java.io.File;
 import java.util.Map;
 
+import javax.inject.Inject;
+
 import static org.elasticsearch.gradle.internal.conventions.util.Util.toStringable;
 import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams;
 
@@ -44,6 +48,14 @@ import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams
  * common configuration for production code.
  */
 public class ElasticsearchJavaPlugin implements Plugin<Project> {
+
+    private final JavaToolchainService javaToolchains;
+
+    @Inject
+    ElasticsearchJavaPlugin(JavaToolchainService javaToolchains) {
+        this.javaToolchains = javaToolchains;
+    }
+
     @Override
     public void apply(Project project) {
         project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class);
@@ -55,7 +67,7 @@ public class ElasticsearchJavaPlugin implements Plugin<Project> {
         // configureConfigurations(project);
         configureJars(project, buildParams.get());
         configureJarManifest(project, buildParams.get());
-        configureJavadoc(project);
+        configureJavadoc(project, buildParams.get());
         testCompileOnlyDeps(project);
     }
 
@@ -128,7 +140,7 @@ public class ElasticsearchJavaPlugin implements Plugin<Project> {
         project.getPluginManager().apply("nebula.info-jar");
     }
 
-    private static void configureJavadoc(Project project) {
+    private void configureJavadoc(Project project, BuildParameterExtension buildParams) {
         project.getTasks().withType(Javadoc.class).configureEach(javadoc -> {
             /*
              * Generate docs using html5 to suppress a warning from `javadoc`
@@ -136,6 +148,10 @@ public class ElasticsearchJavaPlugin implements Plugin<Project> {
              */
             CoreJavadocOptions javadocOptions = (CoreJavadocOptions) javadoc.getOptions();
             javadocOptions.addBooleanOption("html5", true);
+
+            javadoc.getJavadocTool().set(javaToolchains.javadocToolFor(spec -> {
+                spec.getLanguageVersion().set(JavaLanguageVersion.of(buildParams.getMinimumRuntimeVersion().getMajorVersion()));
+            }));
         });
 
         TaskProvider<Javadoc> javadoc = project.getTasks().withType(Javadoc.class).named("javadoc");

@@ -17,12 +17,12 @@ import org.gradle.api.InvalidUserDataException;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
 import org.gradle.api.Task;
+import org.gradle.api.file.FileSystemOperations;
 import org.gradle.api.file.ProjectLayout;
 import org.gradle.api.model.ObjectFactory;
 import org.gradle.api.plugins.JvmToolchainsPlugin;
 import org.gradle.api.provider.Provider;
 import org.gradle.api.provider.ProviderFactory;
-import org.gradle.api.tasks.Copy;
 import org.gradle.api.tasks.PathSensitivity;
 import org.gradle.api.tasks.TaskProvider;
 import org.gradle.jvm.toolchain.JavaToolchainService;
@@ -54,11 +54,17 @@ public class InternalDistributionBwcSetupPlugin implements Plugin<Project> {
     private final ObjectFactory objectFactory;
     private ProviderFactory providerFactory;
     private JavaToolchainService toolChainService;
+    private FileSystemOperations fileSystemOperations;
 
     @Inject
-    public InternalDistributionBwcSetupPlugin(ObjectFactory objectFactory, ProviderFactory providerFactory) {
+    public InternalDistributionBwcSetupPlugin(
+        ObjectFactory objectFactory,
+        ProviderFactory providerFactory,
+        FileSystemOperations fileSystemOperations
+    ) {
         this.objectFactory = objectFactory;
         this.providerFactory = providerFactory;
+        this.fileSystemOperations = fileSystemOperations;
     }
 
     @Override
@@ -76,7 +82,8 @@ public class InternalDistributionBwcSetupPlugin implements Plugin<Project> {
                 providerFactory,
                 objectFactory,
                 toolChainService,
-                isCi
+                isCi,
+                fileSystemOperations
             );
         });
     }
@@ -88,7 +95,8 @@ public class InternalDistributionBwcSetupPlugin implements Plugin<Project> {
         ProviderFactory providerFactory,
         ObjectFactory objectFactory,
         JavaToolchainService toolChainService,
-        Boolean isCi
+        Boolean isCi,
+        FileSystemOperations fileSystemOperations
     ) {
         ProjectLayout layout = project.getLayout();
         Provider<BwcVersions.UnreleasedVersionInfo> versionInfoProvider = providerFactory.provider(() -> versionInfo);
@@ -120,11 +128,18 @@ public class InternalDistributionBwcSetupPlugin implements Plugin<Project> {
         List<DistributionProject> distributionProjects = resolveArchiveProjects(checkoutDir.get(), bwcVersion.get());
 
         // Setup gradle user home directory
-        project.getTasks().register("setupGradleUserHome", Copy.class, copy -> {
-            copy.into(project.getGradle().getGradleUserHomeDir().getAbsolutePath() + "-" + project.getName());
-            copy.from(project.getGradle().getGradleUserHomeDir().getAbsolutePath(), copySpec -> {
-                copySpec.include("gradle.properties");
-                copySpec.include("init.d/*");
+        // We don't use a normal `Copy` task here as snapshotting the entire gradle user home is very expensive. This task is cheap, so
+        // up-to-date checking doesn't buy us much
+        project.getTasks().register("setupGradleUserHome", task -> {
+            task.doLast(t -> {
+                fileSystemOperations.copy(copy -> {
+                    String gradleUserHome = project.getGradle().getGradleUserHomeDir().getAbsolutePath();
+                    copy.into(gradleUserHome + "-" + project.getName());
+                    copy.from(gradleUserHome, copySpec -> {
+                        copySpec.include("gradle.properties");
+                        copySpec.include("init.d/*");
+                    });
+                });
             });
         });
 

@@ -86,14 +86,14 @@ public class MrjarPlugin implements Plugin<Project> {
         configurePreviewFeatures(project, javaExtension.getSourceSets().getByName(SourceSet.TEST_SOURCE_SET_NAME), 21);
         for (int javaVersion : mainVersions) {
             String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion;
-            SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion);
+            SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion, true);
             configureSourceSetInJar(project, mainSourceSet, javaVersion);
             addJar(project, mainSourceSet, javaVersion);
             mainSourceSets.add(mainSourceSetName);
             testSourceSets.add(mainSourceSetName);
 
             String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion;
-            SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion);
+            SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion, false);
             testSourceSets.add(testSourceSetName);
             createTestTask(project, buildParams, testSourceSet, javaVersion, mainSourceSets);
         }
@@ -121,7 +121,8 @@ public class MrjarPlugin implements Plugin<Project> {
         JavaPluginExtension javaExtension,
         String sourceSetName,
         List<String> parentSourceSets,
-        int javaVersion
+        int javaVersion,
+        boolean isMainSourceSet
     ) {
         SourceSet sourceSet = javaExtension.getSourceSets().maybeCreate(sourceSetName);
         for (String parentSourceSetName : parentSourceSets) {
@@ -135,6 +136,13 @@ public class MrjarPlugin implements Plugin<Project> {
             CompileOptions compileOptions = compileTask.getOptions();
             compileOptions.getRelease().set(javaVersion);
         });
+        if (isMainSourceSet) {
+            project.getTasks().create(sourceSet.getJavadocTaskName(), Javadoc.class, javadocTask -> {
+                javadocTask.getJavadocTool().set(javaToolchains.javadocToolFor(spec -> {
+                    spec.getLanguageVersion().set(JavaLanguageVersion.of(javaVersion));
+                }));
+            });
+        }
         configurePreviewFeatures(project, sourceSet, javaVersion);
 
         // Since we configure MRJAR sourcesets to allow preview apis, class signatures for those

@@ -9,7 +9,7 @@
 ## should create one or more files in the jvm.options.d
 ## directory containing your adjustments.
 ##
-## See https://www.elastic.co/guide/en/elasticsearch/reference/@project.minor.version@/jvm-options.html
+## See https://www.elastic.co/guide/en/elasticsearch/reference/@project.minor.version@/advanced-configuration.html#set-jvm-options
 ## for more information.
 ##
 ################################################################

@@ -130,8 +130,9 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach {
   setting 'xpack.security.enabled', 'true'
   setting 'xpack.security.authc.api_key.enabled', 'true'
   setting 'xpack.security.authc.token.enabled', 'true'
-  // disable the ILM history for doc tests to avoid potential lingering tasks that'd cause test flakiness
+  // disable the ILM and SLM history for doc tests to avoid potential lingering tasks that'd cause test flakiness
   setting 'indices.lifecycle.history_index_enabled', 'false'
+  setting 'slm.history_index_enabled', 'false'
   setting 'xpack.license.self_generated.type', 'trial'
   setting 'xpack.security.authc.realms.file.file.order', '0'
   setting 'xpack.security.authc.realms.native.native.order', '1'

@@ -1,5 +0,0 @@
-pr: 116423
-summary: Support mTLS for the Elastic Inference Service integration inside the inference API
-area: Machine Learning
-type: feature
-issues: []

docs/changelog/117778.yaml (new file): 5 additions

@@ -0,0 +1,5 @@
+pr: 117778
+summary: "[Connector APIs] Enforce index prefix for managed connectors"
+area: Extract&Transform
+type: feature
+issues: []

docs/changelog/118266.yaml (new file): 5 additions

@@ -0,0 +1,5 @@
+pr: 118266
+summary: Prevent data nodes from sending stack traces to coordinator when `error_trace=false`
+area: Search
+type: enhancement
+issues: []

docs/changelog/118366.yaml (new file): 22 additions

@@ -0,0 +1,22 @@
+pr: 118366
+summary: |-
+  Configuring a bind DN in an LDAP or Active Directory (AD) realm without a corresponding bind password
+  will prevent node from starting
+area: Authentication
+type: breaking
+issues: []
+breaking:
+  title: -|
+    Configuring a bind DN in an LDAP or Active Directory (AD) realm without
+    a corresponding bind password will prevent node from starting
+  area: Cluster and node setting
+  details: -|
+    For LDAP or AD authentication realms, setting a bind DN (via the
+    `xpack.security.authc.realms.ldap.*.bind_dn` or `xpack.security.authc.realms.active_directory.*.bind_dn`
+    realm settings) without a bind password is a misconfiguration that may prevent successful authentication
+    to the node. Nodes will fail to start if a bind DN is specified without a password.
+  impact: -|
+    If you have a bind DN configured for an LDAP or AD authentication
+    realm, set a bind password for {ref}/ldap-realm.html#ldap-realm-configuration[LDAP]
+    or {ref}/active-directory-realm.html#ad-realm-configuration[Active Directory].
+    Configuring a bind DN without a password prevents the misconfigured node from starting.

docs/changelog/118585.yaml (new file): 7 additions

@@ -0,0 +1,7 @@
+pr: 118585
+summary: Add a generic `rescorer` retriever based on the search request's rescore
+  functionality
+area: Ranking
+type: feature
+issues:
+ - 118327

docs/changelog/118757.yaml (new file): 5 additions

@@ -0,0 +1,5 @@
+pr: 118757
+summary: Improve handling of nested fields in index reader wrappers
+area: Authorization
+type: enhancement
+issues: []

docs/changelog/118816.yaml (new file): 6 additions

@@ -0,0 +1,6 @@
+pr: 118816
+summary: Support flattened field with downsampling
+area: Downsampling
+type: bug
+issues:
+ - 116319

@@ -1,4 +1,4 @@
-pr: 116944
+pr: 118825
 summary: "Remove support for type, fields, `copy_to` and boost in metadata field definition"
 area: Mapping
 type: breaking
@@ -6,6 +6,6 @@ issues: []
 breaking:
   title: "Remove support for type, fields, copy_to and boost in metadata field definition"
   area: Mapping
-  details: The type, fields, copy_to and boost parameters are no longer supported in metadata field definition
+  details: The type, fields, copy_to and boost parameters are no longer supported in metadata field definition starting with version 9.
   impact: Users providing type, fields, copy_to or boost as part of metadata field definition should remove them from their mappings.
   notable: false

docs/changelog/118858.yaml (new file): 5 additions

@@ -0,0 +1,5 @@
+pr: 118858
+summary: Lookup join on multiple join fields not yet supported
+area: ES|QL
+type: enhancement
+issues: []

docs/changelog/118890.yaml (new file): 5 additions

@@ -0,0 +1,5 @@
+pr: 118890
+summary: Add action to create index from a source index
+area: Data streams
+type: enhancement
+issues: []

docs/changelog/119007.yaml (new file): 6 additions

@@ -0,0 +1,6 @@
+pr: 119007
+summary: Block-writes cannot be added after read-only
+area: Data streams
+type: bug
+issues:
+ - 119002

@@ -8,7 +8,7 @@ The logic for content extraction is defined in {connectors-python}/connectors/ut
 While intended primarily for PDF and Microsoft Office formats, you can use any of the <<es-connectors-content-extraction-supported-file-types, supported formats>>.
 
 Enterprise Search uses an {ref}/ingest.html[Elasticsearch ingest pipeline^] to power the web crawler's binary content extraction.
-The default pipeline, `ent-search-generic-ingestion`, is automatically created when Enterprise Search first starts.
+The default pipeline, `search-default-ingestion`, is automatically created when Enterprise Search first starts.
 
 You can {ref}/ingest.html#create-manage-ingest-pipelines[view^] this pipeline in Kibana.
 Customizing your pipeline usage is also an option.

@@ -13,7 +13,7 @@ The following diagram provides an overview of how content extraction, sync rules
 [.screenshot]
 image::images/pipelines-extraction-sync-rules.png[Architecture diagram of data pipeline with content extraction, sync rules, and ingest pipelines]
 
-By default, only the connector specific logic (2) and the default `ent-search-generic-ingestion` pipeline (6) extract and transform your data, as configured in your deployment.
+By default, only the connector specific logic (2) and the default `search-default-ingestion` pipeline (6) extract and transform your data, as configured in your deployment.
 
 The following tools are available for more advanced use cases:
 
@@ -50,4 +50,4 @@ Use ingest pipelines for data enrichment, normalization, and more.
 
 Elastic connectors use a default ingest pipeline, which you can copy and customize to meet your needs.
 
 Refer to {ref}/ingest-pipeline-search.html[ingest pipelines in Search] in the {es} documentation.

@@ -116,4 +116,18 @@ include::{esql-specs}/bucket.csv-spec[tag=reuseGroupingFunctionWithExpression]
 |===
 include::{esql-specs}/bucket.csv-spec[tag=reuseGroupingFunctionWithExpression-result]
 |===
+Sometimes you need to change the start value of each bucket by a given duration (similar to date histogram
+aggregation's <<search-aggregations-bucket-histogram-aggregation,`offset`>> parameter). To do so, you will need to
+take into account how the language handles expressions within the `STATS` command: if these contain functions or
+arithmetic operators, a virtual `EVAL` is inserted before and/or after the `STATS` command. Consequently, a double
+compensation is needed to adjust the bucketed date value before the aggregation and then again after. For instance,
+inserting a negative offset of `1 hour` to buckets of `1 year` looks like this:
+
+[source.merge.styled,esql]
+----
+include::{esql-specs}/bucket.csv-spec[tag=bucketWithOffset]
+----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/bucket.csv-spec[tag=bucketWithOffset-result]
+|===
 

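The `bucketWithOffset` query that the new paragraph refers to is pulled in from the csv-spec file by the `include::` directive, so it is not visible in this diff. As a sketch of the double compensation being described (it mirrors the example added to the Kibana `BUCKET` function definition in the next hunk), the offset is applied inside `BUCKET(...)` and then reversed outside of it:

[source,esql]
----
FROM employees
| STATS dates = VALUES(birth_date) BY b = BUCKET(birth_date + 1 HOUR, 1 YEAR) - 1 HOUR
| EVAL d_count = MV_COUNT(dates)
| SORT d_count
| LIMIT 3
----

Here `birth_date + 1 HOUR` shifts each date before bucketing, and the trailing `- 1 HOUR` shifts the resulting bucket key back; those are the two halves of the compensation the paragraph above describes.
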
@@ -1598,7 +1598,8 @@
     "FROM employees\n| WHERE hire_date >= \"1985-01-01T00:00:00Z\" AND hire_date < \"1986-01-01T00:00:00Z\"\n| STATS c = COUNT(1) BY b = BUCKET(salary, 5000.)\n| SORT b",
     "FROM sample_data \n| WHERE @timestamp >= NOW() - 1 day and @timestamp < NOW()\n| STATS COUNT(*) BY bucket = BUCKET(@timestamp, 25, NOW() - 1 day, NOW())",
     "FROM employees\n| WHERE hire_date >= \"1985-01-01T00:00:00Z\" AND hire_date < \"1986-01-01T00:00:00Z\"\n| STATS AVG(salary) BY bucket = BUCKET(hire_date, 20, \"1985-01-01T00:00:00Z\", \"1986-01-01T00:00:00Z\")\n| SORT bucket",
-    "FROM employees\n| STATS s1 = b1 + 1, s2 = BUCKET(salary / 1000 + 999, 50.) + 2 BY b1 = BUCKET(salary / 100 + 99, 50.), b2 = BUCKET(salary / 1000 + 999, 50.)\n| SORT b1, b2\n| KEEP s1, b1, s2, b2"
+    "FROM employees\n| STATS s1 = b1 + 1, s2 = BUCKET(salary / 1000 + 999, 50.) + 2 BY b1 = BUCKET(salary / 100 + 99, 50.), b2 = BUCKET(salary / 1000 + 999, 50.)\n| SORT b1, b2\n| KEEP s1, b1, s2, b2",
+    "FROM employees \n| STATS dates = VALUES(birth_date) BY b = BUCKET(birth_date + 1 HOUR, 1 YEAR) - 1 HOUR\n| EVAL d_count = MV_COUNT(dates)\n| SORT d_count\n| LIMIT 3"
   ],
   "preview" : false,
   "snapshot_only" : false

@@ -88,7 +88,7 @@ The `monitor_ml` <<security-privileges, Elasticsearch cluster privilege>> is req
 
 To create the index-specific ML inference pipeline, go to *Search -> Content -> Indices -> <your index> -> Pipelines* in the Kibana UI.
 
-If you only see the `ent-search-generic-ingestion` pipeline, you will need to click *Copy and customize* to create index-specific pipelines.
+If you only see the `search-default-ingestion` pipeline, you will need to click *Copy and customize* to create index-specific pipelines.
 This will create the `{index_name}@ml-inference` pipeline.
 
 Once your index-specific ML inference pipeline is ready, you can add inference processors that use your ML trained models.

@@ -40,7 +40,7 @@ Considerations such as error handling, conditional execution, sequencing, versio
 To this end, when you create indices for search use cases, (including {enterprise-search-ref}/crawler.html[Elastic web crawler], <<es-connectors,connectors>>.
 , and API indices), each index already has a pipeline set up with several processors that optimize your content for search.
 
-This pipeline is called `ent-search-generic-ingestion`.
+This pipeline is called `search-default-ingestion`.
 While it is a "managed" pipeline (meaning it should not be tampered with), you can view its details via the Kibana UI or the Elasticsearch API.
 You can also <<ingest-pipeline-search-details-generic-reference,read more about its contents below>>.
 
@@ -56,14 +56,14 @@ This will not effect existing indices.
 
 Each index also provides the capability to easily create index-specific ingest pipelines with customizable processing.
 If you need that extra flexibility, you can create a custom pipeline by going to your pipeline settings and choosing to "copy and customize".
-This will replace the index's use of `ent-search-generic-ingestion` with 3 newly generated pipelines:
+This will replace the index's use of `search-default-ingestion` with 3 newly generated pipelines:
 
 1. `<index-name>`
 2. `<index-name>@custom`
 3. `<index-name>@ml-inference`
 
-Like `ent-search-generic-ingestion`, the first of these is "managed", but the other two can and should be modified to fit your needs.
+Like `search-default-ingestion`, the first of these is "managed", but the other two can and should be modified to fit your needs.
 You can view these pipelines using the platform tools (Kibana UI, Elasticsearch API), and can also
 <<ingest-pipeline-search-details-specific,read more about their content below>>.
 
 [discrete#ingest-pipeline-search-pipeline-settings]
@@ -123,7 +123,7 @@ If the pipeline is not specified, the underscore-prefixed fields will actually b
 === Details
 
 [discrete#ingest-pipeline-search-details-generic-reference]
-==== `ent-search-generic-ingestion` Reference
+==== `search-default-ingestion` Reference
 
 You can access this pipeline with the <<get-pipeline-api, Elasticsearch Ingest Pipelines API>> or via Kibana's <<create-manage-ingest-pipelines,Stack Management > Ingest Pipelines>> UI.
 
@@ -149,7 +149,7 @@ If you want to make customizations, we recommend you utilize index-specific pipe
 [discrete#ingest-pipeline-search-details-generic-reference-params]
 ===== Control flow parameters
 
-The `ent-search-generic-ingestion` pipeline does not always run all processors.
+The `search-default-ingestion` pipeline does not always run all processors.
 It utilizes a feature of ingest pipelines to <<conditionally-run-processor,conditionally run processors>> based on the contents of each individual document.
 
 * `_extract_binary_content` - if this field is present and has a value of `true` on a source document, the pipeline will attempt to run the `attachment`, `set_body`, and `remove_replacement_chars` processors.

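As a concrete illustration of the conditional behaviour described in the hunk above, documents can opt in or out of these processors per request through the underscore-prefixed flags. This is only a sketch: the index name, document id, and `title` field are invented, and the explicit `?pipeline=` parameter stands in for however the pipeline is attached to the index in a real deployment; only the `_extract_binary_content` and `_reduce_whitespace` flags come from the documentation itself.

[source,console]
----
PUT my-index/_doc/1?pipeline=search-default-ingestion
{
  "title": "A sample document",
  "_extract_binary_content": true,
  "_reduce_whitespace": true
}
----

With `_extract_binary_content` set to `true`, the pipeline will attempt the `attachment`, `set_body`, and `remove_replacement_chars` processors for this document; omitting the flag or setting it to `false` skips them.
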
@@ -167,8 +167,8 @@ See <<ingest-pipeline-search-pipeline-settings>>.
 ==== Index-specific ingest pipelines
 
 In the Kibana UI for your index, by clicking on the Pipelines tab, then *Settings > Copy and customize*, you can quickly generate 3 pipelines which are specific to your index.
-These 3 pipelines replace `ent-search-generic-ingestion` for the index.
-There is nothing lost in this action, as the `<index-name>` pipeline is a superset of functionality over the `ent-search-generic-ingestion` pipeline.
+These 3 pipelines replace `search-default-ingestion` for the index.
+There is nothing lost in this action, as the `<index-name>` pipeline is a superset of functionality over the `search-default-ingestion` pipeline.
 
 [IMPORTANT]
 ====
@@ -179,7 +179,7 @@ Refer to the Elastic subscriptions pages for https://www.elastic.co/subscription
 [discrete#ingest-pipeline-search-details-specific-reference]
 ===== `<index-name>` Reference
 
-This pipeline looks and behaves a lot like the <<ingest-pipeline-search-details-generic-reference,`ent-search-generic-ingestion` pipeline>>, but with <<ingest-pipeline-search-details-specific-reference-processors,two additional processors>>.
+This pipeline looks and behaves a lot like the <<ingest-pipeline-search-details-generic-reference,`search-default-ingestion` pipeline>>, but with <<ingest-pipeline-search-details-specific-reference-processors,two additional processors>>.
 
 [WARNING]
 =========================
@@ -197,7 +197,7 @@ If you want to make customizations, we recommend you utilize <<ingest-pipeline-s
 [discrete#ingest-pipeline-search-details-specific-reference-processors]
 ====== Processors
 
-In addition to the processors inherited from the <<ingest-pipeline-search-details-generic-reference,`ent-search-generic-ingestion` pipeline>>, the index-specific pipeline also defines:
+In addition to the processors inherited from the <<ingest-pipeline-search-details-generic-reference,`search-default-ingestion` pipeline>>, the index-specific pipeline also defines:
 
 * `index_ml_inference_pipeline` - this uses the <<pipeline-processor, Pipeline>> processor to run the `<index-name>@ml-inference` pipeline.
 This processor will only be run if the source document includes a `_run_ml_inference` field with the value `true`.
@@ -206,7 +206,7 @@ In addition to the processors inherited from the <<ingest-pipeline-search-detail
 [discrete#ingest-pipeline-search-details-specific-reference-params]
 ====== Control flow parameters
 
-Like the `ent-search-generic-ingestion` pipeline, the `<index-name>` pipeline does not always run all processors.
+Like the `search-default-ingestion` pipeline, the `<index-name>` pipeline does not always run all processors.
 In addition to the `_extract_binary_content` and `_reduce_whitespace` control flow parameters, the `<index-name>` pipeline also supports:
 
 * `_run_ml_inference` - if this field is present and has a value of `true` on a source document, the pipeline will attempt to run the `index_ml_inference_pipeline` processor.
@@ -220,7 +220,7 @@ See <<ingest-pipeline-search-pipeline-settings>>.
 ===== `<index-name>@ml-inference` Reference
 
 This pipeline is empty to start (no processors), but can be added to via the Kibana UI either through the Pipelines tab of your index, or from the *Stack Management > Ingest Pipelines* page.
-Unlike the `ent-search-generic-ingestion` pipeline and the `<index-name>` pipeline, this pipeline is NOT "managed".
+Unlike the `search-default-ingestion` pipeline and the `<index-name>` pipeline, this pipeline is NOT "managed".
 
 It's possible to add one or more ML inference pipelines to an index in the *Content* UI.
 This pipeline will serve as a container for all of the ML inference pipelines configured for the index.
@@ -241,7 +241,7 @@ The `monitor_ml` Elasticsearch cluster permission is required in order to manage
 
 This pipeline is empty to start (no processors), but can be added to via the Kibana UI either through the Pipelines
 tab of your index, or from the *Stack Management > Ingest Pipelines* page.
-Unlike the `ent-search-generic-ingestion` pipeline and the `<index-name>` pipeline, this pipeline is NOT "managed".
+Unlike the `search-default-ingestion` pipeline and the `<index-name>` pipeline, this pipeline is NOT "managed".
 
 You are encouraged to make additions and edits to this pipeline, provided its name remains the same.
 This provides a convenient hook from which to add custom processing and transformations for your data.
@@ -272,9 +272,12 @@ extraction.
 These changes should be re-applied to each index's `<index-name>@custom` pipeline in order to ensure a consistent data processing experience.
 In 8.5+, the <<ingest-pipeline-search-pipeline-settings, index setting to enable binary content>> is required *in addition* to the configurations mentioned in the {enterprise-search-ref}/crawler-managing.html#crawler-managing-binary-content[Elastic web crawler Guide].
 
-* `ent-search-generic-ingestion` - Since 8.5, Native Connectors, Connector Clients, and new (>8.4) Elastic web crawler indices will all make use of this pipeline by default.
+* `ent-search-generic-ingestion` - Since 8.5, Native Connectors, Connector Clients, and new (>8.4) Elastic web crawler indices all made use of this pipeline by default.
+This pipeline evolved into the `search-default-ingestion` pipeline.
+
+* `search-default-ingestion` - Since 9.0, Connectors have made use of this pipeline by default.
 You can <<ingest-pipeline-search-details-generic-reference, read more about this pipeline>> above.
-As this pipeline is "managed", any modifications that were made to `app_search_crawler` and/or `ent_search_crawler` should NOT be made to `ent-search-generic-ingestion`.
+As this pipeline is "managed", any modifications that were made to `app_search_crawler` and/or `ent_search_crawler` should NOT be made to `search-default-ingestion`.
 Instead, if such customizations are desired, you should utilize <<ingest-pipeline-search-details-specific>>, placing all modifications in the `<index-name>@custom` pipeline(s).
 =============
 

@@ -164,8 +164,8 @@ Now it's time to create an inference pipeline.
 
 1. From the overview page for your `search-photo-comments` index in "Search", click the *Pipelines* tab.
 By default, Elasticsearch does not create any index-specific ingest pipelines.
-2. Because we want to customize these pipelines, we need to *Copy and customize* the `ent-search-generic-ingestion` ingest pipeline.
-Find this option above the settings for the `ent-search-generic-ingestion` ingest pipeline.
+2. Because we want to customize these pipelines, we need to *Copy and customize* the `search-default-ingestion` ingest pipeline.
+Find this option above the settings for the `search-default-ingestion` ingest pipeline.
 This will create two new index-specific ingest pipelines.
 
 Next, we'll add an inference pipeline.

@@ -22,6 +22,9 @@ A <<standard-retriever, retriever>> that replaces the functionality of a traditi
 `knn`::
 A <<knn-retriever, retriever>> that replaces the functionality of a <<search-api-knn, knn search>>.
 
+`rescorer`::
+A <<rescorer-retriever, retriever>> that replaces the functionality of the <<rescore, query rescorer>>.
+
 `rrf`::
 A <<rrf-retriever, retriever>> that produces top documents from <<rrf, reciprocal rank fusion (RRF)>>.
 

@@ -371,6 +374,122 @@ GET movies/_search
 ----
 // TEST[skip:uses ELSER]
 
+[[rescorer-retriever]]
+==== Rescorer Retriever
+
+The `rescorer` retriever re-scores only the results produced by its child retriever.
+For the `standard` and `knn` retrievers, the `window_size` parameter specifies the number of documents examined per shard.
+
+For compound retrievers like `rrf`, the `window_size` parameter defines the total number of documents examined globally.
+
+When using the `rescorer`, an error is returned if the following conditions are not met:
+
+* The minimum configured rescore's `window_size` is:
+** Greater than or equal to the `size` of the parent retriever for nested `rescorer` setups.
+** Greater than or equal to the `size` of the search request when used as the primary retriever in the tree.
+
+* And the maximum rescore's `window_size` is:
+** Smaller than or equal to the `size` or `rank_window_size` of the child retriever.
+
+[discrete]
+[[rescorer-retriever-parameters]]
+===== Parameters
+
+`rescore`::
+(Required. <<rescore, A rescorer definition or an array of rescorer definitions>>)
++
+Defines the <<rescore, rescorers>> applied sequentially to the top documents returned by the child retriever.
+
+`retriever`::
+(Required. <<retriever, retriever>>)
++
+Specifies the child retriever responsible for generating the initial set of top documents to be re-ranked.
+
+`filter`::
+(Optional. <<query-dsl, query object or list of query objects>>)
++
+Applies a <<query-dsl-bool-query, boolean query filter>> to the retriever, ensuring that all documents match the filter criteria without affecting their scores.
+
+[discrete]
+[[rescorer-retriever-example]]
+==== Example
+
+The `rescorer` retriever can be placed at any level within the retriever tree.
+The following example demonstrates a `rescorer` applied to the results produced by an `rrf` retriever:
+
+[source,console]
+----
+GET movies/_search
+{
+  "size": 10, <1>
+  "retriever": {
+    "rescorer": { <2>
+      "rescore": {
+        "query": { <3>
+          "window_size": 50, <4>
+          "rescore_query": {
+            "script_score": {
+              "script": {
+                "source": "cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0",
+                "params": {
+                  "queryVector": [-0.5, 90.0, -10, 14.8, -156.0]
+                }
+              }
+            }
+          }
+        }
+      },
+      "retriever": { <5>
+        "rrf": {
+          "rank_window_size": 100, <6>
+          "retrievers": [
+            {
+              "standard": {
+                "query": {
+                  "sparse_vector": {
+                    "field": "plot_embedding",
+                    "inference_id": "my-elser-model",
+                    "query": "films that explore psychological depths"
+                  }
+                }
+              }
+            },
+            {
+              "standard": {
+                "query": {
+                  "multi_match": {
+                    "query": "crime",
+                    "fields": [
+                      "plot",
+                      "title"
+                    ]
+                  }
+                }
+              }
+            },
+            {
+              "knn": {
+                "field": "vector",
+                "query_vector": [10, 22, 77],
+                "k": 10,
+                "num_candidates": 10
+              }
+            }
+          ]
+        }
+      }
+    }
+  }
+}
+----
+// TEST[skip:uses ELSER]
+<1> Specifies the number of top documents to return in the final response.
+<2> A `rescorer` retriever applied as the final step.
+<3> The definition of the `query` rescorer.
+<4> Defines the number of documents to rescore from the child retriever.
+<5> Specifies the child retriever definition.
+<6> Defines the number of documents returned by the `rrf` retriever, which limits the available documents to
+
 [[text-similarity-reranker-retriever]]
 ==== Text Similarity Re-ranker Retriever
 

|
||||||
* <<search-after, `search_after`>>
|
* <<search-after, `search_after`>>
|
||||||
* <<request-body-search-terminate-after, `terminate_after`>>
|
* <<request-body-search-terminate-after, `terminate_after`>>
|
||||||
* <<search-sort-param, `sort`>>
|
* <<search-sort-param, `sort`>>
|
||||||
* <<rescore, `rescore`>>
|
* <<rescore, `rescore`>> use a <<rescorer-retriever, rescorer retriever>> instead
|
||||||
|
|
|
@@ -25,6 +25,21 @@ TIP: This setup doesn't run multiple {es} nodes or {kib} by default. To create a
 multi-node cluster with {kib}, use Docker Compose instead. See
 <<docker-compose-file>>.
 
+[[docker-wolfi-hardened-image]]
+===== Hardened Docker images
+
+You can also use the hardened https://wolfi.dev/[Wolfi] image for additional security.
+Using Wolfi images requires Docker version 20.10.10 or higher.
+
+To use the Wolfi image, append `-wolfi` to the image tag in the Docker command.
+
+For example:
+
+[source,sh,subs="attributes"]
+----
+docker pull {docker-wolfi-image}
+----
+
 ===== Start a single-node cluster
 
 . Install Docker. Visit https://docs.docker.com/get-docker/[Get Docker] to
@@ -55,12 +70,6 @@ docker pull {docker-image}
 // REVIEWED[DEC.10.24]
 --
 
-Alternatevely, you can use the Wolfi based image. Using Wolfi based images requires Docker version 20.10.10 or superior.
-[source,sh,subs="attributes"]
-----
-docker pull {docker-wolfi-image}
-----
-
 . Optional: Install
 https://docs.sigstore.dev/cosign/system_config/installation/[Cosign] for your
 environment. Then use Cosign to verify the {es} image's signature.

@@ -11,6 +11,7 @@ package org.elasticsearch.entitlement.bridge;
 
 import java.net.URL;
 import java.net.URLStreamHandlerFactory;
+import java.util.List;
 
 public interface EntitlementChecker {
 
@@ -29,4 +30,10 @@ public interface EntitlementChecker {
     void check$java_net_URLClassLoader$(Class<?> callerClass, String name, URL[] urls, ClassLoader parent);
 
     void check$java_net_URLClassLoader$(Class<?> callerClass, String name, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory);
+
+    // Process creation
+    void check$$start(Class<?> callerClass, ProcessBuilder that, ProcessBuilder.Redirect[] redirects);
+
+    void check$java_lang_ProcessBuilder$startPipeline(Class<?> callerClass, List<ProcessBuilder> builders);
+
 }

@@ -29,43 +29,47 @@ import java.util.Set;
 import java.util.stream.Collectors;
 
 import static java.util.Map.entry;
+import static org.elasticsearch.entitlement.qa.common.RestEntitlementsCheckAction.CheckAction.deniedToPlugins;
+import static org.elasticsearch.entitlement.qa.common.RestEntitlementsCheckAction.CheckAction.forPlugins;
 import static org.elasticsearch.rest.RestRequest.Method.GET;
 
 public class RestEntitlementsCheckAction extends BaseRestHandler {
     private static final Logger logger = LogManager.getLogger(RestEntitlementsCheckAction.class);
     private final String prefix;
 
-    private record CheckAction(Runnable action, boolean isServerOnly) {
-        static CheckAction serverOnly(Runnable action) {
+    record CheckAction(Runnable action, boolean isAlwaysDeniedToPlugins) {
+        /**
+         * These cannot be granted to plugins, so our test plugins cannot test the "allowed" case.
+         * Used both for always-denied entitlements as well as those granted only to the server itself.
+         */
+        static CheckAction deniedToPlugins(Runnable action) {
             return new CheckAction(action, true);
         }
 
-        static CheckAction serverAndPlugin(Runnable action) {
+        static CheckAction forPlugins(Runnable action) {
             return new CheckAction(action, false);
         }
     }
 
     private static final Map<String, CheckAction> checkActions = Map.ofEntries(
-        entry("runtime_exit", CheckAction.serverOnly(RestEntitlementsCheckAction::runtimeExit)),
-        entry("runtime_halt", CheckAction.serverOnly(RestEntitlementsCheckAction::runtimeHalt)),
-        entry("create_classloader", CheckAction.serverAndPlugin(RestEntitlementsCheckAction::createClassLoader))
+        entry("runtime_exit", deniedToPlugins(RestEntitlementsCheckAction::runtimeExit)),
+        entry("runtime_halt", deniedToPlugins(RestEntitlementsCheckAction::runtimeHalt)),
+        entry("create_classloader", forPlugins(RestEntitlementsCheckAction::createClassLoader)),
+        // entry("processBuilder_start", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_start)),
+        entry("processBuilder_startPipeline", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_startPipeline))
     );
 
     @SuppressForbidden(reason = "Specifically testing Runtime.exit")
     private static void runtimeExit() {
-        logger.info("Calling Runtime.exit;");
         Runtime.getRuntime().exit(123);
     }
 
     @SuppressForbidden(reason = "Specifically testing Runtime.halt")
     private static void runtimeHalt() {
-        logger.info("Calling Runtime.halt;");
         Runtime.getRuntime().halt(123);
     }
 
     private static void createClassLoader() {
-        logger.info("Calling new URLClassLoader");
         try (var classLoader = new URLClassLoader("test", new URL[0], RestEntitlementsCheckAction.class.getClassLoader())) {
             logger.info("Created URLClassLoader [{}]", classLoader.getName());
         } catch (IOException e) {
@@ -73,6 +77,18 @@ public class RestEntitlementsCheckAction extends BaseRestHandler {
         }
     }
 
+    private static void processBuilder_start() {
+        // TODO: processBuilder().start();
+    }
+
+    private static void processBuilder_startPipeline() {
+        try {
+            ProcessBuilder.startPipeline(List.of());
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
     public RestEntitlementsCheckAction(String prefix) {
         this.prefix = prefix;
     }
@@ -80,7 +96,7 @@ public class RestEntitlementsCheckAction extends BaseRestHandler {
     public static Set<String> getServerAndPluginsCheckActions() {
         return checkActions.entrySet()
             .stream()
-            .filter(kv -> kv.getValue().isServerOnly() == false)
+            .filter(kv -> kv.getValue().isAlwaysDeniedToPlugins() == false)
             .map(Map.Entry::getKey)
             .collect(Collectors.toSet());
     }
@@ -112,6 +128,7 @@ public class RestEntitlementsCheckAction extends BaseRestHandler {
         }
 
         return channel -> {
+            logger.info("Calling check action [{}]", actionName);
            checkAction.action().run();
            channel.sendResponse(new RestResponse(RestStatus.OK, Strings.format("Succesfully executed action [%s]", actionName)));
        };

@ -27,7 +27,6 @@ import java.util.function.Predicate;
|
||||||
import java.util.function.Supplier;
|
import java.util.function.Supplier;
|
||||||
|
|
||||||
public class EntitlementAllowedNonModularPlugin extends Plugin implements ActionPlugin {
|
public class EntitlementAllowedNonModularPlugin extends Plugin implements ActionPlugin {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public List<RestHandler> getRestHandlers(
|
public List<RestHandler> getRestHandlers(
|
||||||
final Settings settings,
|
final Settings settings,
|
||||||
|
|
|
@ -27,7 +27,6 @@ import java.util.function.Predicate;
|
||||||
import java.util.function.Supplier;
|
import java.util.function.Supplier;
|
||||||
|
|
||||||
public class EntitlementAllowedPlugin extends Plugin implements ActionPlugin {
|
public class EntitlementAllowedPlugin extends Plugin implements ActionPlugin {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public List<RestHandler> getRestHandlers(
|
public List<RestHandler> getRestHandlers(
|
||||||
final Settings settings,
|
final Settings settings,
|
||||||
|
|
|
@ -27,7 +27,6 @@ import java.util.function.Predicate;
|
||||||
import java.util.function.Supplier;
|
import java.util.function.Supplier;
|
||||||
|
|
||||||
public class EntitlementDeniedNonModularPlugin extends Plugin implements ActionPlugin {
|
public class EntitlementDeniedNonModularPlugin extends Plugin implements ActionPlugin {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public List<RestHandler> getRestHandlers(
|
public List<RestHandler> getRestHandlers(
|
||||||
final Settings settings,
|
final Settings settings,
|
||||||
|
|
|
@ -27,7 +27,6 @@ import java.util.function.Predicate;
|
||||||
import java.util.function.Supplier;
|
import java.util.function.Supplier;
|
||||||
|
|
||||||
public class EntitlementDeniedPlugin extends Plugin implements ActionPlugin {
|
public class EntitlementDeniedPlugin extends Plugin implements ActionPlugin {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public List<RestHandler> getRestHandlers(
|
public List<RestHandler> getRestHandlers(
|
||||||
final Settings settings,
|
final Settings settings,
|
||||||
|
|
|
@ -53,6 +53,7 @@ import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNN
|
||||||
public class EntitlementInitialization {
|
public class EntitlementInitialization {
|
||||||
|
|
||||||
private static final String POLICY_FILE_NAME = "entitlement-policy.yaml";
|
private static final String POLICY_FILE_NAME = "entitlement-policy.yaml";
|
||||||
|
private static final Module ENTITLEMENTS_MODULE = PolicyManager.class.getModule();
|
||||||
|
|
||||||
private static ElasticsearchEntitlementChecker manager;
|
private static ElasticsearchEntitlementChecker manager;
|
||||||
|
|
||||||
|
@ -92,7 +93,7 @@ public class EntitlementInitialization {
|
||||||
"server",
|
"server",
|
||||||
List.of(new Scope("org.elasticsearch.server", List.of(new ExitVMEntitlement(), new CreateClassLoaderEntitlement())))
|
List.of(new Scope("org.elasticsearch.server", List.of(new ExitVMEntitlement(), new CreateClassLoaderEntitlement())))
|
||||||
);
|
);
|
||||||
return new PolicyManager(serverPolicy, pluginPolicies, EntitlementBootstrap.bootstrapArgs().pluginResolver());
|
return new PolicyManager(serverPolicy, pluginPolicies, EntitlementBootstrap.bootstrapArgs().pluginResolver(), ENTITLEMENTS_MODULE);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Map<String, Policy> createPluginPolicies(Collection<EntitlementBootstrap.PluginData> pluginData) throws IOException {
|
private static Map<String, Policy> createPluginPolicies(Collection<EntitlementBootstrap.PluginData> pluginData) throws IOException {
|
||||||
|
|
|
@ -14,6 +14,7 @@ import org.elasticsearch.entitlement.runtime.policy.PolicyManager;
|
||||||
|
|
||||||
import java.net.URL;
|
import java.net.URL;
|
||||||
import java.net.URLStreamHandlerFactory;
|
import java.net.URLStreamHandlerFactory;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Implementation of the {@link EntitlementChecker} interface, providing additional
|
* Implementation of the {@link EntitlementChecker} interface, providing additional
|
||||||
|
@ -67,4 +68,14 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
|
||||||
) {
|
) {
|
||||||
policyManager.checkCreateClassLoader(callerClass);
|
policyManager.checkCreateClassLoader(callerClass);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$$start(Class<?> callerClass, ProcessBuilder processBuilder, ProcessBuilder.Redirect[] redirects) {
|
||||||
|
policyManager.checkStartProcess(callerClass);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_lang_ProcessBuilder$startPipeline(Class<?> callerClass, List<ProcessBuilder> builders) {
|
||||||
|
policyManager.checkStartProcess(callerClass);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -15,6 +15,7 @@ import org.elasticsearch.entitlement.runtime.api.NotEntitledException;
|
||||||
import org.elasticsearch.logging.LogManager;
|
import org.elasticsearch.logging.LogManager;
|
||||||
import org.elasticsearch.logging.Logger;
|
import org.elasticsearch.logging.Logger;
|
||||||
|
|
||||||
|
import java.lang.StackWalker.StackFrame;
|
||||||
import java.lang.module.ModuleFinder;
|
import java.lang.module.ModuleFinder;
|
||||||
import java.lang.module.ModuleReference;
|
import java.lang.module.ModuleReference;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
@ -29,6 +30,10 @@ import java.util.function.Function;
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
import java.util.stream.Stream;
|
import java.util.stream.Stream;
|
||||||
|
|
||||||
|
import static java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE;
|
||||||
|
import static java.util.Objects.requireNonNull;
|
||||||
|
import static java.util.function.Predicate.not;
|
||||||
|
|
||||||
public class PolicyManager {
|
public class PolicyManager {
|
||||||
private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class);
|
private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class);
|
||||||
|
|
||||||
|
@ -63,6 +68,11 @@ public class PolicyManager {
|
||||||
|
|
||||||
private static final Set<Module> systemModules = findSystemModules();
|
private static final Set<Module> systemModules = findSystemModules();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Frames originating from this module are ignored in the permission logic.
|
||||||
|
*/
|
||||||
|
private final Module entitlementsModule;
|
||||||
|
|
||||||
private static Set<Module> findSystemModules() {
|
private static Set<Module> findSystemModules() {
|
||||||
var systemModulesDescriptors = ModuleFinder.ofSystem()
|
var systemModulesDescriptors = ModuleFinder.ofSystem()
|
||||||
.findAll()
|
.findAll()
|
||||||
|
@ -77,19 +87,44 @@ public class PolicyManager {
|
||||||
.collect(Collectors.toUnmodifiableSet());
|
.collect(Collectors.toUnmodifiableSet());
|
||||||
}
|
}
|
||||||
|
|
||||||
public PolicyManager(Policy defaultPolicy, Map<String, Policy> pluginPolicies, Function<Class<?>, String> pluginResolver) {
|
public PolicyManager(
|
||||||
this.serverEntitlements = buildScopeEntitlementsMap(Objects.requireNonNull(defaultPolicy));
|
Policy defaultPolicy,
|
||||||
this.pluginsEntitlements = Objects.requireNonNull(pluginPolicies)
|
Map<String, Policy> pluginPolicies,
|
||||||
.entrySet()
|
Function<Class<?>, String> pluginResolver,
|
||||||
|
Module entitlementsModule
|
||||||
|
) {
|
||||||
|
this.serverEntitlements = buildScopeEntitlementsMap(requireNonNull(defaultPolicy));
|
||||||
|
this.pluginsEntitlements = requireNonNull(pluginPolicies).entrySet()
|
||||||
.stream()
|
.stream()
|
||||||
.collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, e -> buildScopeEntitlementsMap(e.getValue())));
|
.collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, e -> buildScopeEntitlementsMap(e.getValue())));
|
||||||
this.pluginResolver = pluginResolver;
|
this.pluginResolver = pluginResolver;
|
||||||
|
this.entitlementsModule = entitlementsModule;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Map<String, List<Entitlement>> buildScopeEntitlementsMap(Policy policy) {
|
private static Map<String, List<Entitlement>> buildScopeEntitlementsMap(Policy policy) {
|
||||||
return policy.scopes.stream().collect(Collectors.toUnmodifiableMap(scope -> scope.name, scope -> scope.entitlements));
|
return policy.scopes.stream().collect(Collectors.toUnmodifiableMap(scope -> scope.name, scope -> scope.entitlements));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void checkStartProcess(Class<?> callerClass) {
|
||||||
|
neverEntitled(callerClass, "start process");
|
||||||
|
}
|
||||||
|
|
||||||
|
private void neverEntitled(Class<?> callerClass, String operationDescription) {
|
||||||
|
var requestingModule = requestingModule(callerClass);
|
||||||
|
if (isTriviallyAllowed(requestingModule)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new NotEntitledException(
|
||||||
|
Strings.format(
|
||||||
|
"Not entitled: caller [%s], module [%s], operation [%s]",
|
||||||
|
callerClass,
|
||||||
|
requestingModule.getName(),
|
||||||
|
operationDescription
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
public void checkExitVM(Class<?> callerClass) {
|
public void checkExitVM(Class<?> callerClass) {
|
||||||
checkEntitlementPresent(callerClass, ExitVMEntitlement.class);
|
checkEntitlementPresent(callerClass, ExitVMEntitlement.class);
|
||||||
}
|
}
|
||||||
|
@ -185,7 +220,16 @@ public class PolicyManager {
|
||||||
return requestingModule.isNamed() && requestingModule.getLayer() == ModuleLayer.boot();
|
return requestingModule.isNamed() && requestingModule.getLayer() == ModuleLayer.boot();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Module requestingModule(Class<?> callerClass) {
|
/**
|
||||||
|
* Walks the stack to determine which module's entitlements should be checked.
|
||||||
|
*
|
||||||
|
* @param callerClass when non-null will be used if its module is suitable;
|
||||||
|
* this is a fast-path check that can avoid the stack walk
|
||||||
|
* in cases where the caller class is available.
|
||||||
|
* @return the requesting module, or {@code null} if the entire call stack
|
||||||
|
* comes from modules that are trusted.
|
||||||
|
*/
|
||||||
|
Module requestingModule(Class<?> callerClass) {
|
||||||
if (callerClass != null) {
|
if (callerClass != null) {
|
||||||
Module callerModule = callerClass.getModule();
|
Module callerModule = callerClass.getModule();
|
||||||
if (systemModules.contains(callerModule) == false) {
|
if (systemModules.contains(callerModule) == false) {
|
||||||
|
@ -193,21 +237,34 @@ public class PolicyManager {
|
||||||
return callerModule;
|
return callerModule;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
int framesToSkip = 1 // getCallingClass (this method)
|
Optional<Module> module = StackWalker.getInstance(RETAIN_CLASS_REFERENCE)
|
||||||
+ 1 // the checkXxx method
|
.walk(frames -> findRequestingModule(frames.map(StackFrame::getDeclaringClass)));
|
||||||
+ 1 // the runtime config method
|
|
||||||
+ 1 // the instrumented method
|
|
||||||
;
|
|
||||||
Optional<Module> module = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE)
|
|
||||||
.walk(
|
|
||||||
s -> s.skip(framesToSkip)
|
|
||||||
.map(f -> f.getDeclaringClass().getModule())
|
|
||||||
.filter(m -> systemModules.contains(m) == false)
|
|
||||||
.findFirst()
|
|
||||||
);
|
|
||||||
return module.orElse(null);
|
return module.orElse(null);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Given a stream of classes corresponding to the frames from a {@link StackWalker},
|
||||||
|
* returns the module whose entitlements should be checked.
|
||||||
|
*
|
||||||
|
* @throws NullPointerException if the requesting module is {@code null}
|
||||||
|
*/
|
||||||
|
Optional<Module> findRequestingModule(Stream<Class<?>> classes) {
|
||||||
|
return classes.map(Objects::requireNonNull)
|
||||||
|
.map(PolicyManager::moduleOf)
|
||||||
|
.filter(m -> m != entitlementsModule) // Ignore the entitlements library itself
|
||||||
|
.filter(not(systemModules::contains)) // Skip trusted JDK modules
|
||||||
|
.findFirst();
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Module moduleOf(Class<?> c) {
|
||||||
|
var result = c.getModule();
|
||||||
|
if (result == null) {
|
||||||
|
throw new NullPointerException("Entitlements system does not support non-modular class [" + c.getName() + "]");
|
||||||
|
} else {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private static boolean isTriviallyAllowed(Module requestingModule) {
|
private static boolean isTriviallyAllowed(Module requestingModule) {
|
||||||
if (requestingModule == null) {
|
if (requestingModule == null) {
|
||||||
logger.debug("Entitlement trivially allowed: entire call stack is in composed of classes in system modules");
|
logger.debug("Entitlement trivially allowed: entire call stack is in composed of classes in system modules");
|
||||||
|
|
|
@ -22,6 +22,7 @@ import java.util.Arrays;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
import java.util.stream.Stream;
|
||||||
|
|
||||||
import static java.util.Map.entry;
|
import static java.util.Map.entry;
|
||||||
import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED;
|
import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED;
|
||||||
|
@ -37,11 +38,14 @@ import static org.hamcrest.Matchers.sameInstance;
|
||||||
@ESTestCase.WithoutSecurityManager
|
@ESTestCase.WithoutSecurityManager
|
||||||
public class PolicyManagerTests extends ESTestCase {
|
public class PolicyManagerTests extends ESTestCase {
|
||||||
|
|
||||||
|
private static final Module NO_ENTITLEMENTS_MODULE = null;
|
||||||
|
|
||||||
public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() {
|
public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() {
|
||||||
var policyManager = new PolicyManager(
|
var policyManager = new PolicyManager(
|
||||||
createEmptyTestServerPolicy(),
|
createEmptyTestServerPolicy(),
|
||||||
Map.of("plugin1", createPluginPolicy("plugin.module")),
|
Map.of("plugin1", createPluginPolicy("plugin.module")),
|
||||||
c -> "plugin1"
|
c -> "plugin1",
|
||||||
|
NO_ENTITLEMENTS_MODULE
|
||||||
);
|
);
|
||||||
|
|
||||||
// Any class from the current module (unnamed) will do
|
// Any class from the current module (unnamed) will do
|
||||||
|
@ -62,7 +66,7 @@ public class PolicyManagerTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() {
|
public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() {
|
||||||
var policyManager = new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "plugin1");
|
var policyManager = new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "plugin1", NO_ENTITLEMENTS_MODULE);
|
||||||
|
|
||||||
// Any class from the current module (unnamed) will do
|
// Any class from the current module (unnamed) will do
|
||||||
var callerClass = this.getClass();
|
var callerClass = this.getClass();
|
||||||
|
@ -82,7 +86,7 @@ public class PolicyManagerTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testGetEntitlementsFailureIsCached() {
|
public void testGetEntitlementsFailureIsCached() {
|
||||||
var policyManager = new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "plugin1");
|
var policyManager = new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "plugin1", NO_ENTITLEMENTS_MODULE);
|
||||||
|
|
||||||
// Any class from the current module (unnamed) will do
|
// Any class from the current module (unnamed) will do
|
||||||
var callerClass = this.getClass();
|
var callerClass = this.getClass();
|
||||||
|
@ -103,7 +107,8 @@ public class PolicyManagerTests extends ESTestCase {
|
||||||
var policyManager = new PolicyManager(
|
var policyManager = new PolicyManager(
|
||||||
createEmptyTestServerPolicy(),
|
createEmptyTestServerPolicy(),
|
||||||
Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))),
|
Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))),
|
||||||
c -> "plugin2"
|
c -> "plugin2",
|
||||||
|
NO_ENTITLEMENTS_MODULE
|
||||||
);
|
);
|
||||||
|
|
||||||
// Any class from the current module (unnamed) will do
|
// Any class from the current module (unnamed) will do
|
||||||
|
@ -115,7 +120,7 @@ public class PolicyManagerTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testGetEntitlementsThrowsOnMissingPolicyForServer() throws ClassNotFoundException {
|
public void testGetEntitlementsThrowsOnMissingPolicyForServer() throws ClassNotFoundException {
|
||||||
var policyManager = new PolicyManager(createTestServerPolicy("example"), Map.of(), c -> null);
|
var policyManager = new PolicyManager(createTestServerPolicy("example"), Map.of(), c -> null, NO_ENTITLEMENTS_MODULE);
|
||||||
|
|
||||||
// Tests do not run modular, so we cannot use a server class.
|
// Tests do not run modular, so we cannot use a server class.
|
||||||
// But we know that in production code the server module and its classes are in the boot layer.
|
// But we know that in production code the server module and its classes are in the boot layer.
|
||||||
|
@ -138,7 +143,7 @@ public class PolicyManagerTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testGetEntitlementsReturnsEntitlementsForServerModule() throws ClassNotFoundException {
|
public void testGetEntitlementsReturnsEntitlementsForServerModule() throws ClassNotFoundException {
|
||||||
var policyManager = new PolicyManager(createTestServerPolicy("jdk.httpserver"), Map.of(), c -> null);
|
var policyManager = new PolicyManager(createTestServerPolicy("jdk.httpserver"), Map.of(), c -> null, NO_ENTITLEMENTS_MODULE);
|
||||||
|
|
||||||
// Tests do not run modular, so we cannot use a server class.
|
// Tests do not run modular, so we cannot use a server class.
|
||||||
// But we know that in production code the server module and its classes are in the boot layer.
|
// But we know that in production code the server module and its classes are in the boot layer.
|
||||||
|
@ -155,12 +160,13 @@ public class PolicyManagerTests extends ESTestCase {
|
||||||
public void testGetEntitlementsReturnsEntitlementsForPluginModule() throws IOException, ClassNotFoundException {
|
public void testGetEntitlementsReturnsEntitlementsForPluginModule() throws IOException, ClassNotFoundException {
|
||||||
final Path home = createTempDir();
|
final Path home = createTempDir();
|
||||||
|
|
||||||
Path jar = creteMockPluginJar(home);
|
Path jar = createMockPluginJar(home);
|
||||||
|
|
||||||
var policyManager = new PolicyManager(
|
var policyManager = new PolicyManager(
|
||||||
createEmptyTestServerPolicy(),
|
createEmptyTestServerPolicy(),
|
||||||
Map.of("mock-plugin", createPluginPolicy("org.example.plugin")),
|
Map.of("mock-plugin", createPluginPolicy("org.example.plugin")),
|
||||||
c -> "mock-plugin"
|
c -> "mock-plugin",
|
||||||
|
NO_ENTITLEMENTS_MODULE
|
||||||
);
|
);
|
||||||
|
|
||||||
var layer = createLayerForJar(jar, "org.example.plugin");
|
var layer = createLayerForJar(jar, "org.example.plugin");
|
||||||
|
@ -179,7 +185,8 @@ public class PolicyManagerTests extends ESTestCase {
|
||||||
var policyManager = new PolicyManager(
|
var policyManager = new PolicyManager(
|
||||||
createEmptyTestServerPolicy(),
|
createEmptyTestServerPolicy(),
|
||||||
Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))),
|
Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))),
|
||||||
c -> "plugin2"
|
c -> "plugin2",
|
||||||
|
NO_ENTITLEMENTS_MODULE
|
||||||
);
|
);
|
||||||
|
|
||||||
// Any class from the current module (unnamed) will do
|
// Any class from the current module (unnamed) will do
|
||||||
|
@ -197,6 +204,73 @@ public class PolicyManagerTests extends ESTestCase {
|
||||||
assertThat(entitlementsAgain, sameInstance(cachedResult));
|
assertThat(entitlementsAgain, sameInstance(cachedResult));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testRequestingModuleFastPath() throws IOException, ClassNotFoundException {
|
||||||
|
var callerClass = makeClassInItsOwnModule();
|
||||||
|
assertEquals(callerClass.getModule(), policyManagerWithEntitlementsModule(NO_ENTITLEMENTS_MODULE).requestingModule(callerClass));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testRequestingModuleWithStackWalk() throws IOException, ClassNotFoundException {
|
||||||
|
var requestingClass = makeClassInItsOwnModule();
|
||||||
|
var runtimeClass = makeClassInItsOwnModule(); // A class in the entitlements library itself
|
||||||
|
var ignorableClass = makeClassInItsOwnModule();
|
||||||
|
var systemClass = Object.class;
|
||||||
|
|
||||||
|
var policyManager = policyManagerWithEntitlementsModule(runtimeClass.getModule());
|
||||||
|
|
||||||
|
var requestingModule = requestingClass.getModule();
|
||||||
|
|
||||||
|
assertEquals(
|
||||||
|
"Skip one system frame",
|
||||||
|
requestingModule,
|
||||||
|
policyManager.findRequestingModule(Stream.of(systemClass, requestingClass, ignorableClass)).orElse(null)
|
||||||
|
);
|
||||||
|
assertEquals(
|
||||||
|
"Skip multiple system frames",
|
||||||
|
requestingModule,
|
||||||
|
policyManager.findRequestingModule(Stream.of(systemClass, systemClass, systemClass, requestingClass, ignorableClass))
|
||||||
|
.orElse(null)
|
||||||
|
);
|
||||||
|
assertEquals(
|
||||||
|
"Skip system frame between runtime frames",
|
||||||
|
requestingModule,
|
||||||
|
policyManager.findRequestingModule(Stream.of(runtimeClass, systemClass, runtimeClass, requestingClass, ignorableClass))
|
||||||
|
.orElse(null)
|
||||||
|
);
|
||||||
|
assertEquals(
|
||||||
|
"Skip runtime frame between system frames",
|
||||||
|
requestingModule,
|
||||||
|
policyManager.findRequestingModule(Stream.of(systemClass, runtimeClass, systemClass, requestingClass, ignorableClass))
|
||||||
|
.orElse(null)
|
||||||
|
);
|
||||||
|
assertEquals(
|
||||||
|
"No system frames",
|
||||||
|
requestingModule,
|
||||||
|
policyManager.findRequestingModule(Stream.of(requestingClass, ignorableClass)).orElse(null)
|
||||||
|
);
|
||||||
|
assertEquals(
|
||||||
|
"Skip runtime frames up to the first system frame",
|
||||||
|
requestingModule,
|
||||||
|
policyManager.findRequestingModule(Stream.of(runtimeClass, runtimeClass, systemClass, requestingClass, ignorableClass))
|
||||||
|
.orElse(null)
|
||||||
|
);
|
||||||
|
assertThrows(
|
||||||
|
"Non-modular caller frames are not supported",
|
||||||
|
NullPointerException.class,
|
||||||
|
() -> policyManager.findRequestingModule(Stream.of(systemClass, null))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Class<?> makeClassInItsOwnModule() throws IOException, ClassNotFoundException {
|
||||||
|
final Path home = createTempDir();
|
||||||
|
Path jar = createMockPluginJar(home);
|
||||||
|
var layer = createLayerForJar(jar, "org.example.plugin");
|
||||||
|
return layer.findLoader("org.example.plugin").loadClass("q.B");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static PolicyManager policyManagerWithEntitlementsModule(Module entitlementsModule) {
|
||||||
|
return new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "test", entitlementsModule);
|
||||||
|
}
|
||||||
|
|
||||||
private static Policy createEmptyTestServerPolicy() {
|
private static Policy createEmptyTestServerPolicy() {
|
||||||
return new Policy("server", List.of());
|
return new Policy("server", List.of());
|
||||||
}
|
}
|
||||||
|
@ -219,7 +293,7 @@ public class PolicyManagerTests extends ESTestCase {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Path creteMockPluginJar(Path home) throws IOException {
|
private static Path createMockPluginJar(Path home) throws IOException {
|
||||||
Path jar = home.resolve("mock-plugin.jar");
|
Path jar = home.resolve("mock-plugin.jar");
|
||||||
|
|
||||||
Map<String, CharSequence> sources = Map.ofEntries(
|
Map<String, CharSequence> sources = Map.ofEntries(
|
||||||
|
|
|
@ -96,6 +96,8 @@ public class SearchCancellationIT extends AbstractSearchCancellationTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info("Executing search");
|
logger.info("Executing search");
|
||||||
|
// we have to explicitly set error_trace=true for the later exception check for `TimeSeriesIndexSearcher`
|
||||||
|
client().threadPool().getThreadContext().putHeader("error_trace", "true");
|
||||||
TimeSeriesAggregationBuilder timeSeriesAggregationBuilder = new TimeSeriesAggregationBuilder("test_agg");
|
TimeSeriesAggregationBuilder timeSeriesAggregationBuilder = new TimeSeriesAggregationBuilder("test_agg");
|
||||||
ActionFuture<SearchResponse> searchResponse = prepareSearch("test").setQuery(matchAllQuery())
|
ActionFuture<SearchResponse> searchResponse = prepareSearch("test").setQuery(matchAllQuery())
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
|
|
|
@ -101,7 +101,12 @@ import org.apache.lucene.analysis.tr.ApostropheFilter;
|
||||||
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
|
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
|
||||||
import org.apache.lucene.analysis.util.ElisionFilter;
|
import org.apache.lucene.analysis.util.ElisionFilter;
|
||||||
import org.apache.lucene.util.SetOnce;
|
import org.apache.lucene.util.SetOnce;
|
||||||
|
import org.elasticsearch.common.logging.DeprecationCategory;
|
||||||
|
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||||
import org.elasticsearch.common.regex.Regex;
|
import org.elasticsearch.common.regex.Regex;
|
||||||
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.env.Environment;
|
||||||
|
import org.elasticsearch.index.IndexSettings;
|
||||||
import org.elasticsearch.index.IndexVersions;
|
import org.elasticsearch.index.IndexVersions;
|
||||||
import org.elasticsearch.index.analysis.AnalyzerProvider;
|
import org.elasticsearch.index.analysis.AnalyzerProvider;
|
||||||
import org.elasticsearch.index.analysis.CharFilterFactory;
|
import org.elasticsearch.index.analysis.CharFilterFactory;
|
||||||
|
@ -134,6 +139,8 @@ import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings;
|
||||||
|
|
||||||
public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, ScriptPlugin {
|
public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, ScriptPlugin {
|
||||||
|
|
||||||
|
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(CommonAnalysisPlugin.class);
|
||||||
|
|
||||||
private final SetOnce<ScriptService> scriptServiceHolder = new SetOnce<>();
|
private final SetOnce<ScriptService> scriptServiceHolder = new SetOnce<>();
|
||||||
private final SetOnce<SynonymsManagementAPIService> synonymsManagementServiceHolder = new SetOnce<>();
|
private final SetOnce<SynonymsManagementAPIService> synonymsManagementServiceHolder = new SetOnce<>();
|
||||||
|
|
||||||
|
@ -224,6 +231,28 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||||
filters.put("dictionary_decompounder", requiresAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new));
|
filters.put("dictionary_decompounder", requiresAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new));
|
||||||
filters.put("dutch_stem", DutchStemTokenFilterFactory::new);
|
filters.put("dutch_stem", DutchStemTokenFilterFactory::new);
|
||||||
filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new);
|
filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new);
|
||||||
|
filters.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
|
||||||
|
return new EdgeNGramTokenFilterFactory(indexSettings, environment, name, settings) {
|
||||||
|
@Override
|
||||||
|
public TokenStream create(TokenStream tokenStream) {
|
||||||
|
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) {
|
||||||
|
throw new IllegalArgumentException(
|
||||||
|
"The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
|
||||||
|
+ "Please change the filter name to [edge_ngram] instead."
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
deprecationLogger.warn(
|
||||||
|
DeprecationCategory.ANALYSIS,
|
||||||
|
"edgeNGram_deprecation",
|
||||||
|
"The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
|
||||||
|
+ "Please change the filter name to [edge_ngram] instead."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return super.create(tokenStream);
|
||||||
|
}
|
||||||
|
|
||||||
|
};
|
||||||
|
});
|
||||||
filters.put("elision", requiresAnalysisSettings(ElisionTokenFilterFactory::new));
|
filters.put("elision", requiresAnalysisSettings(ElisionTokenFilterFactory::new));
|
||||||
filters.put("fingerprint", FingerprintTokenFilterFactory::new);
|
filters.put("fingerprint", FingerprintTokenFilterFactory::new);
|
||||||
filters.put("flatten_graph", FlattenGraphTokenFilterFactory::new);
|
filters.put("flatten_graph", FlattenGraphTokenFilterFactory::new);
|
||||||
|
@ -243,6 +272,28 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||||
filters.put("min_hash", MinHashTokenFilterFactory::new);
|
filters.put("min_hash", MinHashTokenFilterFactory::new);
|
||||||
filters.put("multiplexer", MultiplexerTokenFilterFactory::new);
|
filters.put("multiplexer", MultiplexerTokenFilterFactory::new);
|
||||||
filters.put("ngram", NGramTokenFilterFactory::new);
|
filters.put("ngram", NGramTokenFilterFactory::new);
|
||||||
|
filters.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
|
||||||
|
return new NGramTokenFilterFactory(indexSettings, environment, name, settings) {
|
||||||
|
@Override
|
||||||
|
public TokenStream create(TokenStream tokenStream) {
|
||||||
|
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) {
|
||||||
|
throw new IllegalArgumentException(
|
||||||
|
"The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
|
||||||
|
+ "Please change the filter name to [ngram] instead."
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
deprecationLogger.warn(
|
||||||
|
DeprecationCategory.ANALYSIS,
|
||||||
|
"nGram_deprecation",
|
||||||
|
"The [nGram] token filter name is deprecated and will be removed in a future version. "
|
||||||
|
+ "Please change the filter name to [ngram] instead."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return super.create(tokenStream);
|
||||||
|
}
|
||||||
|
|
||||||
|
};
|
||||||
|
});
|
||||||
filters.put("pattern_capture", requiresAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new));
|
filters.put("pattern_capture", requiresAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new));
|
||||||
filters.put("pattern_replace", requiresAnalysisSettings(PatternReplaceTokenFilterFactory::new));
|
filters.put("pattern_replace", requiresAnalysisSettings(PatternReplaceTokenFilterFactory::new));
|
||||||
filters.put("persian_normalization", PersianNormalizationFilterFactory::new);
|
filters.put("persian_normalization", PersianNormalizationFilterFactory::new);
|
||||||
|
@ -294,7 +345,39 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||||
tokenizers.put("simple_pattern", SimplePatternTokenizerFactory::new);
|
tokenizers.put("simple_pattern", SimplePatternTokenizerFactory::new);
|
||||||
tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new);
|
tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new);
|
||||||
tokenizers.put("thai", ThaiTokenizerFactory::new);
|
tokenizers.put("thai", ThaiTokenizerFactory::new);
|
||||||
|
tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
|
||||||
|
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) {
|
||||||
|
throw new IllegalArgumentException(
|
||||||
|
"The [nGram] tokenizer name was deprecated in 7.6. "
|
||||||
|
+ "Please use the tokenizer name to [ngram] for indices created in versions 8 or higher instead."
|
||||||
|
);
|
||||||
|
} else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0)) {
|
||||||
|
deprecationLogger.warn(
|
||||||
|
DeprecationCategory.ANALYSIS,
|
||||||
|
"nGram_tokenizer_deprecation",
|
||||||
|
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
|
||||||
|
+ "Please change the tokenizer name to [ngram] instead."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return new NGramTokenizerFactory(indexSettings, environment, name, settings);
|
||||||
|
});
|
||||||
tokenizers.put("ngram", NGramTokenizerFactory::new);
|
tokenizers.put("ngram", NGramTokenizerFactory::new);
|
||||||
|
tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
|
||||||
|
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) {
|
||||||
|
throw new IllegalArgumentException(
|
||||||
|
"The [edgeNGram] tokenizer name was deprecated in 7.6. "
|
||||||
|
+ "Please use the tokenizer name to [edge_nGram] for indices created in versions 8 or higher instead."
|
||||||
|
);
|
||||||
|
} else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0)) {
|
||||||
|
deprecationLogger.warn(
|
||||||
|
DeprecationCategory.ANALYSIS,
|
||||||
|
"edgeNGram_tokenizer_deprecation",
|
||||||
|
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
|
||||||
|
+ "Please change the tokenizer name to [edge_ngram] instead."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return new EdgeNGramTokenizerFactory(indexSettings, environment, name, settings);
|
||||||
|
});
|
||||||
tokenizers.put("edge_ngram", EdgeNGramTokenizerFactory::new);
|
tokenizers.put("edge_ngram", EdgeNGramTokenizerFactory::new);
|
||||||
tokenizers.put("char_group", CharGroupTokenizerFactory::new);
|
tokenizers.put("char_group", CharGroupTokenizerFactory::new);
|
||||||
tokenizers.put("classic", ClassicTokenizerFactory::new);
|
tokenizers.put("classic", ClassicTokenizerFactory::new);
|
||||||
|
@ -505,17 +588,53 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
|
||||||
tokenizers.add(PreConfiguredTokenizer.singleton("letter", LetterTokenizer::new));
|
tokenizers.add(PreConfiguredTokenizer.singleton("letter", LetterTokenizer::new));
|
||||||
tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new));
|
tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new));
|
||||||
tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new));
|
tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new));
|
||||||
tokenizers.add(
|
tokenizers.add(PreConfiguredTokenizer.indexVersion("edge_ngram", (version) -> {
|
||||||
PreConfiguredTokenizer.indexVersion(
|
if (version.onOrAfter(IndexVersions.V_7_3_0)) {
|
||||||
"edge_ngram",
|
return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
|
||||||
(version) -> new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE)
|
}
|
||||||
)
|
return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
|
||||||
);
|
}));
|
||||||
tokenizers.add(PreConfiguredTokenizer.singleton("pattern", () -> new PatternTokenizer(Regex.compile("\\W+", null), -1)));
|
tokenizers.add(PreConfiguredTokenizer.singleton("pattern", () -> new PatternTokenizer(Regex.compile("\\W+", null), -1)));
|
||||||
tokenizers.add(PreConfiguredTokenizer.singleton("thai", ThaiTokenizer::new));
|
tokenizers.add(PreConfiguredTokenizer.singleton("thai", ThaiTokenizer::new));
|
||||||
// TODO deprecate and remove in API
|
// TODO deprecate and remove in API
|
||||||
// This is already broken with normalization, so backwards compat isn't necessary?
|
// This is already broken with normalization, so backwards compat isn't necessary?
|
||||||
tokenizers.add(PreConfiguredTokenizer.singleton("lowercase", XLowerCaseTokenizer::new));
|
tokenizers.add(PreConfiguredTokenizer.singleton("lowercase", XLowerCaseTokenizer::new));
|
||||||
|
|
||||||
|
tokenizers.add(PreConfiguredTokenizer.indexVersion("nGram", (version) -> {
|
||||||
|
if (version.onOrAfter(IndexVersions.V_8_0_0)) {
|
||||||
|
throw new IllegalArgumentException(
|
||||||
|
"The [nGram] tokenizer name was deprecated in 7.6. "
|
||||||
|
+ "Please use the tokenizer name to [ngram] for indices created in versions 8 or higher instead."
|
||||||
|
);
|
||||||
|
} else if (version.onOrAfter(IndexVersions.V_7_6_0)) {
|
||||||
|
deprecationLogger.warn(
|
||||||
|
DeprecationCategory.ANALYSIS,
|
||||||
|
"nGram_tokenizer_deprecation",
|
||||||
|
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
|
||||||
|
+ "Please change the tokenizer name to [ngram] instead."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return new NGramTokenizer();
|
||||||
|
}));
|
||||||
|
tokenizers.add(PreConfiguredTokenizer.indexVersion("edgeNGram", (version) -> {
|
||||||
|
if (version.onOrAfter(IndexVersions.V_8_0_0)) {
|
||||||
|
throw new IllegalArgumentException(
|
||||||
|
"The [edgeNGram] tokenizer name was deprecated in 7.6. "
|
||||||
|
+ "Please use the tokenizer name to [edge_ngram] for indices created in versions 8 or higher instead."
|
||||||
|
);
|
||||||
|
} else if (version.onOrAfter(IndexVersions.V_7_6_0)) {
|
||||||
|
deprecationLogger.warn(
|
||||||
|
DeprecationCategory.ANALYSIS,
|
||||||
|
"edgeNGram_tokenizer_deprecation",
|
||||||
|
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
|
||||||
|
+ "Please change the tokenizer name to [edge_ngram] instead."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (version.onOrAfter(IndexVersions.V_7_3_0)) {
|
||||||
|
return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
|
||||||
|
}
|
||||||
|
return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
|
||||||
|
}));
|
||||||
tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new));
|
tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new));
|
||||||
|
|
||||||
return tokenizers;
|
return tokenizers;
|
||||||
|
|
|
@ -0,0 +1,292 @@
|
||||||
|
/*
|
||||||
|
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||||
|
* or more contributor license agreements. Licensed under the "Elastic License
|
||||||
|
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||||
|
* Public License v 1"; you may not use this file except in compliance with, at
|
||||||
|
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||||
|
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.elasticsearch.analysis.common;
|
||||||
|
|
||||||
|
import org.apache.lucene.analysis.Tokenizer;
|
||||||
|
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||||
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.env.Environment;
|
||||||
|
import org.elasticsearch.index.IndexVersion;
|
||||||
|
import org.elasticsearch.index.IndexVersions;
|
||||||
|
import org.elasticsearch.index.analysis.TokenizerFactory;
|
||||||
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
import org.elasticsearch.test.IndexSettingsModule;
|
||||||
|
import org.elasticsearch.test.index.IndexVersionUtils;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
public class CommonAnalysisPluginTests extends ESTestCase {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check that the deprecated "nGram" filter throws exception for indices created since 7.0.0 and
|
||||||
|
* logs a warning for earlier indices when the filter is used as a custom filter
|
||||||
|
*/
|
||||||
|
public void testNGramFilterInCustomAnalyzerDeprecationError() throws IOException {
|
||||||
|
final Settings settings = Settings.builder()
|
||||||
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
|
||||||
|
.put(
|
||||||
|
IndexMetadata.SETTING_VERSION_CREATED,
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current())
|
||||||
|
)
|
||||||
|
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
|
||||||
|
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
|
||||||
|
.putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram")
|
||||||
|
.put("index.analysis.filter.my_ngram.type", "nGram")
|
||||||
|
.build();
|
||||||
|
|
||||||
|
try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
|
||||||
|
IllegalArgumentException ex = expectThrows(
|
||||||
|
IllegalArgumentException.class,
|
||||||
|
() -> createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settings), settings, commonAnalysisPlugin)
|
||||||
|
);
|
||||||
|
assertEquals(
|
||||||
|
"The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
|
||||||
|
+ "Please change the filter name to [ngram] instead.",
|
||||||
|
ex.getMessage()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
final Settings settingsPre7 = Settings.builder()
|
||||||
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
|
||||||
|
.put(
|
||||||
|
IndexMetadata.SETTING_VERSION_CREATED,
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_6_0)
|
||||||
|
)
|
||||||
|
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
|
||||||
|
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
|
||||||
|
.putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram")
|
||||||
|
.put("index.analysis.filter.my_ngram.type", "nGram")
|
||||||
|
.build();
|
||||||
|
try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
|
||||||
|
createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settingsPre7), settingsPre7, commonAnalysisPlugin);
|
||||||
|
assertWarnings(
|
||||||
|
"The [nGram] token filter name is deprecated and will be removed in a future version. "
|
||||||
|
+ "Please change the filter name to [ngram] instead."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check that the deprecated "edgeNGram" filter throws exception for indices created since 7.0.0 and
|
||||||
|
* logs a warning for earlier indices when the filter is used as a custom filter
|
||||||
|
*/
|
||||||
|
public void testEdgeNGramFilterInCustomAnalyzerDeprecationError() throws IOException {
|
||||||
|
final Settings settings = Settings.builder()
|
||||||
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
|
||||||
|
.put(
|
||||||
|
IndexMetadata.SETTING_VERSION_CREATED,
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current())
|
||||||
|
)
|
||||||
|
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
|
||||||
|
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
|
||||||
|
.putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram")
|
||||||
|
.put("index.analysis.filter.my_ngram.type", "edgeNGram")
|
||||||
|
.build();
|
||||||
|
|
||||||
|
try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
|
||||||
|
IllegalArgumentException ex = expectThrows(
|
||||||
|
IllegalArgumentException.class,
|
||||||
|
() -> createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settings), settings, commonAnalysisPlugin)
|
||||||
|
);
|
||||||
|
assertEquals(
|
||||||
|
"The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
|
||||||
|
+ "Please change the filter name to [edge_ngram] instead.",
|
||||||
|
ex.getMessage()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
final Settings settingsPre7 = Settings.builder()
|
||||||
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
|
||||||
|
.put(
|
||||||
|
IndexMetadata.SETTING_VERSION_CREATED,
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_6_0)
|
||||||
|
)
|
||||||
|
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
|
||||||
|
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
|
||||||
|
.putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram")
|
||||||
|
.put("index.analysis.filter.my_ngram.type", "edgeNGram")
|
||||||
|
.build();
|
||||||
|
|
||||||
|
try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
|
||||||
|
createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settingsPre7), settingsPre7, commonAnalysisPlugin);
|
||||||
|
assertWarnings(
|
||||||
|
"The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
|
||||||
|
+ "Please change the filter name to [edge_ngram] instead."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check that we log a deprecation warning for "nGram" and "edgeNGram" tokenizer names with 7.6 and
|
||||||
|
* disallow usages for indices created after 8.0
|
||||||
|
*/
|
||||||
|
public void testNGramTokenizerDeprecation() throws IOException {
|
||||||
|
// tests for prebuilt tokenizer
|
||||||
|
doTestPrebuiltTokenizerDeprecation(
|
||||||
|
"nGram",
|
||||||
|
"ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2),
|
||||||
|
false
|
||||||
|
);
|
||||||
|
doTestPrebuiltTokenizerDeprecation(
|
||||||
|
"edgeNGram",
|
||||||
|
"edge_ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2),
|
||||||
|
false
|
||||||
|
);
|
||||||
|
doTestPrebuiltTokenizerDeprecation(
|
||||||
|
"nGram",
|
||||||
|
"ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(
|
||||||
|
random(),
|
||||||
|
IndexVersions.V_7_6_0,
|
||||||
|
IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0))
|
||||||
|
),
|
||||||
|
true
|
||||||
|
);
|
||||||
|
doTestPrebuiltTokenizerDeprecation(
|
||||||
|
"edgeNGram",
|
||||||
|
"edge_ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(
|
||||||
|
random(),
|
||||||
|
IndexVersions.V_7_6_0,
|
||||||
|
IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0))
|
||||||
|
),
|
||||||
|
true
|
||||||
|
);
|
||||||
|
expectThrows(
|
||||||
|
IllegalArgumentException.class,
|
||||||
|
() -> doTestPrebuiltTokenizerDeprecation(
|
||||||
|
"nGram",
|
||||||
|
"ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()),
|
||||||
|
true
|
||||||
|
)
|
||||||
|
);
|
||||||
|
expectThrows(
|
||||||
|
IllegalArgumentException.class,
|
||||||
|
() -> doTestPrebuiltTokenizerDeprecation(
|
||||||
|
"edgeNGram",
|
||||||
|
"edge_ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()),
|
||||||
|
true
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
// same batch of tests for custom tokenizer definition in the settings
|
||||||
|
doTestCustomTokenizerDeprecation(
|
||||||
|
"nGram",
|
||||||
|
"ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2),
|
||||||
|
false
|
||||||
|
);
|
||||||
|
doTestCustomTokenizerDeprecation(
|
||||||
|
"edgeNGram",
|
||||||
|
"edge_ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2),
|
||||||
|
false
|
||||||
|
);
|
||||||
|
doTestCustomTokenizerDeprecation(
|
||||||
|
"nGram",
|
||||||
|
"ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(
|
||||||
|
random(),
|
||||||
|
IndexVersions.V_7_6_0,
|
||||||
|
IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0))
|
||||||
|
),
|
||||||
|
true
|
||||||
|
);
|
||||||
|
doTestCustomTokenizerDeprecation(
|
||||||
|
"edgeNGram",
|
||||||
|
"edge_ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(
|
||||||
|
random(),
|
||||||
|
IndexVersions.V_7_6_0,
|
||||||
|
IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0))
|
||||||
|
),
|
||||||
|
true
|
||||||
|
);
|
||||||
|
expectThrows(
|
||||||
|
IllegalArgumentException.class,
|
||||||
|
() -> doTestCustomTokenizerDeprecation(
|
||||||
|
"nGram",
|
||||||
|
"ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()),
|
||||||
|
true
|
||||||
|
)
|
||||||
|
);
|
||||||
|
expectThrows(
|
||||||
|
IllegalArgumentException.class,
|
||||||
|
() -> doTestCustomTokenizerDeprecation(
|
||||||
|
"edgeNGram",
|
||||||
|
"edge_ngram",
|
||||||
|
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()),
|
||||||
|
true
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void doTestPrebuiltTokenizerDeprecation(String deprecatedName, String replacement, IndexVersion version, boolean expectWarning)
|
||||||
|
throws IOException {
|
||||||
|
final Settings settings = Settings.builder()
|
||||||
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
|
||||||
|
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
|
||||||
|
Map<String, TokenizerFactory> tokenizers = createTestAnalysis(
|
||||||
|
IndexSettingsModule.newIndexSettings("index", settings),
|
||||||
|
settings,
|
||||||
|
commonAnalysisPlugin
|
||||||
|
).tokenizer;
|
||||||
|
TokenizerFactory tokenizerFactory = tokenizers.get(deprecatedName);
|
||||||
|
|
||||||
|
Tokenizer tokenizer = tokenizerFactory.create();
|
||||||
|
assertNotNull(tokenizer);
|
||||||
|
if (expectWarning) {
|
||||||
|
assertWarnings(
|
||||||
|
"The ["
|
||||||
|
+ deprecatedName
|
||||||
|
+ "] tokenizer name is deprecated and will be removed in a future version. "
|
||||||
|
+ "Please change the tokenizer name to ["
|
||||||
|
+ replacement
|
||||||
|
+ "] instead."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void doTestCustomTokenizerDeprecation(String deprecatedName, String replacement, IndexVersion version, boolean expectWarning)
|
||||||
|
throws IOException {
|
||||||
|
final Settings settings = Settings.builder()
|
||||||
|
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
|
||||||
|
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
|
||||||
|
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
|
||||||
|
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "my_tokenizer")
|
||||||
|
.put("index.analysis.tokenizer.my_tokenizer.type", deprecatedName)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
|
||||||
|
createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settings), settings, commonAnalysisPlugin);
|
||||||
|
|
||||||
|
if (expectWarning) {
|
||||||
|
assertWarnings(
|
||||||
|
"The ["
|
||||||
|
+ deprecatedName
|
||||||
|
+ "] tokenizer name is deprecated and will be removed in a future version. "
|
||||||
|
+ "Please change the tokenizer name to ["
|
||||||
|
+ replacement
|
||||||
|
+ "] instead."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -34,7 +34,7 @@ import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTok
|
||||||
|
|
||||||
public class EdgeNGramTokenizerTests extends ESTokenStreamTestCase {
|
public class EdgeNGramTokenizerTests extends ESTokenStreamTestCase {
|
||||||
|
|
||||||
private static IndexAnalyzers buildAnalyzers(IndexVersion version, String tokenizer) throws IOException {
|
private IndexAnalyzers buildAnalyzers(IndexVersion version, String tokenizer) throws IOException {
|
||||||
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
|
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
|
||||||
Settings indexSettings = Settings.builder()
|
Settings indexSettings = Settings.builder()
|
||||||
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
|
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
|
||||||
|
@ -54,6 +54,7 @@ public class EdgeNGramTokenizerTests extends ESTokenStreamTestCase {
|
||||||
assertNotNull(analyzer);
|
assertNotNull(analyzer);
|
||||||
assertAnalyzesTo(analyzer, "test", new String[] { "t", "te" });
|
assertAnalyzesTo(analyzer, "test", new String[] { "t", "te" });
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testCustomTokenChars() throws IOException {
|
public void testCustomTokenChars() throws IOException {
|
||||||
|
|
|
@ -161,7 +161,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase {
|
||||||
for (int i = 0; i < iters; i++) {
|
for (int i = 0; i < iters; i++) {
|
||||||
final Index index = new Index("test", "_na_");
|
final Index index = new Index("test", "_na_");
|
||||||
final String name = "ngr";
|
final String name = "ngr";
|
||||||
IndexVersion v = IndexVersionUtils.randomVersion(random());
|
IndexVersion v = IndexVersionUtils.randomVersion();
|
||||||
Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3);
|
Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3);
|
||||||
boolean reverse = random().nextBoolean();
|
boolean reverse = random().nextBoolean();
|
||||||
if (reverse) {
|
if (reverse) {
|
||||||
|
|
|
@ -56,7 +56,7 @@ public class PersianAnalyzerProviderTests extends ESTokenStreamTestCase {
|
||||||
public void testPersianAnalyzerPreLucene10() throws IOException {
|
public void testPersianAnalyzerPreLucene10() throws IOException {
|
||||||
IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween(
|
IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween(
|
||||||
random(),
|
random(),
|
||||||
IndexVersionUtils.getFirstVersion(),
|
IndexVersionUtils.getLowestReadCompatibleVersion(),
|
||||||
IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)
|
IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)
|
||||||
);
|
);
|
||||||
Settings settings = ESTestCase.indexSettings(1, 1)
|
Settings settings = ESTestCase.indexSettings(1, 1)
|
||||||
|
|
|
@@ -57,7 +57,7 @@ public class RomanianAnalyzerTests extends ESTokenStreamTestCase {
     public void testRomanianAnalyzerPreLucene10() throws IOException {
         IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween(
             random(),
-            IndexVersionUtils.getFirstVersion(),
+            IndexVersionUtils.getLowestReadCompatibleVersion(),
             IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)
         );
         Settings settings = ESTestCase.indexSettings(1, 1)

@@ -39,7 +39,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
     public void testEnglishFilterFactory() throws IOException {
         int iters = scaledRandomIntBetween(20, 100);
         for (int i = 0; i < iters; i++) {
-            IndexVersion v = IndexVersionUtils.randomVersion(random());
+            IndexVersion v = IndexVersionUtils.randomVersion();
             Settings settings = Settings.builder()
                 .put("index.analysis.filter.my_english.type", "stemmer")
                 .put("index.analysis.filter.my_english.language", "english")
@@ -66,7 +66,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
         int iters = scaledRandomIntBetween(20, 100);
         for (int i = 0; i < iters; i++) {
 
-            IndexVersion v = IndexVersionUtils.randomVersion(random());
+            IndexVersion v = IndexVersionUtils.randomVersion();
             Settings settings = Settings.builder()
                 .put("index.analysis.filter.my_porter2.type", "stemmer")
                 .put("index.analysis.filter.my_porter2.language", "porter2")
@@ -90,7 +90,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
     }
 
     public void testMultipleLanguagesThrowsException() throws IOException {
-        IndexVersion v = IndexVersionUtils.randomVersion(random());
+        IndexVersion v = IndexVersionUtils.randomVersion();
         Settings settings = Settings.builder()
             .put("index.analysis.filter.my_english.type", "stemmer")
             .putList("index.analysis.filter.my_english.language", "english", "light_english")
@@ -142,7 +142,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
     }
 
     public void testKpDeprecation() throws IOException {
-        IndexVersion v = IndexVersionUtils.randomVersion(random());
+        IndexVersion v = IndexVersionUtils.randomVersion();
         Settings settings = Settings.builder()
             .put("index.analysis.filter.my_kp.type", "stemmer")
             .put("index.analysis.filter.my_kp.language", "kp")
@@ -155,7 +155,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
     }
 
     public void testLovinsDeprecation() throws IOException {
-        IndexVersion v = IndexVersionUtils.randomVersion(random());
+        IndexVersion v = IndexVersionUtils.randomVersion();
         Settings settings = Settings.builder()
             .put("index.analysis.filter.my_lovins.type", "stemmer")
            .put("index.analysis.filter.my_lovins.language", "lovins")

@@ -9,8 +9,10 @@
 
 package org.elasticsearch.painless.spi;
 
+import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.painless.spi.annotation.WhitelistAnnotationParser;
 
+import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.LineNumberReader;
 import java.lang.reflect.Constructor;
@@ -140,7 +142,7 @@ public final class WhitelistLoader {
      * }
      * }
      */
-    public static Whitelist loadFromResourceFiles(Class<?> resource, Map<String, WhitelistAnnotationParser> parsers, String... filepaths) {
+    public static Whitelist loadFromResourceFiles(Class<?> owner, Map<String, WhitelistAnnotationParser> parsers, String... filepaths) {
         List<WhitelistClass> whitelistClasses = new ArrayList<>();
         List<WhitelistMethod> whitelistStatics = new ArrayList<>();
         List<WhitelistClassBinding> whitelistClassBindings = new ArrayList<>();
@@ -153,7 +155,7 @@ public final class WhitelistLoader {
 
             try (
                 LineNumberReader reader = new LineNumberReader(
-                    new InputStreamReader(resource.getResourceAsStream(filepath), StandardCharsets.UTF_8)
+                    new InputStreamReader(getResourceAsStream(owner, filepath), StandardCharsets.UTF_8)
                 )
             ) {
 
@@ -483,16 +485,40 @@ public final class WhitelistLoader {
                 if (javaClassName != null) {
                     throw new IllegalArgumentException("invalid definition: expected closing bracket");
                 }
+            } catch (ResourceNotFoundException e) {
+                throw e; // rethrow
             } catch (Exception exception) {
                 throw new RuntimeException("error in [" + filepath + "] at line [" + number + "]", exception);
             }
         }
 
-        ClassLoader loader = AccessController.doPrivileged((PrivilegedAction<ClassLoader>) resource::getClassLoader);
+        ClassLoader loader = AccessController.doPrivileged((PrivilegedAction<ClassLoader>) owner::getClassLoader);
 
         return new Whitelist(loader, whitelistClasses, whitelistStatics, whitelistClassBindings, Collections.emptyList());
     }
 
+    private static InputStream getResourceAsStream(Class<?> owner, String name) {
+        InputStream stream = owner.getResourceAsStream(name);
+        if (stream == null) {
+            String msg = "Whitelist file ["
+                + owner.getPackageName().replace(".", "/")
+                + "/"
+                + name
+                + "] not found from owning class ["
+                + owner.getName()
+                + "].";
+            if (owner.getModule().isNamed()) {
+                msg += " Check that the file exists and the package ["
+                    + owner.getPackageName()
+                    + "] is opened "
+                    + "to module "
+                    + WhitelistLoader.class.getModule().getName();
+            }
+            throw new ResourceNotFoundException(msg);
+        }
+        return stream;
+    }
+
     private static List<Object> parseWhitelistAnnotations(Map<String, WhitelistAnnotationParser> parsers, String line) {
 
         List<Object> annotations;

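Note: the new getResourceAsStream helper above turns a missing whitelist file into a ResourceNotFoundException that names the owning class and the expected path, instead of the NullPointerException a bare Class#getResourceAsStream would have caused. A minimal sketch of a caller, using only the two-argument overload exercised by the tests below; the file name is a hypothetical example, not part of this change:

    // Resolves "my_allowlist.txt" relative to the package of Whitelist.class;
    // throws ResourceNotFoundException if the file is missing or its package is not opened to the loading module.
    Whitelist whitelist = WhitelistLoader.loadFromResourceFiles(Whitelist.class, "my_allowlist.txt");
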
@@ -9,6 +9,7 @@
 
 package org.elasticsearch.painless;
 
+import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.painless.spi.WhitelistClass;
 import org.elasticsearch.painless.spi.WhitelistLoader;
@@ -17,10 +18,18 @@ import org.elasticsearch.painless.spi.annotation.DeprecatedAnnotation;
 import org.elasticsearch.painless.spi.annotation.NoImportAnnotation;
 import org.elasticsearch.painless.spi.annotation.WhitelistAnnotationParser;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.compiler.InMemoryJavaCompiler;
+import org.elasticsearch.test.jar.JarUtils;
 
+import java.lang.ModuleLayer.Controller;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
 import java.util.HashMap;
 import java.util.Map;
 
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.notNullValue;
+
 public class WhitelistLoaderTests extends ESTestCase {
 
     public void testUnknownAnnotations() {
@@ -96,4 +105,52 @@ public class WhitelistLoaderTests extends ESTestCase {
 
         assertEquals(3, count);
     }
 
+    public void testMissingWhitelistResource() {
+        var e = expectThrows(ResourceNotFoundException.class, () -> WhitelistLoader.loadFromResourceFiles(Whitelist.class, "missing.txt"));
+        assertThat(
+            e.getMessage(),
+            equalTo(
+                "Whitelist file [org/elasticsearch/painless/spi/missing.txt] not found"
+                    + " from owning class [org.elasticsearch.painless.spi.Whitelist]."
+            )
+        );
+    }
+
+    public void testMissingWhitelistResourceInModule() throws Exception {
+        Map<String, CharSequence> sources = new HashMap<>();
+        sources.put("module-info", "module m {}");
+        sources.put("p.TestOwner", "package p; public class TestOwner { }");
+        var classToBytes = InMemoryJavaCompiler.compile(sources);
+
+        Path dir = createTempDir(getTestName());
+        Path jar = dir.resolve("m.jar");
+        Map<String, byte[]> jarEntries = new HashMap<>();
+        jarEntries.put("module-info.class", classToBytes.get("module-info"));
+        jarEntries.put("p/TestOwner.class", classToBytes.get("p.TestOwner"));
+        jarEntries.put("p/resource.txt", "# test resource".getBytes(StandardCharsets.UTF_8));
+        JarUtils.createJarWithEntries(jar, jarEntries);
+
+        try (var loader = JarUtils.loadJar(jar)) {
+            Controller controller = JarUtils.loadModule(jar, loader.classloader(), "m");
+            Module module = controller.layer().findModule("m").orElseThrow();
+
+            Class<?> ownerClass = module.getClassLoader().loadClass("p.TestOwner");
+
+            // first check we get a nice error message when accessing the resource
+            var e = expectThrows(ResourceNotFoundException.class, () -> WhitelistLoader.loadFromResourceFiles(ownerClass, "resource.txt"));
+            assertThat(
+                e.getMessage(),
+                equalTo(
+                    "Whitelist file [p/resource.txt] not found from owning class [p.TestOwner]."
+                        + " Check that the file exists and the package [p] is opened to module null"
+                )
+            );
+
+            // now check we can actually read it once the package is opened to us
+            controller.addOpens(module, "p", WhitelistLoader.class.getModule());
+            var whitelist = WhitelistLoader.loadFromResourceFiles(ownerClass, "resource.txt");
+            assertThat(whitelist, notNullValue());
+        }
+    }
 }

@@ -238,8 +238,6 @@ tests:
 - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT
   method: test {p0=data_stream/120_data_streams_stats/Multiple data stream}
   issue: https://github.com/elastic/elasticsearch/issues/118217
-- class: org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT
-  issue: https://github.com/elastic/elasticsearch/issues/118224
 - class: org.elasticsearch.packaging.test.ArchiveTests
   method: test60StartAndStop
   issue: https://github.com/elastic/elasticsearch/issues/118216
@@ -290,6 +288,17 @@ tests:
 - class: org.elasticsearch.cluster.service.MasterServiceTests
   method: testThreadContext
   issue: https://github.com/elastic/elasticsearch/issues/118914
+- class: org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT
+  method: test {yaml=indices.create/20_synthetic_source/create index with use_synthetic_source}
+  issue: https://github.com/elastic/elasticsearch/issues/118955
+- class: org.elasticsearch.repositories.blobstore.testkit.analyze.SecureHdfsRepositoryAnalysisRestIT
+  issue: https://github.com/elastic/elasticsearch/issues/118970
+- class: org.elasticsearch.xpack.security.authc.AuthenticationServiceTests
+  method: testInvalidToken
+  issue: https://github.com/elastic/elasticsearch/issues/119019
+- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
+  method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set}
+  issue: https://github.com/elastic/elasticsearch/issues/116777
 
 # Examples:
 #

@@ -0,0 +1,175 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.http;
+
+import org.apache.http.entity.ContentType;
+import org.apache.http.nio.entity.NByteArrayEntity;
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.action.search.MultiSearchRequest;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.client.Request;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.transport.TransportMessageListener;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xcontent.XContentType;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.Optional;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery;
+
+public class SearchErrorTraceIT extends HttpSmokeTestCase {
+    private AtomicBoolean hasStackTrace;
+
+    @Before
+    private void setupMessageListener() {
+        internalCluster().getDataNodeInstances(TransportService.class).forEach(ts -> {
+            ts.addMessageListener(new TransportMessageListener() {
+                @Override
+                public void onResponseSent(long requestId, String action, Exception error) {
+                    TransportMessageListener.super.onResponseSent(requestId, action, error);
+                    if (action.startsWith("indices:data/read/search")) {
+                        Optional<Throwable> throwable = ExceptionsHelper.unwrapCausesAndSuppressed(
+                            error,
+                            t -> t.getStackTrace().length > 0
+                        );
+                        hasStackTrace.set(throwable.isPresent());
+                    }
+                }
+            });
+        });
+    }
+
+    private void setupIndexWithDocs() {
+        createIndex("test1", "test2");
+        indexRandom(
+            true,
+            prepareIndex("test1").setId("1").setSource("field", "foo"),
+            prepareIndex("test2").setId("10").setSource("field", 5)
+        );
+        refresh();
+    }
+
+    public void testSearchFailingQueryErrorTraceDefault() throws IOException {
+        hasStackTrace = new AtomicBoolean();
+        setupIndexWithDocs();
+
+        Request searchRequest = new Request("POST", "/_search");
+        searchRequest.setJsonEntity("""
+            {
+                "query": {
+                    "simple_query_string" : {
+                        "query": "foo",
+                        "fields": ["field"]
+                    }
+                }
+            }
+            """);
+        getRestClient().performRequest(searchRequest);
+        assertFalse(hasStackTrace.get());
+    }
+
+    public void testSearchFailingQueryErrorTraceTrue() throws IOException {
+        hasStackTrace = new AtomicBoolean();
+        setupIndexWithDocs();
+
+        Request searchRequest = new Request("POST", "/_search");
+        searchRequest.setJsonEntity("""
+            {
+                "query": {
+                    "simple_query_string" : {
+                        "query": "foo",
+                        "fields": ["field"]
+                    }
+                }
+            }
+            """);
+        searchRequest.addParameter("error_trace", "true");
+        getRestClient().performRequest(searchRequest);
+        assertTrue(hasStackTrace.get());
+    }
+
+    public void testSearchFailingQueryErrorTraceFalse() throws IOException {
+        hasStackTrace = new AtomicBoolean();
+        setupIndexWithDocs();
+
+        Request searchRequest = new Request("POST", "/_search");
+        searchRequest.setJsonEntity("""
+            {
+                "query": {
+                    "simple_query_string" : {
+                        "query": "foo",
+                        "fields": ["field"]
+                    }
+                }
+            }
+            """);
+        searchRequest.addParameter("error_trace", "false");
+        getRestClient().performRequest(searchRequest);
+        assertFalse(hasStackTrace.get());
+    }
+
+    public void testMultiSearchFailingQueryErrorTraceDefault() throws IOException {
+        hasStackTrace = new AtomicBoolean();
+        setupIndexWithDocs();
+
+        XContentType contentType = XContentType.JSON;
+        MultiSearchRequest multiSearchRequest = new MultiSearchRequest().add(
+            new SearchRequest("test*").source(new SearchSourceBuilder().query(simpleQueryStringQuery("foo").field("field")))
+        );
+        Request searchRequest = new Request("POST", "/_msearch");
+        byte[] requestBody = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, contentType.xContent());
+        searchRequest.setEntity(
+            new NByteArrayEntity(requestBody, ContentType.create(contentType.mediaTypeWithoutParameters(), (Charset) null))
+        );
+        getRestClient().performRequest(searchRequest);
+        assertFalse(hasStackTrace.get());
+    }
+
+    public void testMultiSearchFailingQueryErrorTraceTrue() throws IOException {
+        hasStackTrace = new AtomicBoolean();
+        setupIndexWithDocs();
+
+        XContentType contentType = XContentType.JSON;
+        MultiSearchRequest multiSearchRequest = new MultiSearchRequest().add(
+            new SearchRequest("test*").source(new SearchSourceBuilder().query(simpleQueryStringQuery("foo").field("field")))
+        );
+        Request searchRequest = new Request("POST", "/_msearch");
+        byte[] requestBody = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, contentType.xContent());
+        searchRequest.setEntity(
+            new NByteArrayEntity(requestBody, ContentType.create(contentType.mediaTypeWithoutParameters(), (Charset) null))
+        );
+        searchRequest.addParameter("error_trace", "true");
+        getRestClient().performRequest(searchRequest);
+        assertTrue(hasStackTrace.get());
+    }
+
+    public void testMultiSearchFailingQueryErrorTraceFalse() throws IOException {
+        hasStackTrace = new AtomicBoolean();
+        setupIndexWithDocs();
+
+        XContentType contentType = XContentType.JSON;
+        MultiSearchRequest multiSearchRequest = new MultiSearchRequest().add(
+            new SearchRequest("test*").source(new SearchSourceBuilder().query(simpleQueryStringQuery("foo").field("field")))
+        );
+        Request searchRequest = new Request("POST", "/_msearch");
+        byte[] requestBody = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, contentType.xContent());
+        searchRequest.setEntity(
+            new NByteArrayEntity(requestBody, ContentType.create(contentType.mediaTypeWithoutParameters(), (Charset) null))
+        );
+        searchRequest.addParameter("error_trace", "false");
+        getRestClient().performRequest(searchRequest);
+
+        assertFalse(hasStackTrace.get());
+    }
+}

@@ -70,4 +70,5 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task ->
   task.skipTest("search.vectors/41_knn_search_bbq_hnsw/Test knn search", "Scoring has changed in latest versions")
   task.skipTest("search.vectors/42_knn_search_bbq_flat/Test knn search", "Scoring has changed in latest versions")
   task.skipTest("synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set", "Can't work until auto-expand replicas is 0-1 for synonyms index")
+  task.skipTest("search/90_search_after/_shard_doc sort", "restriction has been lifted in latest versions")
 })

@@ -2012,3 +2012,43 @@ synthetic_source with copy_to pointing inside dynamic object:
       hits.hits.2.fields:
         c.copy.keyword: [ "hello", "zap" ]
 
+---
+create index with use_synthetic_source:
+  - requires:
+      cluster_features: ["mapper.synthetic_recovery_source"]
+      reason: requires synthetic recovery source
+
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            index:
+              recovery:
+                use_synthetic_source: true
+              mapping:
+                source:
+                  mode: synthetic
+
+  - do:
+      indices.get_settings: {}
+  - match: { test.settings.index.mapping.source.mode: synthetic}
+  - is_true: test.settings.index.recovery.use_synthetic_source
+
+  - do:
+      bulk:
+        index: test
+        refresh: true
+        body:
+          - '{ "create": { } }'
+          - '{ "field": "aaaa" }'
+          - '{ "create": { } }'
+          - '{ "field": "bbbb" }'
+
+  - do:
+      indices.disk_usage:
+        index: test
+        run_expensive_tasks: true
+        flush: false
+  - gt: { test.store_size_in_bytes: 0 }
+  - is_false: test.fields._recovery_source

@@ -0,0 +1,225 @@
+setup:
+  - requires:
+      cluster_features: [ "search.retriever.rescorer.enabled" ]
+      reason: "Support for rescorer retriever"
+
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            number_of_shards: 1
+            number_of_replicas: 0
+          mappings:
+            properties:
+              available:
+                type: boolean
+              features:
+                type: rank_features
+
+  - do:
+      bulk:
+        refresh: true
+        index: test
+        body:
+          - '{"index": {"_id": 1 }}'
+          - '{"features": { "first_stage": 1, "second_stage": 10}, "available": true, "group": 1}'
+          - '{"index": {"_id": 2 }}'
+          - '{"features": { "first_stage": 2, "second_stage": 9}, "available": false, "group": 1}'
+          - '{"index": {"_id": 3 }}'
+          - '{"features": { "first_stage": 3, "second_stage": 8}, "available": false, "group": 3}'
+          - '{"index": {"_id": 4 }}'
+          - '{"features": { "first_stage": 4, "second_stage": 7}, "available": true, "group": 1}'
+          - '{"index": {"_id": 5 }}'
+          - '{"features": { "first_stage": 5, "second_stage": 6}, "available": true, "group": 3}'
+          - '{"index": {"_id": 6 }}'
+          - '{"features": { "first_stage": 6, "second_stage": 5}, "available": false, "group": 2}'
+          - '{"index": {"_id": 7 }}'
+          - '{"features": { "first_stage": 7, "second_stage": 4}, "available": true, "group": 3}'
+          - '{"index": {"_id": 8 }}'
+          - '{"features": { "first_stage": 8, "second_stage": 3}, "available": true, "group": 1}'
+          - '{"index": {"_id": 9 }}'
+          - '{"features": { "first_stage": 9, "second_stage": 2}, "available": true, "group": 2}'
+          - '{"index": {"_id": 10 }}'
+          - '{"features": { "first_stage": 10, "second_stage": 1}, "available": false, "group": 1}'
+
+---
+"Rescorer retriever basic":
+  - do:
+      search:
+        index: test
+        body:
+          retriever:
+            rescorer:
+              rescore:
+                window_size: 10
+                query:
+                  rescore_query:
+                    rank_feature:
+                      field: "features.second_stage"
+                      linear: { }
+                  query_weight: 0
+              retriever:
+                standard:
+                  query:
+                    rank_feature:
+                      field: "features.first_stage"
+                      linear: { }
+          size: 2
+
+  - match: { hits.total.value: 10 }
+  - match: { hits.hits.0._id: "1" }
+  - match: { hits.hits.0._score: 10.0 }
+  - match: { hits.hits.1._id: "2" }
+  - match: { hits.hits.1._score: 9.0 }
+
+  - do:
+      search:
+        index: test
+        body:
+          retriever:
+            rescorer:
+              rescore:
+                window_size: 3
+                query:
+                  rescore_query:
+                    rank_feature:
+                      field: "features.second_stage"
+                      linear: {}
+                  query_weight: 0
+              retriever:
+                standard:
+                  query:
+                    rank_feature:
+                      field: "features.first_stage"
+                      linear: {}
+          size: 2
+
+  - match: {hits.total.value: 10}
+  - match: {hits.hits.0._id: "8"}
+  - match: { hits.hits.0._score: 3.0 }
+  - match: {hits.hits.1._id: "9"}
+  - match: { hits.hits.1._score: 2.0 }
+
+---
+"Rescorer retriever with pre-filters":
+  - do:
+      search:
+        index: test
+        body:
+          retriever:
+            rescorer:
+              filter:
+                match:
+                  available: true
+              rescore:
+                window_size: 10
+                query:
+                  rescore_query:
+                    rank_feature:
+                      field: "features.second_stage"
+                      linear: { }
+                  query_weight: 0
+              retriever:
+                standard:
+                  query:
+                    rank_feature:
+                      field: "features.first_stage"
+                      linear: { }
+          size: 2
+
+  - match: { hits.total.value: 6 }
+  - match: { hits.hits.0._id: "1" }
+  - match: { hits.hits.0._score: 10.0 }
+  - match: { hits.hits.1._id: "4" }
+  - match: { hits.hits.1._score: 7.0 }
+
+  - do:
+      search:
+        index: test
+        body:
+          retriever:
+            rescorer:
+              rescore:
+                window_size: 4
+                query:
+                  rescore_query:
+                    rank_feature:
+                      field: "features.second_stage"
+                      linear: { }
+                  query_weight: 0
+              retriever:
+                standard:
+                  filter:
+                    match:
+                      available: true
+                  query:
+                    rank_feature:
+                      field: "features.first_stage"
+                      linear: { }
+          size: 2
+
+  - match: { hits.total.value: 6 }
+  - match: { hits.hits.0._id: "5" }
+  - match: { hits.hits.0._score: 6.0 }
+  - match: { hits.hits.1._id: "7" }
+  - match: { hits.hits.1._score: 4.0 }
+
+---
+"Rescorer retriever and collapsing":
+  - do:
+      search:
+        index: test
+        body:
+          retriever:
+            rescorer:
+              rescore:
+                window_size: 10
+                query:
+                  rescore_query:
+                    rank_feature:
+                      field: "features.second_stage"
+                      linear: { }
+                  query_weight: 0
+              retriever:
+                standard:
+                  query:
+                    rank_feature:
+                      field: "features.first_stage"
+                      linear: { }
+          collapse:
+            field: group
+          size: 3
+
+  - match: { hits.total.value: 10 }
+  - match: { hits.hits.0._id: "1" }
+  - match: { hits.hits.0._score: 10.0 }
+  - match: { hits.hits.1._id: "3" }
+  - match: { hits.hits.1._score: 8.0 }
+  - match: { hits.hits.2._id: "6" }
+  - match: { hits.hits.2._score: 5.0 }
+
+---
+"Rescorer retriever and invalid window size":
+  - do:
+      catch: "/\\[rescorer\\] requires \\[window_size: 5\\] be greater than or equal to \\[size: 10\\]/"
+      search:
+        index: test
+        body:
+          retriever:
+            rescorer:
+              rescore:
+                window_size: 5
+                query:
+                  rescore_query:
+                    rank_feature:
+                      field: "features.second_stage"
+                      linear: { }
+                  query_weight: 0
+              retriever:
+                standard:
+                  query:
+                    rank_feature:
+                      field: "features.first_stage"
+                      linear: { }
+          size: 10

@@ -218,31 +218,6 @@
   - match: {hits.hits.0._source.timestamp: "2019-10-21 00:30:04.828740" }
   - match: {hits.hits.0.sort: [1571617804828740000] }
 
-
----
-"_shard_doc sort":
-  - requires:
-      cluster_features: ["gte_v7.12.0"]
-      reason: _shard_doc sort was added in 7.12
-
-  - do:
-      indices.create:
-        index: test
-  - do:
-      index:
-        index: test
-        id: "1"
-        body: { id: 1, foo: bar, age: 18 }
-
-  - do:
-      catch: /\[_shard_doc\] sort field cannot be used without \[point in time\]/
-      search:
-        index: test
-        body:
-          size: 1
-          sort: ["_shard_doc"]
-          search_after: [ 0L ]
-
 ---
 "Format sort values":
   - requires:

@@ -236,7 +236,7 @@ public class ShrinkIndexIT extends ESIntegTestCase {
 
     public void testCreateShrinkIndex() {
         internalCluster().ensureAtLeastNumDataNodes(2);
-        IndexVersion version = IndexVersionUtils.randomVersion(random());
+        IndexVersion version = IndexVersionUtils.randomWriteVersion();
         prepareCreate("source").setSettings(
             Settings.builder().put(indexSettings()).put("number_of_shards", randomIntBetween(2, 7)).put("index.version.created", version)
         ).get();

@@ -74,6 +74,7 @@ import static org.elasticsearch.cluster.routing.UnassignedInfoTests.randomUnassi
 import static org.elasticsearch.test.XContentTestUtils.convertToMap;
 import static org.elasticsearch.test.XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder;
 import static org.elasticsearch.test.index.IndexVersionUtils.randomVersion;
+import static org.elasticsearch.test.index.IndexVersionUtils.randomWriteVersion;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 
@@ -234,7 +235,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
 
     private DiscoveryNode randomNode(String nodeId) {
         Version nodeVersion = VersionUtils.randomVersion(random());
-        IndexVersion indexVersion = randomVersion(random());
+        IndexVersion indexVersion = randomVersion();
         return DiscoveryNodeUtils.builder(nodeId)
             .roles(emptySet())
             .version(nodeVersion, IndexVersion.fromId(indexVersion.id() - 1_000_000), indexVersion)
@@ -578,7 +579,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
         IndexMetadata.Builder builder = IndexMetadata.builder(name);
         Settings.Builder settingsBuilder = Settings.builder();
         setRandomIndexSettings(random(), settingsBuilder);
-        settingsBuilder.put(randomSettings(Settings.EMPTY)).put(IndexMetadata.SETTING_VERSION_CREATED, randomVersion(random()));
+        settingsBuilder.put(randomSettings(Settings.EMPTY)).put(IndexMetadata.SETTING_VERSION_CREATED, randomWriteVersion());
         builder.settings(settingsBuilder);
         builder.numberOfShards(randomIntBetween(1, 10)).numberOfReplicas(randomInt(10));
         builder.eventIngestedRange(IndexLongFieldRange.UNKNOWN, TransportVersion.current());
@@ -790,7 +791,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
                 ImmutableOpenMap.of(),
                 null,
                 SnapshotInfoTestUtils.randomUserMetadata(),
-                randomVersion(random())
+                randomVersion()
             )
         );
         case 1 -> new RestoreInProgress.Builder().add(

@@ -47,7 +47,7 @@ public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase {
             PreBuiltAnalyzers preBuiltAnalyzer = PreBuiltAnalyzers.values()[randomInt];
             String name = preBuiltAnalyzer.name().toLowerCase(Locale.ROOT);
 
-            IndexVersion randomVersion = IndexVersionUtils.randomVersion(random());
+            IndexVersion randomVersion = IndexVersionUtils.randomWriteVersion();
             if (loadedAnalyzers.containsKey(preBuiltAnalyzer) == false) {
                 loadedAnalyzers.put(preBuiltAnalyzer, new ArrayList<>());
             }

@@ -2050,7 +2050,7 @@ public class IndexRecoveryIT extends AbstractIndexRecoveryIntegTestCase {
                 IndexMetadata.SETTING_VERSION_CREATED,
                 IndexVersionUtils.randomVersionBetween(
                     random(),
-                    IndexVersionUtils.getFirstVersion(),
+                    IndexVersionUtils.getLowestWriteCompatibleVersion(),
                     IndexVersionUtils.getPreviousVersion(IndexVersions.MERGE_ON_RECOVERY_VERSION)
                 )
             )

@@ -38,6 +38,7 @@ import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.collapse.CollapseBuilder;
 import org.elasticsearch.search.rescore.QueryRescoreMode;
 import org.elasticsearch.search.rescore.QueryRescorerBuilder;
+import org.elasticsearch.search.sort.FieldSortBuilder;
 import org.elasticsearch.search.sort.SortBuilders;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.xcontent.ParseField;
@@ -840,6 +841,20 @@ public class QueryRescorerIT extends ESIntegTestCase {
                 }
             }
         );
 
+        assertResponse(
+            prepareSearch().addSort(SortBuilders.scoreSort())
+                .addSort(new FieldSortBuilder(FieldSortBuilder.SHARD_DOC_FIELD_NAME))
+                .setTrackScores(true)
+                .addRescorer(new QueryRescorerBuilder(matchAllQuery()).setRescoreQueryWeight(100.0f), 50),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value(), equalTo(5L));
+                assertThat(response.getHits().getHits().length, equalTo(5));
+                for (SearchHit hit : response.getHits().getHits()) {
+                    assertThat(hit.getScore(), equalTo(101f));
+                }
+            }
+        );
     }
 
     record GroupDoc(String id, String group, float firstPassScore, float secondPassScore, boolean shouldFilter) {}
@@ -879,6 +894,10 @@ public class QueryRescorerIT extends ESIntegTestCase {
             .setQuery(fieldValueScoreQuery("firstPassScore"))
             .addRescorer(new QueryRescorerBuilder(fieldValueScoreQuery("secondPassScore")))
             .setCollapse(new CollapseBuilder("group"));
+        if (randomBoolean()) {
+            request.addSort(SortBuilders.scoreSort());
+            request.addSort(new FieldSortBuilder(FieldSortBuilder.SHARD_DOC_FIELD_NAME));
+        }
         assertResponse(request, resp -> {
             assertThat(resp.getHits().getTotalHits().value(), equalTo(5L));
             assertThat(resp.getHits().getHits().length, equalTo(3));
@@ -958,6 +977,10 @@ public class QueryRescorerIT extends ESIntegTestCase {
             .addRescorer(new QueryRescorerBuilder(fieldValueScoreQuery("secondPassScore")).setQueryWeight(0f).windowSize(numGroups))
             .setCollapse(new CollapseBuilder("group"))
             .setSize(Math.min(numGroups, 10));
+        if (randomBoolean()) {
+            request.addSort(SortBuilders.scoreSort());
+            request.addSort(new FieldSortBuilder(FieldSortBuilder.SHARD_DOC_FIELD_NAME));
+        }
         long expectedNumHits = numHits;
         assertResponse(request, resp -> {
             assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits));

@@ -141,6 +141,7 @@ public class TransportVersions {
     public static final TransportVersion ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS = def(8_808_00_0);
     public static final TransportVersion EQL_ALLOW_PARTIAL_SEARCH_RESULTS = def(8_809_00_0);
     public static final TransportVersion NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION = def(8_810_00_0);
+    public static final TransportVersion ERROR_TRACE_IN_TRANSPORT_HEADER = def(8_811_00_0);
 
     /*
      * WARNING: DO NOT MERGE INTO MAIN!

@@ -150,10 +150,26 @@ public class ResolvedIndices {
         RemoteClusterService remoteClusterService,
         long startTimeInMillis
     ) {
-        final Map<String, OriginalIndices> remoteClusterIndices = remoteClusterService.groupIndices(
+        return resolveWithIndexNamesAndOptions(
+            request.indices(),
             request.indicesOptions(),
-            request.indices()
+            clusterState,
+            indexNameExpressionResolver,
+            remoteClusterService,
+            startTimeInMillis
         );
+    }
+
+    public static ResolvedIndices resolveWithIndexNamesAndOptions(
+        String[] indexNames,
+        IndicesOptions indicesOptions,
+        ClusterState clusterState,
+        IndexNameExpressionResolver indexNameExpressionResolver,
+        RemoteClusterService remoteClusterService,
+        long startTimeInMillis
+    ) {
+        final Map<String, OriginalIndices> remoteClusterIndices = remoteClusterService.groupIndices(indicesOptions, indexNames);
 
         final OriginalIndices localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
 
         Index[] concreteLocalIndices = localIndices == null

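Note: the refactor above extracts the body of the request-based resolve method so callers that only hold raw index expressions and IndicesOptions can reuse the same resolution path. A hedged sketch of a possible call site; the variable names and the expression "logs-*" are placeholders, not taken from this change:

    ResolvedIndices resolved = ResolvedIndices.resolveWithIndexNamesAndOptions(
        new String[] { "logs-*" },          // index expressions, including remote "cluster:index" patterns
        IndicesOptions.strictExpandOpen(),  // wildcard and missing-index handling
        clusterState,
        indexNameExpressionResolver,
        remoteClusterService,
        System.currentTimeMillis()
    );
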
@@ -456,7 +456,8 @@ public class SearchTransportService {
             (request, channel, task) -> searchService.executeQueryPhase(
                 request,
                 (SearchShardTask) task,
-                new ChannelActionListener<>(channel)
+                new ChannelActionListener<>(channel),
+                channel.getVersion()
             )
         );
         TransportActionProxy.registerProxyAction(transportService, QUERY_ID_ACTION_NAME, true, QuerySearchResult::new);
@@ -468,7 +469,8 @@ public class SearchTransportService {
             (request, channel, task) -> searchService.executeQueryPhase(
                 request,
                 (SearchShardTask) task,
-                new ChannelActionListener<>(channel)
+                new ChannelActionListener<>(channel),
+                channel.getVersion()
             )
         );
         TransportActionProxy.registerProxyAction(transportService, QUERY_SCROLL_ACTION_NAME, true, ScrollQuerySearchResult::new);

@@ -24,6 +24,8 @@ import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.core.Releasable;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.http.HttpTransportSettings;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.telemetry.tracing.TraceContext;
 
@@ -530,6 +532,17 @@ public final class ThreadContext implements Writeable, TraceContext {
         return value;
     }
 
+    /**
+     * Returns the header for the given key or defaultValue if not present
+     */
+    public String getHeaderOrDefault(String key, String defaultValue) {
+        String value = getHeader(key);
+        if (value == null) {
+            return defaultValue;
+        }
+        return value;
+    }
+
     /**
      * Returns all of the request headers from the thread's context.<br>
      * <b>Be advised, headers might contain credentials.</b>
@@ -589,6 +602,14 @@ public final class ThreadContext implements Writeable, TraceContext {
         threadLocal.set(threadLocal.get().putHeaders(header));
     }
 
+    public void setErrorTraceTransportHeader(RestRequest r) {
+        // set whether data nodes should send back stack trace based on the `error_trace` query parameter
+        if (r.paramAsBoolean("error_trace", RestController.ERROR_TRACE_DEFAULT)) {
+            // We only set it if error_trace is true (defaults to false) to avoid sending useless bytes
+            putHeader("error_trace", "true");
+        }
+    }
+
     /**
      * Puts a transient header object into this context
      */

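Note: the two additions above form a pair: setErrorTraceTransportHeader copies the REST-level error_trace flag into a transport header, and getHeaderOrDefault lets the receiving node read it back without a null check. A minimal sketch of the read side; the surrounding shard-level handler is hypothetical, only the header name set above is assumed:

    // On the node executing the shard request: fall back to "false" when the header was never set.
    boolean returnStackTraces = Boolean.parseBoolean(
        threadPool.getThreadContext().getHeaderOrDefault("error_trace", "false")
    );
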
@@ -685,29 +685,11 @@ public final class IndexSettings {
                     );
                 }
             }
 
-            // Verify that all nodes can handle this setting
-            var version = (IndexVersion) settings.get(SETTING_INDEX_VERSION_CREATED);
-            if (version.before(IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY)
-                && version.between(
-                    IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BACKPORT,
-                    IndexVersions.UPGRADE_TO_LUCENE_10_0_0
-                ) == false) {
-                throw new IllegalArgumentException(
-                    String.format(
-                        Locale.ROOT,
-                        "The setting [%s] is unavailable on this cluster because some nodes are running older "
-                            + "versions that do not support it. Please upgrade all nodes to the latest version "
-                            + "and try again.",
-                        RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey()
-                    )
-                );
-            }
         }
 
         @Override
         public Iterator<Setting<?>> settings() {
-            List<Setting<?>> res = List.of(INDEX_MAPPER_SOURCE_MODE_SETTING, SETTING_INDEX_VERSION_CREATED, MODE);
+            List<Setting<?>> res = List.of(INDEX_MAPPER_SOURCE_MODE_SETTING, MODE);
             return res.iterator();
         }
     },
@@ -1050,6 +1032,24 @@ public final class IndexSettings {
         indexMappingSourceMode = scopedSettings.get(INDEX_MAPPER_SOURCE_MODE_SETTING);
         recoverySourceEnabled = RecoverySettings.INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(nodeSettings);
         recoverySourceSyntheticEnabled = scopedSettings.get(RECOVERY_USE_SYNTHETIC_SOURCE_SETTING);
+        if (recoverySourceSyntheticEnabled) {
+            // Verify that all nodes can handle this setting
+            if (version.before(IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY)
+                && version.between(
+                    IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BACKPORT,
+                    IndexVersions.UPGRADE_TO_LUCENE_10_0_0
+                ) == false) {
+                throw new IllegalArgumentException(
+                    String.format(
+                        Locale.ROOT,
+                        "The setting [%s] is unavailable on this cluster because some nodes are running older "
+                            + "versions that do not support it. Please upgrade all nodes to the latest version "
+                            + "and try again.",
+                        RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey()
+                    )
+                );
+            }
+        }
 
         scopedSettings.addSettingsUpdateConsumer(
             MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING,

@@ -14,6 +14,8 @@ import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.SortedNumericSortField;
 import org.apache.lucene.search.SortedSetSortField;
 import org.elasticsearch.cluster.metadata.DataStream;
+import org.elasticsearch.common.logging.DeprecationCategory;
+import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.fielddata.IndexFieldData;
@@ -53,6 +55,8 @@ import java.util.function.Supplier;
  **/
 public final class IndexSortConfig {
 
+    private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(IndexSortConfig.class);
+
     /**
      * The list of field names
      */
@@ -134,10 +138,14 @@ public final class IndexSortConfig {
 
     // visible for tests
     final FieldSortSpec[] sortSpecs;
+    private final IndexVersion indexCreatedVersion;
+    private final String indexName;
     private final IndexMode indexMode;
 
     public IndexSortConfig(IndexSettings indexSettings) {
         final Settings settings = indexSettings.getSettings();
+        this.indexCreatedVersion = indexSettings.getIndexVersionCreated();
+        this.indexName = indexSettings.getIndex().getName();
         this.indexMode = indexSettings.getMode();
 
         if (this.indexMode == IndexMode.TIME_SERIES) {
@@ -230,7 +238,22 @@ public final class IndexSortConfig {
                 throw new IllegalArgumentException(err);
             }
             if (Objects.equals(ft.name(), sortSpec.field) == false) {
-                throw new IllegalArgumentException("Cannot use alias [" + sortSpec.field + "] as an index sort field");
+                if (this.indexCreatedVersion.onOrAfter(IndexVersions.V_7_13_0)) {
+                    throw new IllegalArgumentException("Cannot use alias [" + sortSpec.field + "] as an index sort field");
+                } else {
+                    DEPRECATION_LOGGER.warn(
+                        DeprecationCategory.MAPPINGS,
+                        "index-sort-aliases",
+                        "Index sort for index ["
+                            + indexName
+                            + "] defined on field ["
+                            + sortSpec.field
+                            + "] which resolves to field ["
+                            + ft.name()
+                            + "]. "
+                            + "You will not be able to define an index sort over aliased fields in new indexes"
+                    );
+                }
             }
             boolean reverse = sortSpec.order == null ? false : (sortSpec.order == SortOrder.DESC);
             MultiValueMode mode = sortSpec.mode;

@@ -12,7 +12,6 @@ package org.elasticsearch.index;

 import org.apache.lucene.util.Version;
 import org.elasticsearch.ReleaseVersions;
 import org.elasticsearch.core.Assertions;
-import org.elasticsearch.core.UpdateForV9;

 import java.lang.reflect.Field;
 import java.text.ParseException;
@@ -25,6 +24,7 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
 import java.util.function.IntFunction;
+import java.util.stream.Collectors;

 @SuppressWarnings("deprecation")
 public class IndexVersions {
@@ -58,7 +58,6 @@ public class IndexVersions {
         }
     }

-    @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) // remove the index versions with which v9 will not need to interact
     public static final IndexVersion ZERO = def(0, Version.LATEST);

     public static final IndexVersion V_7_0_0 = def(7_00_00_99, parseUnchecked("8.0.0"));
@@ -244,10 +243,12 @@ public class IndexVersions {
         return Collections.unmodifiableNavigableMap(builder);
     }

-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    // We can simplify this once we've removed all references to index versions earlier than MINIMUM_COMPATIBLE
+    static Collection<IndexVersion> getAllWriteVersions() {
+        return VERSION_IDS.values().stream().filter(v -> v.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE)).collect(Collectors.toSet());
+    }
+
     static Collection<IndexVersion> getAllVersions() {
-        return VERSION_IDS.values().stream().filter(v -> v.onOrAfter(MINIMUM_COMPATIBLE)).toList();
+        return VERSION_IDS.values();
     }

     static final IntFunction<String> VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(IndexVersions.class, LATEST_DEFINED.id());
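The new getAllWriteVersions() narrows the full registry down to versions at or after the minimum compatible one, while getAllVersions() now returns everything. A small self-contained sketch of that split, with hypothetical integer version ids standing in for the real IndexVersion registry:

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

// Hypothetical stand-in: version ids are plain ints, MINIMUM_COMPATIBLE is an arbitrary cutoff.
final class WriteVersionsSketch {
    static final int MINIMUM_COMPATIBLE = 8_000_000;
    static final List<Integer> ALL_VERSION_IDS = List.of(7_000_099, 7_170_099, 8_000_000, 8_512_000, 9_000_000);

    // Every known version (indices that can still be read).
    static List<Integer> getAllVersions() {
        return ALL_VERSION_IDS;
    }

    // Only versions new enough to still be written to.
    static Set<Integer> getAllWriteVersions() {
        return ALL_VERSION_IDS.stream().filter(v -> v >= MINIMUM_COMPATIBLE).collect(Collectors.toSet());
    }

    public static void main(String[] args) {
        System.out.println("all:   " + getAllVersions());
        System.out.println("write: " + getAllWriteVersions());
    }
}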
@@ -77,7 +77,8 @@ public class MapperFeatures implements FeatureSpecification {
             DocumentParser.FIX_PARSING_SUBOBJECTS_FALSE_DYNAMIC_FALSE,
             CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX,
             META_FETCH_FIELDS_ERROR_CODE_CHANGED,
-            SPARSE_VECTOR_STORE_SUPPORT
+            SPARSE_VECTOR_STORE_SUPPORT,
+            SourceFieldMapper.SYNTHETIC_RECOVERY_SOURCE
         );
     }
 }
@@ -10,13 +10,17 @@
 package org.elasticsearch.index.mapper;

 import org.elasticsearch.common.Explicit;
+import org.elasticsearch.common.logging.DeprecationCategory;
 import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.index.IndexVersion;
+import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.xcontent.XContentBuilder;

 import java.io.IOException;
 import java.util.Iterator;
 import java.util.Map;
+import java.util.Set;
 import java.util.function.Function;

 /**
@@ -132,6 +136,8 @@ public abstract class MetadataFieldMapper extends FieldMapper {
             return build();
         }

+        private static final Set<String> UNSUPPORTED_PARAMETERS_8_6_0 = Set.of("type", "fields", "copy_to", "boost");
+
         public final void parseMetadataField(String name, MappingParserContext parserContext, Map<String, Object> fieldNode) {
             final Parameter<?>[] params = getParameters();
             Map<String, Parameter<?>> paramsMap = Maps.newHashMapWithExpectedSize(params.length);
@@ -144,6 +150,22 @@ public abstract class MetadataFieldMapper extends FieldMapper {
                 final Object propNode = entry.getValue();
                 Parameter<?> parameter = paramsMap.get(propName);
                 if (parameter == null) {
+                    IndexVersion indexVersionCreated = parserContext.indexVersionCreated();
+                    if (indexVersionCreated.before(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)
+                        && UNSUPPORTED_PARAMETERS_8_6_0.contains(propName)) {
+                        if (indexVersionCreated.onOrAfter(IndexVersions.V_8_6_0)) {
+                            // silently ignore type, and a few other parameters: sadly we've been doing this for a long time
+                            deprecationLogger.warn(
+                                DeprecationCategory.API,
+                                propName,
+                                "Parameter [{}] has no effect on metadata field [{}] and will be removed in future",
+                                propName,
+                                name
+                            );
+                        }
+                        iterator.remove();
+                        continue;
+                    }
                     throw new MapperParsingException("unknown parameter [" + propName + "] on metadata field [" + name + "]");
                 }
                 parameter.parse(name, parserContext, propNode);
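The parseMetadataField() change keeps accepting a small set of legacy parameters on metadata fields for indices created before the Lucene 10 upgrade, warning (or staying silent for pre-8.6.0 indices) instead of failing. A compact sketch of that decision table, with simplified stand-in version ordinals instead of the real IndexVersions constants:

import java.util.Set;

// Simplified stand-in for the parameter-handling decision in parseMetadataField().
final class MetadataParamSketch {

    enum Outcome { IGNORE_SILENTLY, IGNORE_WITH_DEPRECATION, REJECT }

    static final Set<String> UNSUPPORTED_PARAMETERS_8_6_0 = Set.of("type", "fields", "copy_to", "boost");

    // indexVersion is a hypothetical ordinal: 8_06 stands for 8.6.0, 9_00 for the Lucene 10 upgrade.
    static Outcome handleUnknownParameter(String name, int indexVersion) {
        int v8_6_0 = 8_06;
        int lucene10Upgrade = 9_00;
        if (indexVersion < lucene10Upgrade && UNSUPPORTED_PARAMETERS_8_6_0.contains(name)) {
            // Older indices keep the lenient behavior; 8.6+ ones at least get a deprecation warning.
            return indexVersion >= v8_6_0 ? Outcome.IGNORE_WITH_DEPRECATION : Outcome.IGNORE_SILENTLY;
        }
        return Outcome.REJECT; // unknown parameter on a metadata field
    }

    public static void main(String[] args) {
        System.out.println(handleUnknownParameter("boost", 8_06));  // IGNORE_WITH_DEPRECATION
        System.out.println(handleUnknownParameter("boost", 8_05));  // IGNORE_SILENTLY
        System.out.println(handleUnknownParameter("boost", 9_00));  // REJECT
        System.out.println(handleUnknownParameter("banana", 8_06)); // REJECT
    }
}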
@@ -56,6 +56,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
         "mapper.source.remove_synthetic_source_only_validation"
     );
     public static final NodeFeature SOURCE_MODE_FROM_INDEX_SETTING = new NodeFeature("mapper.source.mode_from_index_setting");
+    public static final NodeFeature SYNTHETIC_RECOVERY_SOURCE = new NodeFeature("mapper.synthetic_recovery_source");

     public static final String NAME = "_source";
     public static final String RECOVERY_SOURCE_NAME = "_recovery_source";
@@ -52,6 +52,7 @@ import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData;
 import org.elasticsearch.index.mapper.DocumentParserContext;
 import org.elasticsearch.index.mapper.DynamicFieldType;
 import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperBuilderContext;
@@ -670,7 +671,7 @@ public final class FlattenedFieldMapper extends FieldMapper {
         private final boolean isDimension;
         private final int ignoreAbove;

-        public RootFlattenedFieldType(
+        RootFlattenedFieldType(
             String name,
             boolean indexed,
             boolean hasDocValues,
@@ -682,7 +683,7 @@ public final class FlattenedFieldMapper extends FieldMapper {
             this(name, indexed, hasDocValues, meta, splitQueriesOnWhitespace, eagerGlobalOrdinals, Collections.emptyList(), ignoreAbove);
         }

-        public RootFlattenedFieldType(
+        RootFlattenedFieldType(
             String name,
             boolean indexed,
             boolean hasDocValues,
@@ -806,6 +807,10 @@ public final class FlattenedFieldMapper extends FieldMapper {
             return new KeyedFlattenedFieldType(name(), childPath, this);
         }

+        public MappedFieldType getKeyedFieldType() {
+            return new KeywordFieldMapper.KeywordFieldType(name() + KEYED_FIELD_SUFFIX);
+        }
+
         @Override
         public boolean isDimension() {
             return isDimension;
@@ -55,7 +55,7 @@ import java.util.Objects;
  * }`
  *
  */
-class FlattenedFieldSyntheticWriterHelper {
+public class FlattenedFieldSyntheticWriterHelper {

     private record Prefix(List<String> prefix) {

@@ -225,17 +225,17 @@ class FlattenedFieldSyntheticWriterHelper {
         }
     }

-    interface SortedKeyedValues {
+    public interface SortedKeyedValues {
         BytesRef next() throws IOException;
     }

     private final SortedKeyedValues sortedKeyedValues;

-    FlattenedFieldSyntheticWriterHelper(final SortedKeyedValues sortedKeyedValues) {
+    public FlattenedFieldSyntheticWriterHelper(final SortedKeyedValues sortedKeyedValues) {
         this.sortedKeyedValues = sortedKeyedValues;
     }

-    void write(final XContentBuilder b) throws IOException {
+    public void write(final XContentBuilder b) throws IOException {
         KeyValue curr = new KeyValue(sortedKeyedValues.next());
         KeyValue prev = KeyValue.EMPTY;
         final List<String> values = new ArrayList<>();
@@ -85,7 +85,7 @@ import org.elasticsearch.core.IOUtils;
 import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.Tuple;
-import org.elasticsearch.core.UpdateForV9;
+import org.elasticsearch.core.UpdateForV10;
 import org.elasticsearch.discovery.DiscoveryModule;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.NodeEnvironment;
@@ -555,9 +555,10 @@ class NodeConstruction {
         return settingsModule;
     }

-    @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS)
+    @UpdateForV10(owner = UpdateForV10.Owner.SEARCH_FOUNDATIONS)
     private static void addBwcSearchWorkerSettings(List<Setting<?>> additionalSettings) {
-        // TODO remove the below settings, they are unused and only here to enable BwC for deployments that still use them
+        // Search workers thread pool has been removed in Elasticsearch 8.16.0. These settings are deprecated and take no effect.
+        // They are here only to enable BwC for deployments that still use them
         additionalSettings.add(
             Setting.intSetting("thread_pool.search_worker.queue_size", 0, Setting.Property.NodeScope, Setting.Property.DeprecatedWarning)
         );
@@ -269,5 +269,4 @@ public abstract class BaseRestHandler implements RestHandler {
     protected Set<String> responseParams(RestApiVersion restApiVersion) {
         return responseParams();
     }
-
 }
@@ -93,6 +93,7 @@ public class RestController implements HttpServerTransport.Dispatcher {
     public static final String STATUS_CODE_KEY = "es_rest_status_code";
     public static final String HANDLER_NAME_KEY = "es_rest_handler_name";
     public static final String REQUEST_METHOD_KEY = "es_rest_request_method";
+    public static final boolean ERROR_TRACE_DEFAULT = false;

     static {
         try (InputStream stream = RestController.class.getResourceAsStream("/config/favicon.ico")) {
@@ -638,7 +639,7 @@ public class RestController implements HttpServerTransport.Dispatcher {
     private static void validateErrorTrace(RestRequest request, RestChannel channel) {
         // error_trace cannot be used when we disable detailed errors
         // we consume the error_trace parameter first to ensure that it is always consumed
-        if (request.paramAsBoolean("error_trace", false) && channel.detailedErrorsEnabled() == false) {
+        if (request.paramAsBoolean("error_trace", ERROR_TRACE_DEFAULT) && channel.detailedErrorsEnabled() == false) {
             throw new IllegalArgumentException("error traces in responses are disabled.");
         }
     }
@@ -37,6 +37,7 @@ import java.util.Set;
 import static java.util.Collections.singletonMap;
 import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE;
 import static org.elasticsearch.rest.RestController.ELASTIC_PRODUCT_HTTP_HEADER;
+import static org.elasticsearch.rest.RestController.ERROR_TRACE_DEFAULT;

 public final class RestResponse implements Releasable {

@@ -143,7 +144,7 @@ public final class RestResponse implements Releasable {
         // switched in the xcontent rendering parameters.
         // For authorization problems (RestStatus.UNAUTHORIZED) we don't want to do this since this could
         // leak information to the caller who is unauthorized to make this call
-        if (params.paramAsBoolean("error_trace", false) && status != RestStatus.UNAUTHORIZED) {
+        if (params.paramAsBoolean("error_trace", ERROR_TRACE_DEFAULT) && status != RestStatus.UNAUTHORIZED) {
             params = new ToXContent.DelegatingMapParams(singletonMap(REST_EXCEPTION_SKIP_STACK_TRACE, "false"), params);
         }
@@ -72,6 +72,9 @@ public class RestMultiSearchAction extends BaseRestHandler {

     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
+        if (client.threadPool() != null && client.threadPool().getThreadContext() != null) {
+            client.threadPool().getThreadContext().setErrorTraceTransportHeader(request);
+        }
         final MultiSearchRequest multiSearchRequest = parseRequest(request, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature);
         return channel -> {
             final RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel());
@@ -95,7 +95,9 @@ public class RestSearchAction extends BaseRestHandler {

     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
+        if (client.threadPool() != null && client.threadPool().getThreadContext() != null) {
+            client.threadPool().getThreadContext().setErrorTraceTransportHeader(request);
+        }
         SearchRequest searchRequest = new SearchRequest();
         // access the BwC param, but just drop it
         // this might be set by old clients
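Both REST search handlers above now copy the request's error_trace flag into the thread context before dispatching, so data nodes can later decide whether to send stack traces back (see the SearchService change further down). A rough sketch of the propagation, with a hypothetical header map playing the role of the thread context:

import java.util.HashMap;
import java.util.Map;

// Hypothetical mini model of "REST param -> transport header" propagation.
final class ErrorTraceHeaderSketch {

    // Stand-in for the thread context headers shared with outgoing transport requests.
    static final Map<String, String> threadContextHeaders = new HashMap<>();

    // What the REST handler does before parsing the search request.
    static void setErrorTraceTransportHeader(Map<String, String> restParams) {
        String errorTrace = restParams.getOrDefault("error_trace", "false");
        threadContextHeaders.put("error_trace", errorTrace);
    }

    // What a data node can later check when deciding whether to strip stack traces.
    static boolean wantsStackTraces() {
        return Boolean.parseBoolean(threadContextHeaders.getOrDefault("error_trace", "false"));
    }

    public static void main(String[] args) {
        setErrorTraceTransportHeader(Map.of("error_trace", "true"));
        System.out.println(wantsStackTraces()); // true

        threadContextHeaders.clear();
        setErrorTraceTransportHeader(Map.of());
        System.out.println(wantsStackTraces()); // false
    }
}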
@@ -73,6 +73,7 @@ import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext;
 import org.elasticsearch.search.rank.feature.RankFeatureResult;
 import org.elasticsearch.search.rescore.RescoreContext;
+import org.elasticsearch.search.rescore.RescorePhase;
 import org.elasticsearch.search.slice.SliceBuilder;
 import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.search.suggest.SuggestionSearchContext;
@@ -377,7 +378,7 @@ final class DefaultSearchContext extends SearchContext {
             );
         }
         if (rescore != null) {
-            if (sort != null) {
+            if (RescorePhase.validateSort(sort) == false) {
                 throw new IllegalArgumentException("Cannot use [sort] option in conjunction with [rescore].");
             }
             int maxWindow = indexService.getIndexSettings().getMaxRescoreWindow();
@@ -23,4 +23,11 @@ public final class SearchFeatures implements FeatureSpecification {
     public Set<NodeFeature> getFeatures() {
         return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED, LUCENE_10_0_0_UPGRADE);
     }
+
+    public static final NodeFeature RETRIEVER_RESCORER_ENABLED = new NodeFeature("search.retriever.rescorer.enabled");
+
+    @Override
+    public Set<NodeFeature> getTestFeatures() {
+        return Set.of(RETRIEVER_RESCORER_ENABLED);
+    }
 }
@@ -231,6 +231,7 @@ import org.elasticsearch.search.rank.feature.RankFeatureShardResult;
 import org.elasticsearch.search.rescore.QueryRescorerBuilder;
 import org.elasticsearch.search.rescore.RescorerBuilder;
 import org.elasticsearch.search.retriever.KnnRetrieverBuilder;
+import org.elasticsearch.search.retriever.RescorerRetrieverBuilder;
 import org.elasticsearch.search.retriever.RetrieverBuilder;
 import org.elasticsearch.search.retriever.RetrieverParserContext;
 import org.elasticsearch.search.retriever.StandardRetrieverBuilder;
@@ -1080,6 +1081,7 @@ public class SearchModule {
     private void registerRetrieverParsers(List<SearchPlugin> plugins) {
         registerRetriever(new RetrieverSpec<>(StandardRetrieverBuilder.NAME, StandardRetrieverBuilder::fromXContent));
         registerRetriever(new RetrieverSpec<>(KnnRetrieverBuilder.NAME, KnnRetrieverBuilder::fromXContent));
+        registerRetriever(new RetrieverSpec<>(RescorerRetrieverBuilder.NAME, RescorerRetrieverBuilder::fromXContent));

         registerFromPlugin(plugins, SearchPlugin::getRetrievers, this::registerRetriever);
     }
@@ -17,6 +17,8 @@ import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TopDocs;
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.TransportVersion;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRunnable;
 import org.elasticsearch.action.ResolvedIndices;
@@ -152,6 +154,7 @@ import java.util.function.Function;
 import java.util.function.LongSupplier;
 import java.util.function.Supplier;

+import static org.elasticsearch.TransportVersions.ERROR_TRACE_IN_TRANSPORT_HEADER;
 import static org.elasticsearch.core.TimeValue.timeValueHours;
 import static org.elasticsearch.core.TimeValue.timeValueMillis;
 import static org.elasticsearch.core.TimeValue.timeValueMinutes;
@@ -272,6 +275,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv

     public static final int DEFAULT_SIZE = 10;
     public static final int DEFAULT_FROM = 0;
+    private static final StackTraceElement[] EMPTY_STACK_TRACE_ARRAY = new StackTraceElement[0];

     private final ThreadPool threadPool;

@@ -506,7 +510,41 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
         keepAliveReaper.cancel();
     }

+    /**
+     * Wraps the listener to avoid sending StackTraces back to the coordinating
+     * node if the `error_trace` header is set to {@code false}. Upon reading we
+     * default to {@code true} to maintain the same behavior as before the change,
+     * due to older nodes not being able to specify whether it needs stack traces.
+     *
+     * @param <T> the type of the response
+     * @param listener the action listener to be wrapped
+     * @param version channel version of the request
+     * @param threadPool with context where to write the new header
+     * @return the wrapped action listener
+     */
+    static <T> ActionListener<T> maybeWrapListenerForStackTrace(
+        ActionListener<T> listener,
+        TransportVersion version,
+        ThreadPool threadPool
+    ) {
+        boolean header = true;
+        if (version.onOrAfter(ERROR_TRACE_IN_TRANSPORT_HEADER) && threadPool.getThreadContext() != null) {
+            header = Boolean.parseBoolean(threadPool.getThreadContext().getHeaderOrDefault("error_trace", "false"));
+        }
+        if (header == false) {
+            return listener.delegateResponse((l, e) -> {
+                ExceptionsHelper.unwrapCausesAndSuppressed(e, err -> {
+                    err.setStackTrace(EMPTY_STACK_TRACE_ARRAY);
+                    return false;
+                });
+                l.onFailure(e);
+            });
+        }
+        return listener;
+    }
+
     public void executeDfsPhase(ShardSearchRequest request, SearchShardTask task, ActionListener<SearchPhaseResult> listener) {
+        listener = maybeWrapListenerForStackTrace(listener, request.getChannelVersion(), threadPool);
         final IndexShard shard = getShard(request);
         rewriteAndFetchShardRequest(shard, request, listener.delegateFailure((l, rewritten) -> {
             // fork the execution in the search thread pool
@@ -544,10 +582,11 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
     }

     public void executeQueryPhase(ShardSearchRequest request, SearchShardTask task, ActionListener<SearchPhaseResult> listener) {
+        ActionListener<SearchPhaseResult> finalListener = maybeWrapListenerForStackTrace(listener, request.getChannelVersion(), threadPool);
         assert request.canReturnNullResponseIfMatchNoDocs() == false || request.numberOfShards() > 1
             : "empty responses require more than one shard";
         final IndexShard shard = getShard(request);
-        rewriteAndFetchShardRequest(shard, request, listener.delegateFailure((l, orig) -> {
+        rewriteAndFetchShardRequest(shard, request, finalListener.delegateFailure((l, orig) -> {
             // check if we can shortcut the query phase entirely.
             if (orig.canReturnNullResponseIfMatchNoDocs()) {
                 assert orig.scroll() == null;
@@ -561,7 +600,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
                 );
                 CanMatchShardResponse canMatchResp = canMatch(canMatchContext, false);
                 if (canMatchResp.canMatch() == false) {
-                    listener.onResponse(QuerySearchResult.nullInstance());
+                    finalListener.onResponse(QuerySearchResult.nullInstance());
                     return;
                 }
             }
@@ -736,6 +775,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
     }

     public void executeRankFeaturePhase(RankFeatureShardRequest request, SearchShardTask task, ActionListener<RankFeatureResult> listener) {
+        listener = maybeWrapListenerForStackTrace(listener, request.getShardSearchRequest().getChannelVersion(), threadPool);
         final ReaderContext readerContext = findReaderContext(request.contextId(), request);
         final ShardSearchRequest shardSearchRequest = readerContext.getShardSearchRequest(request.getShardSearchRequest());
         final Releasable markAsUsed = readerContext.markAsUsed(getKeepAlive(shardSearchRequest));
@@ -779,8 +819,10 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
     public void executeQueryPhase(
         InternalScrollSearchRequest request,
         SearchShardTask task,
-        ActionListener<ScrollQuerySearchResult> listener
+        ActionListener<ScrollQuerySearchResult> listener,
+        TransportVersion version
     ) {
+        listener = maybeWrapListenerForStackTrace(listener, version, threadPool);
         final LegacyReaderContext readerContext = (LegacyReaderContext) findReaderContext(request.contextId(), request);
         final Releasable markAsUsed;
         try {
@@ -816,7 +858,13 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
      * It is the responsibility of the caller to ensure that the ref count is correctly decremented
      * when the object is no longer needed.
      */
-    public void executeQueryPhase(QuerySearchRequest request, SearchShardTask task, ActionListener<QuerySearchResult> listener) {
+    public void executeQueryPhase(
+        QuerySearchRequest request,
+        SearchShardTask task,
+        ActionListener<QuerySearchResult> listener,
+        TransportVersion version
+    ) {
+        listener = maybeWrapListenerForStackTrace(listener, version, threadPool);
         final ReaderContext readerContext = findReaderContext(request.contextId(), request.shardSearchRequest());
         final ShardSearchRequest shardSearchRequest = readerContext.getShardSearchRequest(request.shardSearchRequest());
         final Releasable markAsUsed = readerContext.markAsUsed(getKeepAlive(shardSearchRequest));
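The new maybeWrapListenerForStackTrace() only strips stack traces when the channel is new enough to carry the error_trace header and that header is not explicitly true. A self-contained sketch of the same wrapping idea, using a plain callback interface instead of ActionListener (all names illustrative):

import java.util.function.Consumer;

// Minimal stand-in for the listener-wrapping idea in SearchService.
final class StackTraceStrippingSketch {

    interface Listener<T> {
        void onResponse(T response);
        void onFailure(Exception e);
    }

    private static final StackTraceElement[] EMPTY_STACK_TRACE = new StackTraceElement[0];

    // channelSupportsHeader stands in for version.onOrAfter(ERROR_TRACE_IN_TRANSPORT_HEADER);
    // errorTraceHeader stands in for the value read from the thread context (null if absent).
    static <T> Listener<T> maybeWrapForStackTrace(Listener<T> listener, boolean channelSupportsHeader, String errorTraceHeader) {
        boolean wantsTraces = true; // default: keep traces, since older coordinators cannot ask explicitly
        if (channelSupportsHeader) {
            wantsTraces = Boolean.parseBoolean(errorTraceHeader == null ? "false" : errorTraceHeader);
        }
        if (wantsTraces) {
            return listener;
        }
        return new Listener<>() {
            @Override
            public void onResponse(T response) {
                listener.onResponse(response);
            }

            @Override
            public void onFailure(Exception e) {
                // Drop stack frames from the failure (the real code also walks causes and suppressed exceptions).
                e.setStackTrace(EMPTY_STACK_TRACE);
                listener.onFailure(e);
            }
        };
    }

    public static void main(String[] args) {
        Consumer<Exception> print = ex -> System.out.println(ex + " frames=" + ex.getStackTrace().length);
        Listener<String> base = new Listener<>() {
            @Override
            public void onResponse(String r) {
                System.out.println("ok: " + r);
            }

            @Override
            public void onFailure(Exception e) {
                print.accept(e);
            }
        };
        maybeWrapForStackTrace(base, true, "false").onFailure(new RuntimeException("boom")); // frames=0
        maybeWrapForStackTrace(base, false, null).onFailure(new RuntimeException("boom"));   // frames preserved
    }
}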
@@ -48,9 +48,7 @@ import org.elasticsearch.search.retriever.RetrieverBuilder;
 import org.elasticsearch.search.retriever.RetrieverParserContext;
 import org.elasticsearch.search.searchafter.SearchAfterBuilder;
 import org.elasticsearch.search.slice.SliceBuilder;
-import org.elasticsearch.search.sort.FieldSortBuilder;
 import org.elasticsearch.search.sort.ScoreSortBuilder;
-import org.elasticsearch.search.sort.ShardDocSortField;
 import org.elasticsearch.search.sort.SortBuilder;
 import org.elasticsearch.search.sort.SortBuilders;
 import org.elasticsearch.search.sort.SortOrder;
@@ -2341,18 +2339,6 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
                 validationException = rescorer.validate(this, validationException);
             }
         }
-
-        if (pointInTimeBuilder() == null && sorts() != null) {
-            for (var sortBuilder : sorts()) {
-                if (sortBuilder instanceof FieldSortBuilder fieldSortBuilder
-                    && ShardDocSortField.NAME.equals(fieldSortBuilder.getFieldName())) {
-                    validationException = addValidationError(
-                        "[" + FieldSortBuilder.SHARD_DOC_FIELD_NAME + "] sort field cannot be used without [point in time]",
-                        validationException
-                    );
-                }
-            }
-        }
         return validationException;
     }
 }
@@ -58,6 +58,7 @@ import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.profile.query.CollectorResult;
 import org.elasticsearch.search.profile.query.InternalProfileCollector;
 import org.elasticsearch.search.rescore.RescoreContext;
+import org.elasticsearch.search.rescore.RescorePhase;
 import org.elasticsearch.search.sort.SortAndFormats;

 import java.io.IOException;
@@ -238,7 +239,7 @@ abstract class QueryPhaseCollectorManager implements CollectorManager<Collector,
         int numDocs = Math.min(searchContext.from() + searchContext.size(), totalNumDocs);
         final boolean rescore = searchContext.rescore().isEmpty() == false;
         if (rescore) {
-            assert searchContext.sort() == null;
+            assert RescorePhase.validateSort(searchContext.sort());
             for (RescoreContext rescoreContext : searchContext.rescore()) {
                 numDocs = Math.max(numDocs, rescoreContext.getWindowSize());
             }
@@ -13,6 +13,7 @@ import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopFieldDocs;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.search.SearchShardTask;
 import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
@@ -22,9 +23,12 @@ import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.query.QueryPhase;
 import org.elasticsearch.search.query.SearchTimeoutException;
+import org.elasticsearch.search.sort.ShardDocSortField;
+import org.elasticsearch.search.sort.SortAndFormats;

 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;

@@ -39,15 +43,27 @@ public class RescorePhase {
         if (context.size() == 0 || context.rescore() == null || context.rescore().isEmpty()) {
             return;
         }
+        if (validateSort(context.sort()) == false) {
+            throw new IllegalStateException("Cannot use [sort] option in conjunction with [rescore], missing a validate?");
+        }
         TopDocs topDocs = context.queryResult().topDocs().topDocs;
         if (topDocs.scoreDocs.length == 0) {
             return;
         }
+        // Populate FieldDoc#score using the primary sort field (_score) to ensure compatibility with top docs rescoring
+        Arrays.stream(topDocs.scoreDocs).forEach(t -> {
+            if (t instanceof FieldDoc fieldDoc) {
+                fieldDoc.score = (float) fieldDoc.fields[0];
+            }
+        });
         TopFieldGroups topGroups = null;
+        TopFieldDocs topFields = null;
         if (topDocs instanceof TopFieldGroups topFieldGroups) {
-            assert context.collapse() != null;
+            assert context.collapse() != null && validateSortFields(topFieldGroups.fields);
             topGroups = topFieldGroups;
+        } else if (topDocs instanceof TopFieldDocs topFieldDocs) {
+            assert validateSortFields(topFieldDocs.fields);
+            topFields = topFieldDocs;
         }
         try {
             Runnable cancellationCheck = getCancellationChecks(context);
@@ -56,17 +72,18 @@ public class RescorePhase {
                 topDocs = ctx.rescorer().rescore(topDocs, context.searcher(), ctx);
                 // It is the responsibility of the rescorer to sort the resulted top docs,
                 // here we only assert that this condition is met.
-                assert context.sort() == null && topDocsSortedByScore(topDocs) : "topdocs should be sorted after rescore";
+                assert topDocsSortedByScore(topDocs) : "topdocs should be sorted after rescore";
                 ctx.setCancellationChecker(null);
             }
+            /**
+             * Since rescorers are building top docs with score only, we must reconstruct the {@link TopFieldGroups}
+             * or {@link TopFieldDocs} using their original version before rescoring.
+             */
             if (topGroups != null) {
                 assert context.collapse() != null;
-                /**
-                 * Since rescorers don't preserve collapsing, we must reconstruct the group and field
-                 * values from the originalTopGroups to create a new {@link TopFieldGroups} from the
-                 * rescored top documents.
-                 */
-                topDocs = rewriteTopGroups(topGroups, topDocs);
+                topDocs = rewriteTopFieldGroups(topGroups, topDocs);
+            } else if (topFields != null) {
+                topDocs = rewriteTopFieldDocs(topFields, topDocs);
             }
             context.queryResult()
                 .topDocs(new TopDocsAndMaxScore(topDocs, topDocs.scoreDocs[0].score), context.queryResult().sortValueFormats());
@@ -81,29 +98,84 @@ public class RescorePhase {
         }
     }

-    private static TopFieldGroups rewriteTopGroups(TopFieldGroups originalTopGroups, TopDocs rescoredTopDocs) {
-        assert originalTopGroups.fields.length == 1 && SortField.FIELD_SCORE.equals(originalTopGroups.fields[0])
-            : "rescore must always sort by score descending";
+    /**
+     * Returns whether the provided {@link SortAndFormats} can be used to rescore
+     * top documents.
+     */
+    public static boolean validateSort(SortAndFormats sortAndFormats) {
+        if (sortAndFormats == null) {
+            return true;
+        }
+        return validateSortFields(sortAndFormats.sort.getSort());
+    }
+
+    private static boolean validateSortFields(SortField[] fields) {
+        if (fields[0].equals(SortField.FIELD_SCORE) == false) {
+            return false;
+        }
+        if (fields.length == 1) {
+            return true;
+        }
+
+        // The ShardDocSortField can be used as a tiebreaker because it maintains
+        // the natural document ID order within the shard.
+        if (fields[1] instanceof ShardDocSortField == false || fields[1].getReverse()) {
+            return false;
+        }
+        return true;
+    }
+
+    private static TopFieldDocs rewriteTopFieldDocs(TopFieldDocs originalTopFieldDocs, TopDocs rescoredTopDocs) {
+        Map<Integer, FieldDoc> docIdToFieldDoc = Maps.newMapWithExpectedSize(originalTopFieldDocs.scoreDocs.length);
+        for (int i = 0; i < originalTopFieldDocs.scoreDocs.length; i++) {
+            docIdToFieldDoc.put(originalTopFieldDocs.scoreDocs[i].doc, (FieldDoc) originalTopFieldDocs.scoreDocs[i]);
+        }
+        var newScoreDocs = new FieldDoc[rescoredTopDocs.scoreDocs.length];
+        int pos = 0;
+        for (var doc : rescoredTopDocs.scoreDocs) {
+            newScoreDocs[pos] = docIdToFieldDoc.get(doc.doc);
+            newScoreDocs[pos].score = doc.score;
+            newScoreDocs[pos].fields[0] = newScoreDocs[pos].score;
+            pos++;
+        }
+        return new TopFieldDocs(originalTopFieldDocs.totalHits, newScoreDocs, originalTopFieldDocs.fields);
+    }
+
+    private static TopFieldGroups rewriteTopFieldGroups(TopFieldGroups originalTopGroups, TopDocs rescoredTopDocs) {
+        var newFieldDocs = rewriteFieldDocs((FieldDoc[]) originalTopGroups.scoreDocs, rescoredTopDocs.scoreDocs);
+
         Map<Integer, Object> docIdToGroupValue = Maps.newMapWithExpectedSize(originalTopGroups.scoreDocs.length);
         for (int i = 0; i < originalTopGroups.scoreDocs.length; i++) {
             docIdToGroupValue.put(originalTopGroups.scoreDocs[i].doc, originalTopGroups.groupValues[i]);
         }
-        var newScoreDocs = new FieldDoc[rescoredTopDocs.scoreDocs.length];
         var newGroupValues = new Object[originalTopGroups.groupValues.length];
         int pos = 0;
         for (var doc : rescoredTopDocs.scoreDocs) {
-            newScoreDocs[pos] = new FieldDoc(doc.doc, doc.score, new Object[] { doc.score });
             newGroupValues[pos++] = docIdToGroupValue.get(doc.doc);
         }
         return new TopFieldGroups(
             originalTopGroups.field,
             originalTopGroups.totalHits,
-            newScoreDocs,
+            newFieldDocs,
             originalTopGroups.fields,
             newGroupValues
         );
     }

+    private static FieldDoc[] rewriteFieldDocs(FieldDoc[] originalTopDocs, ScoreDoc[] rescoredTopDocs) {
+        Map<Integer, FieldDoc> docIdToFieldDoc = Maps.newMapWithExpectedSize(rescoredTopDocs.length);
+        Arrays.stream(originalTopDocs).forEach(d -> docIdToFieldDoc.put(d.doc, d));
+        var newDocs = new FieldDoc[rescoredTopDocs.length];
+        int pos = 0;
+        for (var doc : rescoredTopDocs) {
+            newDocs[pos] = docIdToFieldDoc.get(doc.doc);
+            newDocs[pos].score = doc.score;
+            newDocs[pos].fields[0] = doc.score;
+            pos++;
+        }
+        return newDocs;
+    }
+
     /**
      * Returns true if the provided docs are sorted by score.
      */
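validateSort() relaxes the old "no sort with rescore" rule: rescoring is now allowed when the primary sort is the score, optionally followed by a forward _shard_doc tiebreaker, and the FieldDoc rewrite helpers then restore the original sort/group structure on the rescored top docs. A small sketch of the accepted sort shapes, using an enum in place of Lucene's SortField (names are illustrative):

import java.util.List;

// Simplified model of the sort shapes RescorePhase.validateSort() accepts.
final class RescoreSortValidationSketch {

    enum SortKey { SCORE, SHARD_DOC, FIELD }

    record SortSpec(SortKey key, boolean reverse) {}

    static boolean validateSort(List<SortSpec> sort) {
        if (sort == null || sort.isEmpty()) {
            return true; // no explicit sort: plain score ordering, always fine
        }
        if (sort.get(0).key() != SortKey.SCORE) {
            return false; // primary sort must be the score
        }
        if (sort.size() == 1) {
            return true;
        }
        // Only a forward _shard_doc tiebreaker is allowed after the score.
        SortSpec tiebreaker = sort.get(1);
        return tiebreaker.key() == SortKey.SHARD_DOC && tiebreaker.reverse() == false;
    }

    public static void main(String[] args) {
        System.out.println(validateSort(null));                                                                          // true
        System.out.println(validateSort(List.of(new SortSpec(SortKey.SCORE, false))));                                   // true
        System.out.println(validateSort(List.of(new SortSpec(SortKey.SCORE, false), new SortSpec(SortKey.SHARD_DOC, false)))); // true
        System.out.println(validateSort(List.of(new SortSpec(SortKey.FIELD, false))));                                   // false
        System.out.println(validateSort(List.of(new SortSpec(SortKey.SCORE, false), new SortSpec(SortKey.SHARD_DOC, true))));  // false
    }
}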
@@ -39,7 +39,7 @@ public abstract class RescorerBuilder<RB extends RescorerBuilder<RB>>

     protected Integer windowSize;

-    private static final ParseField WINDOW_SIZE_FIELD = new ParseField("window_size");
+    public static final ParseField WINDOW_SIZE_FIELD = new ParseField("window_size");

     /**
      * Construct an empty RescoreBuilder.
@@ -32,10 +32,12 @@ import org.elasticsearch.search.sort.FieldSortBuilder;
 import org.elasticsearch.search.sort.ScoreSortBuilder;
 import org.elasticsearch.search.sort.ShardDocSortField;
 import org.elasticsearch.search.sort.SortBuilder;
+import org.elasticsearch.xcontent.ParseField;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Objects;

 import static org.elasticsearch.action.ValidateActions.addValidationError;
@@ -49,6 +51,8 @@ public abstract class CompoundRetrieverBuilder<T extends CompoundRetrieverBuilde

     public static final NodeFeature INNER_RETRIEVERS_FILTER_SUPPORT = new NodeFeature("inner_retrievers_filter_support");

+    public static final ParseField RANK_WINDOW_SIZE_FIELD = new ParseField("rank_window_size");
+
     public record RetrieverSource(RetrieverBuilder retriever, SearchSourceBuilder source) {}

     protected final int rankWindowSize;
@@ -81,6 +85,14 @@ public abstract class CompoundRetrieverBuilder<T extends CompoundRetrieverBuilde
         return true;
     }

+    /**
+     * Retrieves the {@link ParseField} used to configure the {@link CompoundRetrieverBuilder#rankWindowSize}
+     * at the REST layer.
+     */
+    public ParseField getRankWindowSizeField() {
+        return RANK_WINDOW_SIZE_FIELD;
+    }
+
     @Override
     public final RetrieverBuilder rewrite(QueryRewriteContext ctx) throws IOException {
         if (ctx.getPointInTimeBuilder() == null) {
@@ -209,14 +221,14 @@ public abstract class CompoundRetrieverBuilder<T extends CompoundRetrieverBuilde
         validationException = super.validate(source, validationException, isScroll, allowPartialSearchResults);
         if (source.size() > rankWindowSize) {
             validationException = addValidationError(
-                "["
-                    + this.getName()
-                    + "] requires [rank_window_size: "
-                    + rankWindowSize
-                    + "]"
-                    + " be greater than or equal to [size: "
-                    + source.size()
-                    + "]",
+                String.format(
+                    Locale.ROOT,
+                    "[%s] requires [%s: %d] be greater than or equal to [size: %d]",
+                    getName(),
+                    getRankWindowSizeField().getPreferredName(),
+                    rankWindowSize,
+                    source.size()
+                ),
                 validationException
             );
         }
@@ -231,6 +243,21 @@ public abstract class CompoundRetrieverBuilder<T extends CompoundRetrieverBuilde
         }
         for (RetrieverSource innerRetriever : innerRetrievers) {
             validationException = innerRetriever.retriever().validate(source, validationException, isScroll, allowPartialSearchResults);
+            if (innerRetriever.retriever() instanceof CompoundRetrieverBuilder<?> compoundChild) {
+                if (rankWindowSize > compoundChild.rankWindowSize) {
+                    String errorMessage = String.format(
+                        Locale.ROOT,
+                        "[%s] requires [%s: %d] to be smaller than or equal to its sub retriever's %s [%s: %d]",
+                        this.getName(),
+                        getRankWindowSizeField().getPreferredName(),
+                        rankWindowSize,
+                        compoundChild.getName(),
+                        compoundChild.getRankWindowSizeField(),
+                        compoundChild.rankWindowSize
+                    );
+                    validationException = addValidationError(errorMessage, validationException);
+                }
+            }
         }
         return validationException;
     }
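The compound-retriever validation now enforces two window constraints: the request size must not exceed rank_window_size, and a parent's rank_window_size must not exceed any compound child's. A compact sketch of both checks, detached from the real builder classes (names are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

// Hypothetical miniature of the rank_window_size validation in compound retrievers.
final class RankWindowValidationSketch {

    record Compound(String name, int rankWindowSize, List<Compound> children) {}

    static List<String> validate(Compound retriever, int requestSize) {
        List<String> errors = new ArrayList<>();
        if (requestSize > retriever.rankWindowSize()) {
            errors.add(String.format(Locale.ROOT,
                "[%s] requires [rank_window_size: %d] be greater than or equal to [size: %d]",
                retriever.name(), retriever.rankWindowSize(), requestSize));
        }
        for (Compound child : retriever.children()) {
            if (retriever.rankWindowSize() > child.rankWindowSize()) {
                errors.add(String.format(Locale.ROOT,
                    "[%s] requires [rank_window_size: %d] to be smaller than or equal to its sub retriever's [rank_window_size: %d]",
                    retriever.name(), retriever.rankWindowSize(), child.rankWindowSize()));
            }
        }
        return errors;
    }

    public static void main(String[] args) {
        Compound child = new Compound("rescorer", 10, List.of());
        Compound parent = new Compound("rrf", 50, List.of(child));
        // size 100 > 50 and parent window 50 > child window 10: two validation errors.
        validate(parent, 100).forEach(System.out::println);
        // size 10 with matching windows: no errors.
        System.out.println(validate(new Compound("rrf", 10, List.of(new Compound("rescorer", 10, List.of()))), 10));
    }
}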
@@ -0,0 +1,173 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.search.retriever;
+
+import org.apache.lucene.search.ScoreDoc;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.rank.RankDoc;
+import org.elasticsearch.search.rescore.RescorerBuilder;
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+import static org.elasticsearch.search.builder.SearchSourceBuilder.RESCORE_FIELD;
+import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * A {@link CompoundRetrieverBuilder} that re-scores only the results produced by its child retriever.
+ */
+public final class RescorerRetrieverBuilder extends CompoundRetrieverBuilder<RescorerRetrieverBuilder> {
+
+    public static final String NAME = "rescorer";
+    public static final ParseField RETRIEVER_FIELD = new ParseField("retriever");
+
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<RescorerRetrieverBuilder, RetrieverParserContext> PARSER = new ConstructingObjectParser<>(
+        NAME,
+        args -> new RescorerRetrieverBuilder((RetrieverBuilder) args[0], (List<RescorerBuilder<?>>) args[1])
+    );
+
+    static {
+        PARSER.declareNamedObject(constructorArg(), (parser, context, n) -> {
+            RetrieverBuilder innerRetriever = parser.namedObject(RetrieverBuilder.class, n, context);
+            context.trackRetrieverUsage(innerRetriever.getName());
+            return innerRetriever;
+        }, RETRIEVER_FIELD);
+        PARSER.declareField(constructorArg(), (parser, context) -> {
+            if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
+                List<RescorerBuilder<?>> rescorers = new ArrayList<>();
+                while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                    rescorers.add(RescorerBuilder.parseFromXContent(parser, name -> context.trackRescorerUsage(name)));
+                }
+                return rescorers;
+            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+                return List.of(RescorerBuilder.parseFromXContent(parser, name -> context.trackRescorerUsage(name)));
+            } else {
+                throw new IllegalArgumentException(
+                    "Unknown format for [rescorer.rescore], expects an object or an array of objects, got: " + parser.currentToken()
+                );
+            }
+        }, RESCORE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY);
+        RetrieverBuilder.declareBaseParserFields(NAME, PARSER);
+    }
+
+    public static RescorerRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) throws IOException {
+        try {
+            return PARSER.apply(parser, context);
+        } catch (Exception e) {
+            throw new ParsingException(parser.getTokenLocation(), e.getMessage(), e);
+        }
+    }
+
+    private final List<RescorerBuilder<?>> rescorers;
+
+    public RescorerRetrieverBuilder(RetrieverBuilder retriever, List<RescorerBuilder<?>> rescorers) {
+        super(List.of(new RetrieverSource(retriever, null)), extractMinWindowSize(rescorers));
+        if (rescorers.isEmpty()) {
+            throw new IllegalArgumentException("Missing rescore definition");
+        }
+        this.rescorers = rescorers;
+    }
+
+    private RescorerRetrieverBuilder(RetrieverSource retriever, List<RescorerBuilder<?>> rescorers) {
+        super(List.of(retriever), extractMinWindowSize(rescorers));
+        this.rescorers = rescorers;
+    }
+
+    /**
+     * The minimum window size is used as the {@link CompoundRetrieverBuilder#rankWindowSize},
+     * the final number of top documents to return in this retriever.
+     */
+    private static int extractMinWindowSize(List<RescorerBuilder<?>> rescorers) {
+        int windowSize = Integer.MAX_VALUE;
+        for (var rescore : rescorers) {
+            windowSize = Math.min(rescore.windowSize() == null ? RescorerBuilder.DEFAULT_WINDOW_SIZE : rescore.windowSize(), windowSize);
+        }
+        return windowSize;
+    }
+
+    @Override
+    public String getName() {
+        return NAME;
+    }
+
+    @Override
+    public ParseField getRankWindowSizeField() {
+        return RescorerBuilder.WINDOW_SIZE_FIELD;
+    }
+
+    @Override
+    protected SearchSourceBuilder finalizeSourceBuilder(SearchSourceBuilder source) {
+        /**
+         * The re-scorer is passed downstream because this query operates only on
+         * the top documents retrieved by the child retriever.
+         *
+         * - If the sub-retriever is a {@link CompoundRetrieverBuilder}, only the top
+         *   documents are re-scored since they are already determined at this stage.
+         * - For other retrievers that do not require a rewrite, the re-scorer's window
+         *   size is applied per shard. As a result, more documents are re-scored
+         *   compared to the final top documents produced by these retrievers in isolation.
+         */
+        for (var rescorer : rescorers) {
+            source.addRescorer(rescorer);
+        }
+        return source;
+    }
+
+    @Override
+    public void doToXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.field(RETRIEVER_FIELD.getPreferredName(), innerRetrievers.getFirst().retriever());
+        builder.startArray(RESCORE_FIELD.getPreferredName());
+        for (RescorerBuilder<?> rescorer : rescorers) {
+            rescorer.toXContent(builder, params);
+        }
+        builder.endArray();
+    }
+
+    @Override
+    protected RescorerRetrieverBuilder clone(List<RetrieverSource> newChildRetrievers, List<QueryBuilder> newPreFilterQueryBuilders) {
+        var newInstance = new RescorerRetrieverBuilder(newChildRetrievers.get(0), rescorers);
+        newInstance.preFilterQueryBuilders = newPreFilterQueryBuilders;
+        return newInstance;
+    }
+
+    @Override
+    protected RankDoc[] combineInnerRetrieverResults(List<ScoreDoc[]> rankResults) {
+        assert rankResults.size() == 1;
+        ScoreDoc[] scoreDocs = rankResults.getFirst();
+        RankDoc[] rankDocs = new RankDoc[scoreDocs.length];
+        for (int i = 0; i < scoreDocs.length; i++) {
+            ScoreDoc scoreDoc = scoreDocs[i];
+            rankDocs[i] = new RankDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex);
+            rankDocs[i].rank = i + 1;
+        }
+        return rankDocs;
+    }
|
||||||
|
@Override
|
||||||
|
public boolean doEquals(Object o) {
|
||||||
|
RescorerRetrieverBuilder that = (RescorerRetrieverBuilder) o;
|
||||||
|
return super.doEquals(o) && Objects.equals(rescorers, that.rescorers);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int doHashCode() {
|
||||||
|
return Objects.hash(super.doHashCode(), rescorers);
|
||||||
|
}
|
||||||
|
}
|
|
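
For orientation only, a minimal sketch of how this retriever could be assembled programmatically. It is not part of the change set: the child retriever is assumed to be supplied by the caller, the field name and phrase are hypothetical, and the sketch assumes RescorerRetrieverBuilder sits alongside RetrieverBuilder in org.elasticsearch.search.retriever. Only the public constructor above and the standard QueryRescorerBuilder are taken as given.

import java.util.List;

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.rescore.QueryRescorerBuilder;
import org.elasticsearch.search.retriever.RescorerRetrieverBuilder; // assumed package
import org.elasticsearch.search.retriever.RetrieverBuilder;

class RescorerRetrieverSketch {
    // Wraps an existing child retriever so that only its top hits are re-scored.
    // The smallest rescore window becomes the compound rank window size, as
    // extractMinWindowSize above shows.
    static RescorerRetrieverBuilder rescoreTopHits(RetrieverBuilder child) {
        QueryRescorerBuilder rescorer = new QueryRescorerBuilder(
            QueryBuilders.matchPhraseQuery("title", "vector search") // hypothetical field and text
        );
        rescorer.windowSize(50); // limit rescoring to the top 50 hits
        return new RescorerRetrieverBuilder(child, List.of(rescorer));
    }
}
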
@@ -63,7 +63,7 @@ public abstract class RetrieverBuilder implements Rewriteable<RetrieverBuilder>,
         AbstractObjectParser<? extends RetrieverBuilder, RetrieverParserContext> parser
     ) {
         parser.declareObjectArray(
-            (r, v) -> r.preFilterQueryBuilders = v,
+            (r, v) -> r.preFilterQueryBuilders = new ArrayList<>(v),
            (p, c) -> AbstractQueryBuilder.parseTopLevelQuery(p, c::trackQueryUsage),
             PRE_FILTER_FIELD
         );
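
The only functional change in the hunk above is the defensive copy of the parsed pre-filter list. A plausible reading (an inference, not stated in the diff) is that the parser can hand back an immutable list, so copying into an ArrayList keeps preFilterQueryBuilders safely mutable. A self-contained illustration of the difference:

import java.util.ArrayList;
import java.util.List;

class DefensiveCopySketch {
    public static void main(String[] args) {
        List<String> parsed = List.of("term", "range"); // immutable, as a parser may return
        List<String> filters = new ArrayList<>(parsed); // independent, mutable copy
        filters.add("match");                           // fine; parsed.add(...) would throw UnsupportedOperationException
        System.out.println(filters);                    // [term, range, match]
    }
}
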
@@ -88,6 +88,7 @@ grant codeBase "${codebase.elasticsearch}" {
 // this is the test-framework, but the jar is horribly named
 grant codeBase "${codebase.framework}" {
   permission java.lang.RuntimePermission "setSecurityManager";
+  permission java.lang.RuntimePermission "createClassLoader";
 };

 grant codeBase "${codebase.elasticsearch-rest-client}" {
@@ -129,4 +130,5 @@ grant {
   permission java.nio.file.LinkPermission "symbolic";
   // needed for keystore tests
   permission java.lang.RuntimePermission "accessUserInformation";
+  permission java.lang.RuntimePermission "getClassLoader";
 };
@@ -22,7 +22,7 @@ import static org.hamcrest.Matchers.equalTo;

 public class HumanReadableIndexSettingsTests extends ESTestCase {
     public void testHumanReadableSettings() {
-        IndexVersion versionCreated = randomVersion(random());
+        IndexVersion versionCreated = randomVersion();
         long created = System.currentTimeMillis();
         Settings testSettings = Settings.builder()
             .put(IndexMetadata.SETTING_VERSION_CREATED, versionCreated)
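
The hunk above is the first of many in this commit that drop the explicit random() seed from the index-version test helpers. A short summary sketch of the pattern may help; the helper names come from the hunks themselves, while the behaviour described in the comments is an assumption:

import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.test.index.IndexVersionUtils;

class RandomVersionMigrationSketch {
    // Old call-site style, seeded explicitly: IndexVersionUtils.randomVersion(random())
    // New style: the helpers presumably draw on the test's randomness internally.
    static IndexVersion anyKnownVersion() {
        return IndexVersionUtils.randomVersion();      // any known index version
    }

    static IndexVersion writableVersion() {
        return IndexVersionUtils.randomWriteVersion(); // assumed: limited to write-compatible versions
    }
}
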
@@ -387,11 +387,7 @@ public class MetadataCreateIndexServiceTests extends ESTestCase {
     }

     public void testPrepareResizeIndexSettings() {
-        final List<IndexVersion> versions = Stream.of(IndexVersionUtils.randomVersion(random()), IndexVersionUtils.randomVersion(random()))
-            .sorted()
-            .toList();
-        final IndexVersion version = versions.get(0);
-        final IndexVersion upgraded = versions.get(1);
+        final IndexVersion version = IndexVersionUtils.randomWriteVersion();
         final Settings.Builder indexSettingsBuilder = Settings.builder()
             .put("index.version.created", version)
             .put("index.similarity.default.type", "BM25")
@@ -103,7 +103,7 @@ public class MetadataDeleteIndexServiceTests extends ESTestCase {
                 Map.of(),
                 null,
                 SnapshotInfoTestUtils.randomUserMetadata(),
-                IndexVersionUtils.randomVersion(random())
+                IndexVersionUtils.randomVersion()
             )
         );
         final Index index = new Index(indexName, randomUUID());
@@ -162,7 +162,7 @@ public class MetadataDeleteIndexServiceTests extends ESTestCase {
         String alias = randomAlphaOfLength(5);

         IndexMetadata idxMetadata = IndexMetadata.builder(index)
-            .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random())))
+            .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion()))
             .putAlias(AliasMetadata.builder(alias).writeIndex(true).build())
             .numberOfShards(1)
             .numberOfReplicas(1)
@@ -403,7 +403,7 @@ public class MetadataDeleteIndexServiceTests extends ESTestCase {
             IndexMetadata.builder(index.getName())
                 .settings(
                     indexSettings(
-                        IndexVersionUtils.randomVersion(random()),
+                        IndexVersionUtils.randomVersion(),
                         index.getUUID(),
                         randomIntBetween(1, 3),
                         randomIntBetween(0, 2)
@@ -438,7 +438,7 @@ public class MetadataDeleteIndexServiceTests extends ESTestCase {

     private ClusterState clusterState(Index index) {
         final IndexMetadata indexMetadata = IndexMetadata.builder(index.getName())
-            .settings(indexSettings(IndexVersionUtils.randomVersion(random()), index.getUUID(), 1, 1))
+            .settings(indexSettings(IndexVersionUtils.randomVersion(), index.getUUID(), 1, 1))
             .build();
         final ProjectId projectId = randomProjectIdOrDefault();
         final Metadata.Builder metadataBuilder = Metadata.builder().put(ProjectMetadata.builder(projectId).put(indexMetadata, false));
@@ -864,7 +864,7 @@ public class MetadataIndexAliasesServiceTests extends ESTestCase {

     private ClusterState createIndex(ClusterState state, String index) {
         IndexMetadata indexMetadata = IndexMetadata.builder(index)
-            .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random())))
+            .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomWriteVersion()))
             .numberOfShards(1)
             .numberOfReplicas(1)
             .build();
@@ -431,7 +431,7 @@ public class MetadataIndexStateServiceTests extends ESTestCase {
             shardsBuilder,
             null,
             SnapshotInfoTestUtils.randomUserMetadata(),
-            IndexVersionUtils.randomVersion(random())
+            IndexVersionUtils.randomVersion()
         );
         return ClusterState.builder(newState).putCustom(SnapshotsInProgress.TYPE, SnapshotsInProgress.EMPTY.withAddedEntry(entry)).build();
     }
@@ -43,7 +43,7 @@ public class NodeMetadataTests extends ESTestCase {
     }

     private IndexVersion randomIndexVersion() {
-        return rarely() ? IndexVersion.fromId(randomInt()) : IndexVersionUtils.randomVersion(random());
+        return rarely() ? IndexVersion.fromId(randomInt()) : IndexVersionUtils.randomVersion();
     }

     public void testEqualsHashcodeSerialization() {
@@ -160,6 +160,20 @@ public class IndexSortSettingsTests extends ESTestCase {
         assertEquals("Cannot use alias [field] as an index sort field", e.getMessage());
     }

+    public void testSortingAgainstAliasesPre713() {
+        IndexSettings indexSettings = indexSettings(
+            Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.V_7_12_0).put("index.sort.field", "field").build()
+        );
+        MappedFieldType aliased = new KeywordFieldMapper.KeywordFieldType("aliased");
+        Sort sort = buildIndexSort(indexSettings, Map.of("field", aliased));
+        assertThat(sort.getSort(), arrayWithSize(1));
+        assertThat(sort.getSort()[0].getField(), equalTo("aliased"));
+        assertWarnings(
+            "Index sort for index [test] defined on field [field] which resolves to field [aliased]. "
+                + "You will not be able to define an index sort over aliased fields in new indexes"
+        );
+    }
+
     public void testTimeSeriesMode() {
         IndexSettings indexSettings = indexSettings(
             Settings.builder()
@@ -11,7 +11,6 @@ package org.elasticsearch.index;

 import org.apache.lucene.util.Version;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.index.IndexVersionUtils;
 import org.hamcrest.Matchers;
@@ -151,9 +150,7 @@ public class IndexVersionTests extends ESTestCase {
         }
     }

-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    @AwaitsFix(bugUrl = "believe this fails because index version has not yet been bumped to 9.0")
-    public void testMinimumCompatibleVersion() {
+    public void testGetMinimumCompatibleIndexVersion() {
         assertThat(IndexVersion.getMinimumCompatibleIndexVersion(7170099), equalTo(IndexVersion.fromId(6000099)));
         assertThat(IndexVersion.getMinimumCompatibleIndexVersion(8000099), equalTo(IndexVersion.fromId(7000099)));
         assertThat(IndexVersion.getMinimumCompatibleIndexVersion(10000000), equalTo(IndexVersion.fromId(9000000)));
@@ -193,8 +190,6 @@ public class IndexVersionTests extends ESTestCase {
         }
     }

-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    @AwaitsFix(bugUrl = "can be unmuted once lucene is bumped to version 10")
     public void testLuceneVersionOnUnknownVersions() {
         // between two known versions, should use the lucene version of the previous version
         IndexVersion previousVersion = IndexVersionUtils.getPreviousVersion();
@@ -207,7 +202,7 @@ public class IndexVersionTests extends ESTestCase {

         // too old version, major should be the oldest supported lucene version minus 1
         IndexVersion oldVersion = IndexVersion.fromId(5020199);
-        assertThat(oldVersion.luceneVersion().major, equalTo(IndexVersionUtils.getFirstVersion().luceneVersion().major - 1));
+        assertThat(oldVersion.luceneVersion().major, equalTo(IndexVersionUtils.getLowestReadCompatibleVersion().luceneVersion().major - 1));

         // future version, should be the same version as today
         IndexVersion futureVersion = IndexVersion.fromId(currentVersion.id() + 100);
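
Alongside the seeding change, the getFirstVersion() call sites in this and later hunks move to helpers whose names spell out the compatibility direction. A brief sketch of the assumed distinction; the method names come from the hunks, while the read-versus-write semantics are inferred:

import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.test.index.IndexVersionUtils;

class CompatibilityBoundsSketch {
    // Assumed: the oldest index version this node can still read (e.g. archive or read-only indices).
    static IndexVersion oldestReadable() {
        return IndexVersionUtils.getLowestReadCompatibleVersion();
    }

    // Assumed: the oldest index version this node can still write to; at least as new as the read bound.
    static IndexVersion oldestWritable() {
        return IndexVersionUtils.getLowestWriteCompatibleVersion();
    }
}
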
@@ -106,7 +106,7 @@ public class AnalysisRegistryTests extends ESTestCase {
     }

     public void testDefaultAnalyzers() throws IOException {
-        IndexVersion version = IndexVersionUtils.randomVersion(random());
+        IndexVersion version = IndexVersionUtils.randomVersion();
         Settings settings = Settings.builder()
             .put(IndexMetadata.SETTING_VERSION_CREATED, version)
             .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
@@ -120,7 +120,7 @@ public class AnalysisRegistryTests extends ESTestCase {
     }

     public void testOverrideDefaultAnalyzer() throws IOException {
-        IndexVersion version = IndexVersionUtils.randomVersion(random());
+        IndexVersion version = IndexVersionUtils.randomVersion();
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
         IndexAnalyzers indexAnalyzers = AnalysisRegistry.build(
             IndexCreationContext.CREATE_INDEX,
@@ -137,7 +137,7 @@ public class AnalysisRegistryTests extends ESTestCase {
     }

     public void testOverrideDefaultAnalyzerWithoutAnalysisModeAll() throws IOException {
-        IndexVersion version = IndexVersionUtils.randomVersion(random());
+        IndexVersion version = IndexVersionUtils.randomVersion();
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
         IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("index", settings);
         TokenFilterFactory tokenFilter = new AbstractTokenFilterFactory("my_filter") {
@@ -216,7 +216,7 @@ public class AnalysisRegistryTests extends ESTestCase {
     }

     public void testOverrideDefaultSearchAnalyzer() {
-        IndexVersion version = IndexVersionUtils.randomVersion(random());
+        IndexVersion version = IndexVersionUtils.randomVersion();
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
         IndexAnalyzers indexAnalyzers = AnalysisRegistry.build(
             IndexCreationContext.CREATE_INDEX,
@@ -319,8 +319,8 @@ public class AnalysisRegistryTests extends ESTestCase {
         }
     }

-    public void testNoTypeOrTokenizerErrorMessage() throws IOException {
-        IndexVersion version = IndexVersionUtils.randomVersion(random());
+    public void testNoTypeOrTokenizerErrorMessage() {
+        IndexVersion version = IndexVersionUtils.randomVersion();
         Settings settings = Settings.builder()
             .put(IndexMetadata.SETTING_VERSION_CREATED, version)
             .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
@@ -63,7 +63,7 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {
             PreBuiltAnalyzers.STANDARD.getAnalyzer(IndexVersion.current())
         );
         // same index version should be cached
-        IndexVersion v = IndexVersionUtils.randomVersion(random());
+        IndexVersion v = IndexVersionUtils.randomVersion();
         assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(v), PreBuiltAnalyzers.STANDARD.getAnalyzer(v));
         assertNotSame(
             PreBuiltAnalyzers.STANDARD.getAnalyzer(IndexVersion.current()),
@@ -71,7 +71,7 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {
         );

         // Same Lucene version should be cached:
-        IndexVersion v1 = IndexVersionUtils.randomVersion(random());
+        IndexVersion v1 = IndexVersionUtils.randomVersion();
         IndexVersion v2 = new IndexVersion(v1.id() - 1, v1.luceneVersion());
         assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(v1), PreBuiltAnalyzers.STOP.getAnalyzer(v2));
     }
@@ -81,7 +81,7 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {
         PreBuiltAnalyzers randomPreBuiltAnalyzer = PreBuiltAnalyzers.values()[randomInt];
         String analyzerName = randomPreBuiltAnalyzer.name().toLowerCase(Locale.ROOT);

-        IndexVersion randomVersion = IndexVersionUtils.randomVersion(random());
+        IndexVersion randomVersion = IndexVersionUtils.randomWriteVersion();
         Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, randomVersion).build();

         NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider(
@@ -41,7 +41,7 @@ public class PreConfiguredTokenFilterTests extends ESTestCase {

         IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", Settings.EMPTY);

-        IndexVersion version1 = IndexVersionUtils.randomVersion(random());
+        IndexVersion version1 = IndexVersionUtils.randomVersion();
         Settings settings1 = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version1).build();
         TokenFilterFactory tff_v1_1 = pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "singleton", settings1);
         TokenFilterFactory tff_v1_2 = pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "singleton", settings1);
@@ -66,7 +66,7 @@ public class PreConfiguredTokenFilterTests extends ESTestCase {
             }
         );

-        IndexVersion version1 = IndexVersionUtils.randomVersion(random());
+        IndexVersion version1 = IndexVersionUtils.randomVersion();
         IndexSettings indexSettings1 = IndexSettingsModule.newIndexSettings(
             "test",
             Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version1).build()
@@ -133,7 +133,7 @@ public class PreConfiguredTokenFilterTests extends ESTestCase {
         );
         assertSame(tff_v1_1, tff_v1_2);

-        IndexVersion version2 = IndexVersionUtils.getPreviousMajorVersion(IndexVersionUtils.getFirstVersion());
+        IndexVersion version2 = IndexVersionUtils.getPreviousMajorVersion(IndexVersionUtils.getLowestReadCompatibleVersion());
         IndexSettings indexSettings2 = IndexSettingsModule.newIndexSettings(
             "test",
             Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version2).build()
@@ -6645,7 +6645,7 @@ public class InternalEngineTests extends EngineTestCase {
         for (IndexVersion createdVersion : List.of(
             IndexVersion.current(),
             lowestCompatiblePreviousVersion,
-            IndexVersionUtils.getFirstVersion()
+            IndexVersionUtils.getLowestWriteCompatibleVersion()
         )) {
             Settings settings = Settings.builder().put(indexSettings()).put(IndexMetadata.SETTING_VERSION_CREATED, createdVersion).build();
             IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings);
@@ -44,7 +44,7 @@ import java.util.Set;
 import java.util.stream.Collectors;

 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;

 public class RecoverySourcePruneMergePolicyTests extends ESTestCase {

@@ -191,7 +191,7 @@ public class RecoverySourcePruneMergePolicyTests extends ESTestCase {
                 }
                 assertEquals(i, extra_source.docID());
                 if (syntheticRecoverySource) {
-                    assertThat(extra_source.longValue(), greaterThan(10L));
+                    assertThat(extra_source.longValue(), greaterThanOrEqualTo(10L));
                 } else {
                     assertThat(extra_source.longValue(), equalTo(1L));
                 }

Some files were not shown because too many files have changed in this diff.