Merge main into multi-project

Yang Wang 2025-02-05 11:03:29 +11:00
commit fee57daf53
138 changed files with 3093 additions and 2289 deletions


@@ -163,6 +163,7 @@ public class MrjarPlugin implements Plugin<Project> {
project.getConfigurations().register("java" + javaVersion);
TaskProvider<Jar> jarTask = project.getTasks().register("java" + javaVersion + "Jar", Jar.class, task -> {
task.from(sourceSet.getOutput());
task.getArchiveClassifier().set("java" + javaVersion);
});
project.getArtifacts().add("java" + javaVersion, jarTask);
}
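The hunk above wires one consumable configuration and one classifier-suffixed jar per target Java version. As a reference point, here is a minimal, self-contained sketch of that pattern against the plain Gradle API; the plugin class name PerVersionJarPlugin and the fixed version 21 are illustrative only, and the real plugin's sourceSet wiring is omitted:

import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.bundling.Jar;

// Hypothetical standalone plugin mirroring the MrjarPlugin pattern above.
public class PerVersionJarPlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        int javaVersion = 21; // illustrative; the real plugin derives this per source set
        // A configuration that other projects can resolve to obtain the per-version jar.
        project.getConfigurations().register("java" + javaVersion);
        TaskProvider<Jar> jarTask = project.getTasks().register("java" + javaVersion + "Jar", Jar.class, task -> {
            // The classifier keeps e.g. foo-java21.jar distinct from the main foo.jar.
            task.getArchiveClassifier().set("java" + javaVersion);
        });
        // Expose the jar as the artifact of the matching configuration.
        project.getArtifacts().add("java" + javaVersion, jarTask);
    }
}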


@@ -60,6 +60,7 @@ import static org.gradle.api.JavaVersion.VERSION_20;
import static org.gradle.api.JavaVersion.VERSION_21;
import static org.gradle.api.JavaVersion.VERSION_22;
import static org.gradle.api.JavaVersion.VERSION_23;
+import static org.gradle.api.JavaVersion.VERSION_24;
@CacheableTask
public abstract class ThirdPartyAuditTask extends DefaultTask {
@@ -341,8 +342,12 @@ public abstract class ThirdPartyAuditTask extends DefaultTask {
spec.setExecutable(javaHome.get() + "/bin/java");
}
spec.classpath(getForbiddenAPIsClasspath(), getThirdPartyClasspath());
-// Enable explicitly for each release as appropriate. Just JDK 20/21/22/23 for now, and just the vector module.
-if (isJavaVersion(VERSION_20) || isJavaVersion(VERSION_21) || isJavaVersion(VERSION_22) || isJavaVersion(VERSION_23)) {
+// Enable explicitly for each release as appropriate. Just JDK 20/21/22/23/24 for now, and just the vector module.
+if (isJavaVersion(VERSION_20)
+    || isJavaVersion(VERSION_21)
+    || isJavaVersion(VERSION_22)
+    || isJavaVersion(VERSION_23)
+    || isJavaVersion(VERSION_24)) {
spec.jvmArgs("--add-modules", "jdk.incubator.vector");
}
spec.jvmArgs("-Xmx1g");
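The widened condition above enables the incubating vector API on JDK 20 through 24 when launching forbidden-apis. A compact sketch of the same gate, assuming the Gradle JavaVersion enum; the EnumSet range stands in for the task's chained isJavaVersion checks, and VectorModuleGate is a hypothetical helper, not part of this commit:

import java.util.EnumSet;
import java.util.Set;
import org.gradle.api.JavaVersion;

final class VectorModuleGate {
    // JavaVersion declares its constants in release order, so a range covers 20..24.
    private static final Set<JavaVersion> VECTOR_JDKS =
        EnumSet.range(JavaVersion.VERSION_20, JavaVersion.VERSION_24);

    // Extra JVM flags to pass when running on one of the gated runtimes.
    static String[] extraJvmArgs(JavaVersion runtime) {
        return VECTOR_JDKS.contains(runtime)
            ? new String[] { "--add-modules", "jdk.incubator.vector" }
            : new String[0];
    }
}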


@@ -1,5 +0,0 @@
-pr: 120807
-summary: Remove INDEX_REFRESH_BLOCK after index becomes searchable
-area: CRUD
-type: enhancement
-issues: []


@@ -30,7 +30,7 @@
}
],
"examples" : [
-"FROM books \n| WHERE KQL(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;"
+"FROM books \n| WHERE KQL(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5"
],
"preview" : true,
"snapshot_only" : false


@@ -20,10 +20,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -45,10 +45,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -70,10 +70,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -95,10 +95,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -120,10 +120,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -145,10 +145,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -170,10 +170,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -195,10 +195,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -220,10 +220,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -245,10 +245,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -270,10 +270,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -295,10 +295,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -320,10 +320,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -345,10 +345,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -370,10 +370,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -395,10 +395,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -420,10 +420,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -445,10 +445,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -470,10 +470,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -495,10 +495,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -520,10 +520,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -545,10 +545,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -570,10 +570,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -595,10 +595,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@@ -620,10 +620,10 @@
},
{
"name" : "options",
-"type" : "function named parameters",
+"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@ -645,10 +645,10 @@
},
{
"name" : "options",
"type" : "function named parameters",
"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@ -670,10 +670,10 @@
},
{
"name" : "options",
"type" : "function named parameters",
"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@ -695,10 +695,10 @@
},
{
"name" : "options",
"type" : "function named parameters",
"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@ -720,10 +720,10 @@
},
{
"name" : "options",
"type" : "function named parameters",
"type" : "function_named_parameters",
"mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
"optional" : true,
"description" : "Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
"description" : "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>. See <<query-dsl-match-query,match query>> for more information."
}
],
"variadic" : false,
@ -731,7 +731,7 @@
}
],
"examples" : [
"FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;",
"FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5",
"FROM books \n| WHERE MATCH(title, \"Hobbit Back Again\", {\"operator\": \"AND\"})\n| KEEP title;"
],
"preview" : true,

View file

@ -529,7 +529,7 @@
}
],
"examples" : [
"FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;"
"FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5"
],
"preview" : true,
"snapshot_only" : false

View file

@ -30,7 +30,7 @@
}
],
"examples" : [
"FROM books \n| WHERE QSTR(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;"
"FROM books \n| WHERE QSTR(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5"
],
"preview" : true,
"snapshot_only" : false

View file

@ -10,5 +10,5 @@ FROM books
| WHERE KQL("author: Faulkner")
| KEEP book_no, author
| SORT book_no
| LIMIT 5;
| LIMIT 5
```

View file

@ -21,5 +21,5 @@ FROM books
| WHERE MATCH(author, "Faulkner")
| KEEP book_no, author
| SORT book_no
| LIMIT 5;
| LIMIT 5
```

View file

@ -18,5 +18,5 @@ FROM books
| WHERE MATCH(author, "Faulkner")
| KEEP book_no, author
| SORT book_no
| LIMIT 5;
| LIMIT 5
```

View file

@ -10,5 +10,5 @@ FROM books
| WHERE QSTR("author: Faulkner")
| KEEP book_no, author
| SORT book_no
| LIMIT 5;
| LIMIT 5
```

View file

@ -23,16 +23,22 @@ import org.objectweb.asm.Type;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class InstrumentationServiceImpl implements InstrumentationService {
private static final String OBJECT_INTERNAL_NAME = Type.getInternalName(Object.class);
@Override
public Instrumenter newInstrumenter(Class<?> clazz, Map<MethodKey, CheckMethod> methods) {
return InstrumenterImpl.create(clazz, methods);
@ -40,10 +46,36 @@ public class InstrumentationServiceImpl implements InstrumentationService {
@Override
public Map<MethodKey, CheckMethod> lookupMethods(Class<?> checkerClass) throws IOException {
var methodsToInstrument = new HashMap<MethodKey, CheckMethod>();
var classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass);
Map<MethodKey, CheckMethod> methodsToInstrument = new HashMap<>();
Set<Class<?>> visitedClasses = new HashSet<>();
ArrayDeque<Class<?>> classesToVisit = new ArrayDeque<>(Collections.singleton(checkerClass));
while (classesToVisit.isEmpty() == false) {
var currentClass = classesToVisit.remove();
if (visitedClasses.contains(currentClass)) {
continue;
}
visitedClasses.add(currentClass);
var classFileInfo = InstrumenterImpl.getClassFileInfo(currentClass);
ClassReader reader = new ClassReader(classFileInfo.bytecodes());
ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) {
@Override
public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
super.visit(version, access, name, signature, superName, interfaces);
try {
if (OBJECT_INTERNAL_NAME.equals(superName) == false) {
classesToVisit.add(Class.forName(Type.getObjectType(superName).getClassName()));
}
for (var interfaceName : interfaces) {
classesToVisit.add(Class.forName(Type.getObjectType(interfaceName).getClassName()));
}
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException("Cannot inspect checker class " + checkerClass.getName(), e);
}
}
@Override
public MethodVisitor visitMethod(
int access,
@ -58,14 +90,19 @@ public class InstrumentationServiceImpl implements InstrumentationService {
var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes);
var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList();
var checkMethod = new CheckMethod(Type.getInternalName(checkerClass), checkerMethodName, checkerParameterDescriptors);
var checkMethod = new CheckMethod(
Type.getInternalName(currentClass),
checkerMethodName,
checkerParameterDescriptors
);
methodsToInstrument.put(methodToInstrument, checkMethod);
methodsToInstrument.putIfAbsent(methodToInstrument, checkMethod);
}
return mv;
}
};
reader.accept(visitor, 0);
}
return methodsToInstrument;
}
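
The rewritten lookup above replaces the single-class scan with a breadth-first walk of the checker's type hierarchy: starting from the checker class, it queues the superclass (unless it is `Object`) and every superinterface, and `putIfAbsent` keeps the first declaration it encounters, so a check method re-declared in a derived interface wins over the one inherited from its parent. A minimal sketch of the same traversal using plain reflection instead of ASM (names are illustrative, not part of the entitlement API):

```java
import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.Set;

// Sketch of the breadth-first hierarchy walk used above, with plain reflection
// standing in for the ASM ClassReader/ClassVisitor machinery.
public class HierarchyWalkSketch {
    public static Set<Class<?>> walk(Class<?> root) {
        Set<Class<?>> visited = new HashSet<>();
        ArrayDeque<Class<?>> toVisit = new ArrayDeque<>();
        toVisit.add(root);
        while (toVisit.isEmpty() == false) {
            Class<?> current = toVisit.remove();
            if (visited.add(current) == false) {
                continue; // already reached via another path, e.g. a shared superinterface
            }
            Class<?> superClass = current.getSuperclass();
            if (superClass != null && superClass != Object.class) {
                toVisit.add(superClass); // mirrors the OBJECT_INTERNAL_NAME guard above
            }
            for (Class<?> iface : current.getInterfaces()) {
                toVisit.add(iface);
            }
        }
        return visited;
    }
}
```

Because the root class is dequeued first, the `putIfAbsent` in the real code resolves a re-declared check method to the most derived interface, which is exactly what `testInstrumentationTargetLookupWithDerivedClass` below asserts.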

View file

@ -55,6 +55,14 @@ public class InstrumentationServiceImplTests extends ESTestCase {
void check$org_example_TestTargetClass$instanceMethodWithArgs(Class<?> clazz, TestTargetClass that, int x, int y);
}
interface TestCheckerDerived extends TestChecker {
void check$org_example_TestTargetClass$instanceMethodNoArgs(Class<?> clazz, TestTargetClass that);
void check$org_example_TestTargetClass$differentInstanceMethod(Class<?> clazz, TestTargetClass that);
}
interface TestCheckerDerived2 extends TestCheckerDerived, TestChecker {}
interface TestCheckerOverloads {
void check$org_example_TestTargetClass$$staticMethodWithOverload(Class<?> clazz, int x, int y);
@ -160,6 +168,75 @@ public class InstrumentationServiceImplTests extends ESTestCase {
);
}
public void testInstrumentationTargetLookupWithDerivedClass() throws IOException {
Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestCheckerDerived2.class);
assertThat(checkMethods, aMapWithSize(4));
assertThat(
checkMethods,
hasEntry(
equalTo(new MethodKey("org/example/TestTargetClass", "staticMethod", List.of("I", "java/lang/String", "java/lang/Object"))),
equalTo(
new CheckMethod(
"org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker",
"check$org_example_TestTargetClass$$staticMethod",
List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;", "Ljava/lang/Object;")
)
)
)
);
assertThat(
checkMethods,
hasEntry(
equalTo(new MethodKey("org/example/TestTargetClass", "instanceMethodNoArgs", List.of())),
equalTo(
new CheckMethod(
"org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerDerived",
"check$org_example_TestTargetClass$instanceMethodNoArgs",
List.of(
"Ljava/lang/Class;",
"Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;"
)
)
)
)
);
assertThat(
checkMethods,
hasEntry(
equalTo(new MethodKey("org/example/TestTargetClass", "instanceMethodWithArgs", List.of("I", "I"))),
equalTo(
new CheckMethod(
"org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker",
"check$org_example_TestTargetClass$instanceMethodWithArgs",
List.of(
"Ljava/lang/Class;",
"Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;",
"I",
"I"
)
)
)
)
);
assertThat(
checkMethods,
hasEntry(
equalTo(new MethodKey("org/example/TestTargetClass", "differentInstanceMethod", List.of())),
equalTo(
new CheckMethod(
"org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerDerived",
"check$org_example_TestTargetClass$differentInstanceMethod",
List.of(
"Ljava/lang/Class;",
"Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;"
)
)
)
)
);
}
public void testInstrumentationTargetLookupWithCtors() throws IOException {
Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestCheckerCtors.class);

View file

@ -81,7 +81,7 @@ public interface EntitlementChecker {
/// /////////////////
//
// ClassLoader ctor
// create class loaders
//
void check$java_lang_ClassLoader$(Class<?> callerClass);
@ -90,22 +90,6 @@ public interface EntitlementChecker {
void check$java_lang_ClassLoader$(Class<?> callerClass, String name, ClassLoader parent);
/// /////////////////
//
// SecureClassLoader ctor
//
void check$java_security_SecureClassLoader$(Class<?> callerClass);
void check$java_security_SecureClassLoader$(Class<?> callerClass, ClassLoader parent);
void check$java_security_SecureClassLoader$(Class<?> callerClass, String name, ClassLoader parent);
/// /////////////////
//
// URLClassLoader constructors
//
void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls);
void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls, ClassLoader parent);
@ -116,6 +100,12 @@ public interface EntitlementChecker {
void check$java_net_URLClassLoader$(Class<?> callerClass, String name, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory);
void check$java_security_SecureClassLoader$(Class<?> callerClass);
void check$java_security_SecureClassLoader$(Class<?> callerClass, ClassLoader parent);
void check$java_security_SecureClassLoader$(Class<?> callerClass, String name, ClassLoader parent);
/// /////////////////
//
// "setFactory" methods
@ -143,6 +133,8 @@ public interface EntitlementChecker {
// System Properties and similar
//
void check$java_lang_System$$setProperties(Class<?> callerClass, Properties props);
void check$java_lang_System$$setProperty(Class<?> callerClass, String key, String value);
void check$java_lang_System$$clearProperty(Class<?> callerClass, String key);
@ -152,33 +144,33 @@ public interface EntitlementChecker {
// JVM-wide state changes
//
void check$java_lang_System$$setIn(Class<?> callerClass, InputStream in);
void check$java_lang_System$$setOut(Class<?> callerClass, PrintStream out);
void check$com_sun_tools_jdi_VirtualMachineManagerImpl$$virtualMachineManager(Class<?> callerClass);
void check$java_lang_System$$setErr(Class<?> callerClass, PrintStream err);
void check$java_lang_System$$setProperties(Class<?> callerClass, Properties props);
void check$java_lang_System$$setIn(Class<?> callerClass, InputStream in);
void check$java_lang_System$$setOut(Class<?> callerClass, PrintStream out);
void check$java_lang_Runtime$addShutdownHook(Class<?> callerClass, Runtime runtime, Thread hook);
void check$java_lang_Runtime$removeShutdownHook(Class<?> callerClass, Runtime runtime, Thread hook);
void check$jdk_tools_jlink_internal_Jlink$(Class<?> callerClass);
void check$jdk_tools_jlink_internal_Main$$run(Class<?> callerClass, PrintWriter out, PrintWriter err, String... args);
void check$jdk_vm_ci_services_JVMCIServiceLocator$$getProviders(Class<?> callerClass, Class<?> service);
void check$jdk_vm_ci_services_Services$$load(Class<?> callerClass, Class<?> service);
void check$jdk_vm_ci_services_Services$$loadSingle(Class<?> callerClass, Class<?> service, boolean required);
void check$com_sun_tools_jdi_VirtualMachineManagerImpl$$virtualMachineManager(Class<?> callerClass);
void check$java_lang_Thread$$setDefaultUncaughtExceptionHandler(Class<?> callerClass, Thread.UncaughtExceptionHandler ueh);
void check$java_util_spi_LocaleServiceProvider$(Class<?> callerClass);
void check$java_net_DatagramSocket$$setDatagramSocketImplFactory(Class<?> callerClass, DatagramSocketImplFactory fac);
void check$java_net_HttpURLConnection$$setFollowRedirects(Class<?> callerClass, boolean set);
void check$java_net_ServerSocket$$setSocketFactory(Class<?> callerClass, SocketImplFactory fac);
void check$java_net_Socket$$setSocketImplFactory(Class<?> callerClass, SocketImplFactory fac);
void check$java_net_URL$$setURLStreamHandlerFactory(Class<?> callerClass, URLStreamHandlerFactory fac);
void check$java_net_URLConnection$$setFileNameMap(Class<?> callerClass, FileNameMap map);
void check$java_net_URLConnection$$setContentHandlerFactory(Class<?> callerClass, ContentHandlerFactory fac);
void check$java_text_spi_BreakIteratorProvider$(Class<?> callerClass);
@ -200,6 +192,8 @@ public interface EntitlementChecker {
void check$java_util_spi_LocaleNameProvider$(Class<?> callerClass);
void check$java_util_spi_LocaleServiceProvider$(Class<?> callerClass);
void check$java_util_spi_TimeZoneNameProvider$(Class<?> callerClass);
void check$java_util_logging_LogManager$(Class<?> callerClass);
@ -210,19 +204,15 @@ public interface EntitlementChecker {
void check$java_util_TimeZone$$setDefault(Class<?> callerClass, TimeZone zone);
void check$java_net_DatagramSocket$$setDatagramSocketImplFactory(Class<?> callerClass, DatagramSocketImplFactory fac);
void check$jdk_tools_jlink_internal_Jlink$(Class<?> callerClass);
void check$java_net_HttpURLConnection$$setFollowRedirects(Class<?> callerClass, boolean set);
void check$jdk_tools_jlink_internal_Main$$run(Class<?> callerClass, PrintWriter out, PrintWriter err, String... args);
void check$java_net_ServerSocket$$setSocketFactory(Class<?> callerClass, SocketImplFactory fac);
void check$jdk_vm_ci_services_JVMCIServiceLocator$$getProviders(Class<?> callerClass, Class<?> service);
void check$java_net_Socket$$setSocketImplFactory(Class<?> callerClass, SocketImplFactory fac);
void check$jdk_vm_ci_services_Services$$load(Class<?> callerClass, Class<?> service);
void check$java_net_URL$$setURLStreamHandlerFactory(Class<?> callerClass, URLStreamHandlerFactory fac);
void check$java_net_URLConnection$$setFileNameMap(Class<?> callerClass, FileNameMap map);
void check$java_net_URLConnection$$setContentHandlerFactory(Class<?> callerClass, ContentHandlerFactory fac);
void check$jdk_vm_ci_services_Services$$loadSingle(Class<?> callerClass, Class<?> service, boolean required);
/// /////////////////
//
@ -232,10 +222,6 @@ public interface EntitlementChecker {
void check$java_net_ResponseCache$$setDefault(Class<?> callerClass, ResponseCache rc);
void check$java_net_spi_InetAddressResolverProvider$(Class<?> callerClass);
void check$java_net_spi_URLStreamHandlerProvider$(Class<?> callerClass);
void check$java_net_URL$(Class<?> callerClass, String protocol, String host, int port, String file, URLStreamHandler handler);
void check$java_net_URL$(Class<?> callerClass, URL context, String spec, URLStreamHandler handler);
@ -246,14 +232,14 @@ public interface EntitlementChecker {
void check$java_net_DatagramSocket$connect(Class<?> callerClass, DatagramSocket that, SocketAddress addr);
void check$java_net_DatagramSocket$send(Class<?> callerClass, DatagramSocket that, DatagramPacket p);
void check$java_net_DatagramSocket$receive(Class<?> callerClass, DatagramSocket that, DatagramPacket p);
void check$java_net_DatagramSocket$joinGroup(Class<?> callerClass, DatagramSocket that, SocketAddress addr, NetworkInterface ni);
void check$java_net_DatagramSocket$leaveGroup(Class<?> callerClass, DatagramSocket that, SocketAddress addr, NetworkInterface ni);
void check$java_net_DatagramSocket$receive(Class<?> callerClass, DatagramSocket that, DatagramPacket p);
void check$java_net_DatagramSocket$send(Class<?> callerClass, DatagramSocket that, DatagramPacket p);
void check$java_net_MulticastSocket$joinGroup(Class<?> callerClass, MulticastSocket that, InetAddress addr);
void check$java_net_MulticastSocket$joinGroup(Class<?> callerClass, MulticastSocket that, SocketAddress addr, NetworkInterface ni);
@ -264,6 +250,10 @@ public interface EntitlementChecker {
void check$java_net_MulticastSocket$send(Class<?> callerClass, MulticastSocket that, DatagramPacket p, byte ttl);
void check$java_net_spi_InetAddressResolverProvider$(Class<?> callerClass);
void check$java_net_spi_URLStreamHandlerProvider$(Class<?> callerClass);
// Binding/connecting ctor
void check$java_net_ServerSocket$(Class<?> callerClass, int port);
@ -495,24 +485,26 @@ public interface EntitlementChecker {
// File access
//
// old io (ie File)
void check$java_io_FileOutputStream$(Class<?> callerClass, File file);
void check$java_io_FileOutputStream$(Class<?> callerClass, File file, boolean append);
void check$java_io_FileOutputStream$(Class<?> callerClass, String name);
void check$java_io_FileOutputStream$(Class<?> callerClass, String name, boolean append);
void check$java_util_Scanner$(Class<?> callerClass, File source);
void check$java_util_Scanner$(Class<?> callerClass, File source, String charsetName);
void check$java_util_Scanner$(Class<?> callerClass, File source, Charset charset);
void check$java_io_FileOutputStream$(Class<?> callerClass, String name);
void check$java_io_FileOutputStream$(Class<?> callerClass, String name, boolean append);
void check$java_io_FileOutputStream$(Class<?> callerClass, File file);
void check$java_io_FileOutputStream$(Class<?> callerClass, File file, boolean append);
// nio
void check$java_nio_file_Files$$probeContentType(Class<?> callerClass, Path path);
void check$java_nio_file_Files$$setOwner(Class<?> callerClass, Path path, UserPrincipal principal);
// hand-wired methods
// file system providers
void checkNewInputStream(Class<?> callerClass, FileSystemProvider that, Path path, OpenOption... options);
}
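
The reshuffled declarations above also make the `check$` naming convention easier to read: the target class is spelled with `.` replaced by `_`, a single `$` introduces an instance method (or a constructor, when no method name follows), and `$$` introduces a static method; the first parameter is always the caller class, and instance checks additionally receive the receiver. A small self-contained decoder for the convention (a hypothetical helper, not part of the entitlement library):

```java
// Decodes the check method naming convention visible in the interface above.
public class CheckNameSketch {
    public static String describe(String checkMethodName) {
        String body = checkMethodName.substring("check$".length());
        int sep = body.indexOf("$$");
        if (sep >= 0) { // "$$" marks a static method
            return "static " + body.substring(0, sep).replace('_', '.') + "." + body.substring(sep + 2);
        }
        sep = body.lastIndexOf('$');
        String cls = body.substring(0, sep).replace('_', '.');
        String method = body.substring(sep + 1);
        return method.isEmpty() ? "constructor of " + cls : "instance method " + cls + "." + method;
    }

    public static void main(String[] args) {
        System.out.println(describe("check$java_lang_ClassLoader$"));        // constructor of java.lang.ClassLoader
        System.out.println(describe("check$java_lang_System$$setProperty")); // static java.lang.System.setProperty
        System.out.println(describe("check$java_net_DatagramSocket$send"));  // instance method java.net.DatagramSocket.send
    }
}
```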

View file

@ -66,8 +66,9 @@ public class EntitlementInitialization {
public static void initialize(Instrumentation inst) throws Exception {
manager = initChecker();
Map<MethodKey, CheckMethod> checkMethods = new HashMap<>(INSTRUMENTATION_SERVICE.lookupMethods(EntitlementChecker.class));
var latestCheckerInterface = getVersionSpecificCheckerClass(EntitlementChecker.class);
Map<MethodKey, CheckMethod> checkMethods = new HashMap<>(INSTRUMENTATION_SERVICE.lookupMethods(latestCheckerInterface));
var fileSystemProviderClass = FileSystems.getDefault().provider().getClass();
Stream.of(
INSTRUMENTATION_SERVICE.lookupImplementationMethod(
@ -83,7 +84,7 @@ public class EntitlementInitialization {
var classesToTransform = checkMethods.keySet().stream().map(MethodKey::className).collect(Collectors.toSet());
Instrumenter instrumenter = INSTRUMENTATION_SERVICE.newInstrumenter(EntitlementChecker.class, checkMethods);
Instrumenter instrumenter = INSTRUMENTATION_SERVICE.newInstrumenter(latestCheckerInterface, checkMethods);
inst.addTransformer(new Transformer(instrumenter, classesToTransform), true);
inst.retransformClasses(findClassesToRetransform(inst.getAllLoadedClasses(), classesToTransform));
}
@ -130,23 +131,40 @@ public class EntitlementInitialization {
return new PolicyManager(serverPolicy, agentEntitlements, pluginPolicies, resolver, AGENTS_PACKAGE_NAME, ENTITLEMENTS_MODULE);
}
private static ElasticsearchEntitlementChecker initChecker() {
final PolicyManager policyManager = createPolicyManager();
/**
* Returns the "most recent" checker class compatible with the current runtime Java version.
* For checkers, we have (optionally) version-specific classes, each with a prefix (e.g. Java23).
* The mapping cannot be automatic, as it depends on the actual presence of these classes in the final Jar (see
* the various mainXX source sets).
*/
private static Class<?> getVersionSpecificCheckerClass(Class<?> baseClass) {
String packageName = baseClass.getPackageName();
String baseClassName = baseClass.getSimpleName();
int javaVersion = Runtime.version().feature();
final String classNamePrefix;
if (javaVersion >= 23) {
// All Java versions from 23 onwards will be able to use the checks in the Java23EntitlementChecker interface and implementation
classNamePrefix = "Java23";
} else {
// For any other Java version, the basic EntitlementChecker interface and implementation contain all the supported checks
classNamePrefix = "";
}
final String className = "org.elasticsearch.entitlement.runtime.api." + classNamePrefix + "ElasticsearchEntitlementChecker";
final String className = packageName + "." + classNamePrefix + baseClassName;
Class<?> clazz;
try {
clazz = Class.forName(className);
} catch (ClassNotFoundException e) {
throw new AssertionError("entitlement lib cannot find entitlement impl", e);
throw new AssertionError("entitlement lib cannot find entitlement class " + className, e);
}
return clazz;
}
private static ElasticsearchEntitlementChecker initChecker() {
final PolicyManager policyManager = createPolicyManager();
final Class<?> clazz = getVersionSpecificCheckerClass(ElasticsearchEntitlementChecker.class);
Constructor<?> constructor;
try {
constructor = clazz.getConstructor(PolicyManager.class);
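
Factoring the lookup into `getVersionSpecificCheckerClass` lets one range-based rule serve both the checker interface and its implementation: the prefix is chosen by comparing `Runtime.version().feature()` against a threshold, so a JDK 24 runtime still resolves the `Java23` variant, the newest one actually shipped in the jar. A hedged sketch of that rule (the class passed in `main` is arbitrary, purely to show the produced name):

```java
public class CheckerResolutionSketch {
    // Same range-based prefix rule as above: JDK 23 and newer map to "Java23",
    // anything older maps to the unprefixed base name.
    static String versionSpecificName(Class<?> baseClass) {
        String prefix = Runtime.version().feature() >= 23 ? "Java23" : "";
        return baseClass.getPackageName() + "." + prefix + baseClass.getSimpleName();
    }

    public static void main(String[] args) {
        // On a JDK 24 runtime this prints "java.lang.Java23Runnable":
        // the mapping is by range, not by exact feature version.
        System.out.println(versionSpecificName(Runnable.class));
    }
}
```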

View file

@ -84,6 +84,11 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
this.policyManager = policyManager;
}
/// /////////////////
//
// Exit the JVM process
//
@Override
public void check$java_lang_Runtime$exit(Class<?> callerClass, Runtime runtime, int status) {
policyManager.checkExitVM(callerClass);
@ -99,6 +104,11 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
policyManager.checkExitVM(callerClass);
}
/// /////////////////
//
// create class loaders
//
@Override
public void check$java_lang_ClassLoader$(Class<?> callerClass) {
policyManager.checkCreateClassLoader(callerClass);
@ -114,21 +124,6 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
policyManager.checkCreateClassLoader(callerClass);
}
@Override
public void check$java_security_SecureClassLoader$(Class<?> callerClass) {
policyManager.checkCreateClassLoader(callerClass);
}
@Override
public void check$java_security_SecureClassLoader$(Class<?> callerClass, ClassLoader parent) {
policyManager.checkCreateClassLoader(callerClass);
}
@Override
public void check$java_security_SecureClassLoader$(Class<?> callerClass, String name, ClassLoader parent) {
policyManager.checkCreateClassLoader(callerClass);
}
@Override
public void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls) {
policyManager.checkCreateClassLoader(callerClass);
@ -160,6 +155,55 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
policyManager.checkCreateClassLoader(callerClass);
}
@Override
public void check$java_security_SecureClassLoader$(Class<?> callerClass) {
policyManager.checkCreateClassLoader(callerClass);
}
@Override
public void check$java_security_SecureClassLoader$(Class<?> callerClass, ClassLoader parent) {
policyManager.checkCreateClassLoader(callerClass);
}
@Override
public void check$java_security_SecureClassLoader$(Class<?> callerClass, String name, ClassLoader parent) {
policyManager.checkCreateClassLoader(callerClass);
}
/// /////////////////
//
// "setFactory" methods
//
@Override
public void check$javax_net_ssl_HttpsURLConnection$setSSLSocketFactory(
Class<?> callerClass,
HttpsURLConnection connection,
SSLSocketFactory sf
) {
policyManager.checkSetHttpsConnectionProperties(callerClass);
}
@Override
public void check$javax_net_ssl_HttpsURLConnection$$setDefaultSSLSocketFactory(Class<?> callerClass, SSLSocketFactory sf) {
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override
public void check$javax_net_ssl_HttpsURLConnection$$setDefaultHostnameVerifier(Class<?> callerClass, HostnameVerifier hv) {
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override
public void check$javax_net_ssl_SSLContext$$setDefault(Class<?> callerClass, SSLContext context) {
policyManager.checkChangeJVMGlobalState(callerClass);
}
/// /////////////////
//
// Process creation
//
@Override
public void check$java_lang_ProcessBuilder$start(Class<?> callerClass, ProcessBuilder processBuilder) {
policyManager.checkStartProcess(callerClass);
@ -170,6 +214,31 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
policyManager.checkStartProcess(callerClass);
}
/// /////////////////
//
// System Properties and similar
//
@Override
public void check$java_lang_System$$clearProperty(Class<?> callerClass, String key) {
policyManager.checkWriteProperty(callerClass, key);
}
@Override
public void check$java_lang_System$$setProperties(Class<?> callerClass, Properties props) {
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override
public void check$java_lang_System$$setProperty(Class<?> callerClass, String key, String value) {
policyManager.checkWriteProperty(callerClass, key);
}
/// /////////////////
//
// JVM-wide state changes
//
@Override
public void check$java_lang_System$$setIn(Class<?> callerClass, InputStream in) {
policyManager.checkChangeJVMGlobalState(callerClass);
@ -230,21 +299,6 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override
public void check$java_lang_System$$clearProperty(Class<?> callerClass, String key) {
policyManager.checkWriteProperty(callerClass, key);
}
@Override
public void check$java_lang_System$$setProperty(Class<?> callerClass, String key, String value) {
policyManager.checkWriteProperty(callerClass, key);
}
@Override
public void check$java_lang_System$$setProperties(Class<?> callerClass, Properties props) {
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override
public void check$java_util_spi_LocaleServiceProvider$(Class<?> callerClass) {
policyManager.checkChangeJVMGlobalState(callerClass);
@ -360,29 +414,10 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override
public void check$javax_net_ssl_HttpsURLConnection$setSSLSocketFactory(
Class<?> callerClass,
HttpsURLConnection connection,
SSLSocketFactory sf
) {
policyManager.checkSetHttpsConnectionProperties(callerClass);
}
@Override
public void check$javax_net_ssl_HttpsURLConnection$$setDefaultSSLSocketFactory(Class<?> callerClass, SSLSocketFactory sf) {
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override
public void check$javax_net_ssl_HttpsURLConnection$$setDefaultHostnameVerifier(Class<?> callerClass, HostnameVerifier hv) {
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override
public void check$javax_net_ssl_SSLContext$$setDefault(Class<?> callerClass, SSLContext context) {
policyManager.checkChangeJVMGlobalState(callerClass);
}
/// /////////////////
//
// Network access
//
@Override
public void check$java_net_ProxySelector$$setDefault(Class<?> callerClass, ProxySelector ps) {
@ -876,20 +911,12 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
policyManager.checkLoadingNativeLibraries(callerClass);
}
@Override
public void check$java_util_Scanner$(Class<?> callerClass, File source) {
policyManager.checkFileRead(callerClass, source);
}
/// /////////////////
//
// File access
//
@Override
public void check$java_util_Scanner$(Class<?> callerClass, File source, String charsetName) {
policyManager.checkFileRead(callerClass, source);
}
@Override
public void check$java_util_Scanner$(Class<?> callerClass, File source, Charset charset) {
policyManager.checkFileRead(callerClass, source);
}
// old io (ie File)
@Override
public void check$java_io_FileOutputStream$(Class<?> callerClass, String name) {
@ -911,6 +938,23 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_util_Scanner$(Class<?> callerClass, File source) {
policyManager.checkFileRead(callerClass, source);
}
@Override
public void check$java_util_Scanner$(Class<?> callerClass, File source, String charsetName) {
policyManager.checkFileRead(callerClass, source);
}
@Override
public void check$java_util_Scanner$(Class<?> callerClass, File source, Charset charset) {
policyManager.checkFileRead(callerClass, source);
}
// nio
@Override
public void check$java_nio_file_Files$$probeContentType(Class<?> callerClass, Path path) {
policyManager.checkFileRead(callerClass, path);
@ -921,6 +965,8 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
policyManager.checkFileWrite(callerClass, path);
}
// file system providers
@Override
public void checkNewInputStream(Class<?> callerClass, FileSystemProvider that, Path path, OpenOption... options) {
// TODO: policyManager.checkFileSystemRead(path);

View file

@ -49,7 +49,8 @@ public class DataStreamsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase
.feature(FAILURE_STORE_ENABLED)
.setting("xpack.security.enabled", "true")
.keystore("bootstrap.password", "x-pack-test-password")
.user("x_pack_rest_user", "x-pack-test-password");
.user("x_pack_rest_user", "x-pack-test-password")
.systemProperty("es.queryable_built_in_roles_enabled", "false");
if (initTestSeed().nextBoolean()) {
clusterBuilder.setting("xpack.license.self_generated.type", "trial");
}

View file

@ -786,7 +786,7 @@ teardown:
- is_false: items.1.create.failure_store
---
"Test failure store status with bulk request":
"Test failure store status with bulk request failing on mappings":
- do:
allowed_warnings:
- "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation"
@ -865,3 +865,90 @@ teardown:
- match: { items.3.create.status: 400 }
- match: { items.3.create.error.type: document_parsing_exception }
- match: { items.3.create.failure_store: not_enabled }
---
"Test failure store status with bulk request failing in ingest":
- do:
ingest.put_pipeline:
id: "failing_pipeline"
body: >
{
"description": "_description",
"processors": [
{
"fail": {
"message" : "error_message",
"tag": "foo-tag"
}
}
]
}
- match: { acknowledged: true }
- do:
allowed_warnings:
- "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation"
indices.put_index_template:
name: generic_logs_template
body:
index_patterns: logs-*
data_stream: {}
template:
settings:
number_of_shards: 1
number_of_replicas: 1
index:
default_pipeline: "failing_pipeline"
mappings:
properties:
'@timestamp':
type: date
count:
type: long
data_stream_options:
failure_store:
enabled: true
- do:
allowed_warnings:
- "index template [no-fs] has index patterns [no-fs*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [no-fs] will take precedence during new index creation"
indices.put_index_template:
name: no-fs
body:
index_patterns: no-fs*
data_stream: {}
template:
settings:
number_of_shards: 1
number_of_replicas: 0
index:
default_pipeline: "failing_pipeline"
mappings:
properties:
'@timestamp':
type: date
count:
type: long
data_stream_options:
failure_store:
enabled: false
- do:
bulk:
refresh: true
body:
- '{ "create": { "_index": "logs-foobar", "_id": "1" } }'
- '{ "@timestamp": "2022-01-01", "count": 1 }'
- '{ "create": { "_index": "no-fs", "_id": "1" } }'
- '{ "@timestamp": "2022-01-01", "count": 1 }'
- is_true: errors
# Successfully indexed to backing index
- match: { items.0.create._index: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' }
- match: { items.0.create.status: 201 }
- match: { items.0.create.failure_store: used }
# Rejected, eligible to go to failure store, but failure store not enabled
- match: { items.1.create._index: 'no-fs' }
- match: { items.1.create.status: 500 }
- match: { items.1.create.failure_store: not_enabled }
- match: { items.1.create.error.type: fail_processor_exception }
- contains: { items.1.create.error.reason: error_message }

View file

@ -147,9 +147,6 @@ tests:
issue: https://github.com/elastic/elasticsearch/issues/118914
- class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectoryRunAsIT
issue: https://github.com/elastic/elasticsearch/issues/115727
- class: org.elasticsearch.xpack.esql.action.EsqlNodeFailureIT
method: testFailureLoadingFields
issue: https://github.com/elastic/elasticsearch/issues/118000
- class: org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapperTests
method: testCartesianBoundsBlockLoader
issue: https://github.com/elastic/elasticsearch/issues/119201
@ -214,9 +211,6 @@ tests:
issue: https://github.com/elastic/elasticsearch/issues/120810
- class: org.elasticsearch.indices.mapping.UpdateMappingIntegrationIT
issue: https://github.com/elastic/elasticsearch/issues/116126
- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT
method: test {p0=data_stream/140_data_stream_aliases/Create data stream aliases using wildcard expression}
issue: https://github.com/elastic/elasticsearch/issues/120890
- class: org.elasticsearch.xpack.security.authc.service.ServiceAccountIT
method: testAuthenticateShouldNotFallThroughInCaseOfFailure
issue: https://github.com/elastic/elasticsearch/issues/120902
@ -226,9 +220,6 @@ tests:
- class: org.elasticsearch.packaging.test.DockerTests
method: test140CgroupOsStatsAreAvailable
issue: https://github.com/elastic/elasticsearch/issues/120914
- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT
method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias}
issue: https://github.com/elastic/elasticsearch/issues/120920
- class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT
method: testReservedStatePersistsOnRestart
issue: https://github.com/elastic/elasticsearch/issues/120923
@ -244,12 +235,6 @@ tests:
- class: org.elasticsearch.action.search.SearchProgressActionListenerIT
method: testSearchProgressWithQuery
issue: https://github.com/elastic/elasticsearch/issues/120994
- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT
method: test {p0=data_stream/80_resolve_index_data_streams/Resolve index with hidden and closed indices}
issue: https://github.com/elastic/elasticsearch/issues/120965
- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT
method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias with filter}
issue: https://github.com/elastic/elasticsearch/issues/121014
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testSuggestProfilesWithName
issue: https://github.com/elastic/elasticsearch/issues/121022
@ -268,15 +253,9 @@ tests:
- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level}
issue: https://github.com/elastic/elasticsearch/issues/120950
- class: org.elasticsearch.xpack.shutdown.AllocationFailuresResetOnShutdownIT
method: testResetAllocationFailuresOnNodeShutdown
issue: https://github.com/elastic/elasticsearch/issues/121129
- class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests
method: testActivateProfileForJWT
issue: https://github.com/elastic/elasticsearch/issues/120983
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=cluster.health/20_request_timeout/cluster health request timeout waiting for active shards}
issue: https://github.com/elastic/elasticsearch/issues/121130
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testProfileIndexAutoCreation
issue: https://github.com/elastic/elasticsearch/issues/120987
@ -294,21 +273,9 @@ tests:
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testSetEnabled
issue: https://github.com/elastic/elasticsearch/issues/121183
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=cat.aliases/10_basic/Simple alias}
issue: https://github.com/elastic/elasticsearch/issues/121186
- class: org.elasticsearch.xpack.ml.integration.ClassificationIT
method: testWithDatastreams
issue: https://github.com/elastic/elasticsearch/issues/121236
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=nodes.stats/11_indices_metrics/Metric - blank for indices mappings}
issue: https://github.com/elastic/elasticsearch/issues/121238
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=indices.get_alias/10_basic/Get aliases via /_alias/_all}
issue: https://github.com/elastic/elasticsearch/issues/121242
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=cluster.stats/10_basic/Sparse vector stats}
issue: https://github.com/elastic/elasticsearch/issues/121246
- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT
method: testCrossClusterAsyncQueryStop
issue: https://github.com/elastic/elasticsearch/issues/121249
@ -335,15 +302,9 @@ tests:
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_357}
issue: https://github.com/elastic/elasticsearch/issues/121287
- class: org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT
method: test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only}
issue: https://github.com/elastic/elasticsearch/issues/115475
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/index-modules/slowlog/line_102}
issue: https://github.com/elastic/elasticsearch/issues/121288
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=indices.get_alias/10_basic/Get aliases via /*/_alias/}
issue: https://github.com/elastic/elasticsearch/issues/121290
- class: org.elasticsearch.env.NodeEnvironmentTests
method: testGetBestDowngradeVersion
issue: https://github.com/elastic/elasticsearch/issues/121316
@ -365,12 +326,6 @@ tests:
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testActivateProfile
issue: https://github.com/elastic/elasticsearch/issues/121151
- class: org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT
method: test {yaml=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only}
issue: https://github.com/elastic/elasticsearch/issues/121350
- class: org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT
method: test {p0=search.vectors/42_knn_search_int4_flat/KNN Vector similarity search only}
issue: https://github.com/elastic/elasticsearch/issues/121395
- class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT
issue: https://github.com/elastic/elasticsearch/issues/121407
- class: org.elasticsearch.xpack.ml.integration.ClassificationIT
@ -382,24 +337,15 @@ tests:
- class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests
method: testGrantApiKeyForJWT
issue: https://github.com/elastic/elasticsearch/issues/121039
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=cluster.health/10_basic/cluster health basic test}
issue: https://github.com/elastic/elasticsearch/issues/121478
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testGetUsersWithProfileUid
issue: https://github.com/elastic/elasticsearch/issues/121483
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=cat.aliases/10_basic/Empty cluster}
issue: https://github.com/elastic/elasticsearch/issues/121484
- class: org.elasticsearch.xpack.transform.checkpoint.TransformCCSCanMatchIT
method: testTransformLifecycle_RangeQueryThatMatchesNoShards
issue: https://github.com/elastic/elasticsearch/issues/121480
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testSuggestProfilesWithHint
issue: https://github.com/elastic/elasticsearch/issues/121116
- class: org.elasticsearch.xpack.sql.qa.single_node.JdbcDocCsvSpecIT
method: test {docs.testFilterToday}
issue: https://github.com/elastic/elasticsearch/issues/121474
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testSuggestProfileWithData
issue: https://github.com/elastic/elasticsearch/issues/121258
@ -409,23 +355,12 @@ tests:
- class: org.elasticsearch.xpack.core.ilm.SetSingleNodeAllocateStepTests
method: testPerformActionSomeShardsOnlyOnNewNodesButNewNodesInvalidAttrs
issue: https://github.com/elastic/elasticsearch/issues/121495
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=cat.aliases/40_hidden/Test cat aliases output with a visible index with a hidden alias}
issue: https://github.com/elastic/elasticsearch/issues/121128
- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
method: test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only}
issue: https://github.com/elastic/elasticsearch/issues/121412
- class: org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests
issue: https://github.com/elastic/elasticsearch/issues/121294
- class: org.elasticsearch.xpack.ml.integration.ClassificationIT
method: testDependentVariableIsAliasToKeyword
issue: https://github.com/elastic/elasticsearch/issues/121492
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=cat.aliases/10_basic/Complex alias}
issue: https://github.com/elastic/elasticsearch/issues/121513
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=snapshot.create/10_basic/Create a snapshot for missing index}
issue: https://github.com/elastic/elasticsearch/issues/121536
- class: org.elasticsearch.xpack.esql.action.CrossClusterQueryUnavailableRemotesIT
method: testRemoteOnlyCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue
issue: https://github.com/elastic/elasticsearch/issues/121578
@ -441,6 +376,18 @@ tests:
- class: org.elasticsearch.xpack.esql.action.CrossClustersCancellationIT
method: testCancelSkipUnavailable
issue: https://github.com/elastic/elasticsearch/issues/121631
- class: org.elasticsearch.upgrades.UpgradeClusterClientYamlTestSuiteIT
method: test {p0=mixed_cluster/110_enrich/Enrich stats query smoke test for mixed cluster}
issue: https://github.com/elastic/elasticsearch/issues/121642
- class: org.elasticsearch.search.CrossClusterSearchUnavailableClusterIT
method: testSearchSkipUnavailable
issue: https://github.com/elastic/elasticsearch/issues/121497
- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryStopIT
method: testStopQueryLocal
issue: https://github.com/elastic/elasticsearch/issues/121672
- class: org.elasticsearch.xpack.esql.heap_attack.HeapAttackIT
method: testLookupExplosionBigStringManyMatches
issue: https://github.com/elastic/elasticsearch/issues/121465
# Examples:
#

View file

@ -11,6 +11,7 @@ package org.elasticsearch.example;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse;
import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse.Indices;
import org.elasticsearch.xpack.core.security.authc.Authentication;
@ -85,10 +86,13 @@ public class CustomAuthorizationEngine implements AuthorizationEngine {
}
@Override
public void authorizeIndexAction(RequestInfo requestInfo, AuthorizationInfo authorizationInfo,
public void authorizeIndexAction(
RequestInfo requestInfo,
AuthorizationInfo authorizationInfo,
AsyncSupplier<ResolvedIndices> indicesAsyncSupplier,
Map<String, IndexAbstraction> aliasOrIndexLookup,
ActionListener<IndexAuthorizationResult> listener) {
Metadata metadata,
ActionListener<IndexAuthorizationResult> listener
) {
if (isSuperuser(requestInfo.getAuthentication().getEffectiveSubject().getUser())) {
indicesAsyncSupplier.getAsync(ActionListener.wrap(resolvedIndices -> {
Map<String, IndexAccessControl> indexAccessControlMap = new HashMap<>();

View file

@ -14,6 +14,7 @@ import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.metadata.IndexAbstraction.ConcreteIndex;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.test.ESTestCase;
@ -31,6 +32,7 @@ import org.elasticsearch.xpack.core.security.user.User;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;
import static org.hamcrest.Matchers.is;
@ -117,12 +119,13 @@ public class CustomAuthorizationEngineTests extends ESTestCase {
public void testAuthorizeIndexAction() {
CustomAuthorizationEngine engine = new CustomAuthorizationEngine();
Map<String, IndexAbstraction> indicesMap = new HashMap<>();
indicesMap.put("index", new ConcreteIndex(IndexMetadata.builder("index")
Metadata metadata = Metadata.builder().put(IndexMetadata.builder("index")
.settings(Settings.builder().put("index.version.created", IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
.build(), null));
.build(),
false
).build();
// authorized
{
RequestInfo requestInfo =
@ -136,7 +139,7 @@ public class CustomAuthorizationEngineTests extends ESTestCase {
PlainActionFuture<IndexAuthorizationResult> resultFuture = new PlainActionFuture<>();
engine.authorizeIndexAction(requestInfo, authzInfo,
listener -> listener.onResponse(new ResolvedIndices(Collections.singletonList("index"), Collections.emptyList())),
indicesMap, resultFuture);
metadata, resultFuture);
IndexAuthorizationResult result = resultFuture.actionGet();
assertThat(result.isGranted(), is(true));
IndicesAccessControl indicesAccessControl = result.getIndicesAccessControl();
@ -156,7 +159,7 @@ public class CustomAuthorizationEngineTests extends ESTestCase {
PlainActionFuture<IndexAuthorizationResult> resultFuture = new PlainActionFuture<>();
engine.authorizeIndexAction(requestInfo, authzInfo,
listener -> listener.onResponse(new ResolvedIndices(Collections.singletonList("index"), Collections.emptyList())),
indicesMap, resultFuture);
metadata, resultFuture);
IndexAuthorizationResult result = resultFuture.actionGet();
assertThat(result.isGranted(), is(false));
IndicesAccessControl indicesAccessControl = result.getIndicesAccessControl();
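
The signature change above hands the engine the full cluster `Metadata` instead of a pre-built alias/index map; an implementation that still wants the old view can derive it, since `Metadata` exposes the indices lookup directly. A hedged sketch (assuming the `getIndicesLookup()` accessor, which carries the same data the removed parameter did):

```java
import java.util.Map;

import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.metadata.Metadata;

class LookupSketch {
    // Recover the previous Map<String, IndexAbstraction> view from Metadata.
    static Map<String, IndexAbstraction> aliasOrIndexLookup(Metadata metadata) {
        return metadata.getIndicesLookup();
    }
}
```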

View file

@ -101,8 +101,8 @@ public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase {
TransportSearchAction.TYPE.name(),
EsExecutors.DIRECT_EXECUTOR_SERVICE,
SearchRequest::new,
(request, channel, task) -> channel.sendResponse(
new SearchResponse(
(request, channel, task) -> {
var searchResponse = new SearchResponse(
SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN),
InternalAggregations.EMPTY,
null,
@ -117,8 +117,13 @@ public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase {
100,
ShardSearchFailure.EMPTY_ARRAY,
SearchResponse.Clusters.EMPTY
)
)
);
try {
channel.sendResponse(searchResponse);
} finally {
searchResponse.decRef();
}
}
);
newService.registerRequestHandler(
ClusterStateAction.NAME,
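
The handler rewrite above fixes a reference-counting leak: `SearchResponse` is ref-counted, the channel takes its own reference while sending, and the `finally` block guarantees the handler's reference is released even if `sendResponse` throws. A toy, self-contained illustration of the pattern (Elasticsearch's real `RefCounted` machinery is more involved):

```java
import java.util.concurrent.atomic.AtomicInteger;

public class RefCountSketch {
    static class Resource {
        final AtomicInteger refs = new AtomicInteger(1); // the creator owns one reference
        void incRef() { refs.incrementAndGet(); }
        void decRef() {
            if (refs.decrementAndGet() == 0) {
                System.out.println("released");
            }
        }
    }

    static void send(Resource r) {
        r.incRef(); // the "channel" takes its own reference...
        r.decRef(); // ...and releases it once the write completes
    }

    public static void main(String[] args) {
        Resource resp = new Resource();
        try {
            send(resp);    // may throw in the real handler
        } finally {
            resp.decRef(); // the handler's reference is always released -> prints "released"
        }
    }
}
```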

View file

@ -12,7 +12,9 @@ package org.elasticsearch.lucene;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.InputStreamEntity;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.WarningsHandler;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.MetadataIndexStateService;
@ -27,6 +29,7 @@ import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider;
import org.elasticsearch.test.cluster.local.distribution.DistributionType;
import org.elasticsearch.test.cluster.util.Version;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.ObjectPath;
import org.elasticsearch.xcontent.XContentType;
import org.hamcrest.Matcher;
import org.junit.After;
@ -161,8 +164,21 @@ public abstract class AbstractIndexCompatibilityTestCase extends ESRestTestCase
}
protected static Version indexVersion(String indexName) throws Exception {
var response = assertOK(client().performRequest(new Request("GET", "/" + indexName + "/_settings")));
int id = Integer.parseInt(createFromResponse(response).evaluate(indexName + ".settings.index.version.created"));
return indexVersion(indexName, false);
}
protected static Version indexVersion(String indexName, boolean ignoreWarnings) throws Exception {
Request request = new Request("GET", "/" + indexName + "/_settings");
request.addParameter("flat_settings", "true");
if (ignoreWarnings) {
RequestOptions.Builder options = request.getOptions().toBuilder();
options.setWarningsHandler(WarningsHandler.PERMISSIVE);
request.setOptions(options);
}
var response = assertOK(client().performRequest(request));
ObjectPath fromResponse = createFromResponse(response);
Map<String, Object> settings = fromResponse.evaluateExact(indexName, "settings");
int id = Integer.parseInt((String) settings.get("index.version.created"));
return new Version((byte) ((id / 1000000) % 100), (byte) ((id / 10000) % 100), (byte) ((id / 100) % 100));
}
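For reference, this decoding unpacks the numeric index.version.created id into two-digit major/minor/revision groups; a worked sketch (the concrete id below is illustrative, not taken from this commit):
int id = 8050099; // hypothetical index.version.created value
byte major = (byte) ((id / 1000000) % 100); // 8
byte minor = (byte) ((id / 10000) % 100); // 5
byte revision = (byte) ((id / 100) % 100); // 0
// decodes to Version 8.5.0; the trailing two digits are ignored by this decoding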

View file

@ -0,0 +1,154 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.lucene;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.WarningsHandler;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.cluster.util.Version;
import org.elasticsearch.test.rest.ObjectPath;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
public class FullClusterRestartSystemIndexCompatibilityIT extends FullClusterRestartIndexCompatibilityTestCase {
static {
clusterConfig = config -> config.setting("xpack.license.self_generated.type", "trial");
}
public FullClusterRestartSystemIndexCompatibilityIT(Version version) {
super(version);
}
// we need a place to store async_search ids across cluster restarts
private static Map<String, String> async_search_ids = new HashMap<>(3);
/**
* 1. Creates an index on N-2 and runs an async_search against it, whose result is kept in the system index
* 2. After upgrading to N-1 (latest), performs the system index migration step and write-blocks the index
* 3. On N, checks that the async search results are still retrievable and that we can write to the system index
*/
public void testAsyncSearchIndexMigration() throws Exception {
final String index = suffix("index");
final String asyncSearchIndex = ".async-search";
final int numDocs = 2431;
final Request asyncSearchRequest = new Request("POST", "/" + index + "/_async_search?size=100&keep_on_completion=true");
if (isFullyUpgradedTo(VERSION_MINUS_2)) {
createIndex(
client(),
index,
Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2))
.build()
);
indexDocs(index, numDocs);
ensureGreen(index);
assertThat(indexVersion(index), equalTo(VERSION_MINUS_2));
String asyncId = searchAsyncAndStoreId(asyncSearchRequest, "n-2_id");
ensureGreen(asyncSearchIndex);
assertAsyncSearchHitCount(asyncId, numDocs);
assertBusy(() -> assertDocCountNoWarnings(client(), asyncSearchIndex, 1));
assertThat(indexVersion(asyncSearchIndex, true), equalTo(VERSION_MINUS_2));
return;
}
if (isFullyUpgradedTo(VERSION_MINUS_1)) {
// check .async-search index is readable
assertThat(indexVersion(asyncSearchIndex, true), equalTo(VERSION_MINUS_2));
assertAsyncSearchHitCount(async_search_ids.get("n-2_id"), numDocs);
// migrate system indices
Request migrateRequest = new Request("POST", "/_migration/system_features");
assertThat(
ObjectPath.createFromResponse(client().performRequest(migrateRequest)).evaluate("features.0.feature_name"),
equalTo("async_search")
);
assertBusy(() -> {
Request checkMigrateProgress = new Request("GET", "/_migration/system_features");
try {
assertFalse(
ObjectPath.createFromResponse(client().performRequest(checkMigrateProgress))
.evaluate("migration_status")
.equals("IN_PROGRESS")
);
} catch (IOException e) {
throw new AssertionError("System feature migration failed", e);
}
});
// check that results from the n-2 search are still readable
assertAsyncSearchHitCount(async_search_ids.get("n-2_id"), numDocs);
// perform a new async search and check it is readable
String asyncId = searchAsyncAndStoreId(asyncSearchRequest, "n-1_id");
assertAsyncSearchHitCount(asyncId, numDocs);
assertBusy(() -> assertDocCountNoWarnings(client(), asyncSearchIndex, 2));
// in order to move to the current version we need a write block on the n-2 index
addIndexBlock(index, IndexMetadata.APIBlock.WRITE);
}
if (isFullyUpgradedTo(VERSION_CURRENT)) {
assertThat(indexVersion(index, true), equalTo(VERSION_MINUS_2));
assertAsyncSearchHitCount(async_search_ids.get("n-2_id"), numDocs);
assertAsyncSearchHitCount(async_search_ids.get("n-1_id"), numDocs);
// check system index is still writeable
String asyncId = searchAsyncAndStoreId(asyncSearchRequest, "n_id");
assertAsyncSearchHitCount(asyncId, numDocs);
assertBusy(() -> assertDocCountNoWarnings(client(), asyncSearchIndex, 3));
}
}
private static String searchAsyncAndStoreId(Request asyncSearchRequest, String asyncIdName) throws IOException {
ObjectPath resp = ObjectPath.createFromResponse(client().performRequest(asyncSearchRequest));
String asyncId = resp.evaluate("id");
assertNotNull(asyncId);
async_search_ids.put(asyncIdName, asyncId);
return asyncId;
}
private static void assertAsyncSearchHitCount(String asyncId, int numDocs) throws IOException {
var asyncGet = new Request("GET", "/_async_search/" + asyncId);
ObjectPath resp = ObjectPath.createFromResponse(client().performRequest(asyncGet));
assertEquals(Integer.valueOf(numDocs), resp.evaluate("response.hits.total.value"));
}
/**
* Assert that the index in question has the given number of documents present, ignoring any response warnings
*/
private static void assertDocCountNoWarnings(RestClient client, String indexName, long docCount) throws IOException {
Request countReq = new Request("GET", "/" + indexName + "/_count");
RequestOptions.Builder options = countReq.getOptions().toBuilder();
options.setWarningsHandler(WarningsHandler.PERMISSIVE);
countReq.setOptions(options);
ObjectPath resp = ObjectPath.createFromResponse(client.performRequest(countReq));
assertEquals(
"expected " + docCount + " documents but it was a different number",
docCount,
Long.parseLong(resp.evaluate("count").toString())
);
}
}
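In isolation, the migration kick-off and poll exercised above reduce to the following (a sketch; error handling abridged):
// Start the system feature migration, then poll until the reported status
// is no longer IN_PROGRESS.
client().performRequest(new Request("POST", "/_migration/system_features"));
assertBusy(() -> {
var progress = client().performRequest(new Request("GET", "/_migration/system_features"));
assertNotEquals("IN_PROGRESS", ObjectPath.createFromResponse(progress).evaluate("migration_status"));
});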

View file

@ -18,7 +18,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class SourceModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase {
@ -83,20 +82,10 @@ public class SourceModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase {
private void assertDeprecationWarningForTemplate(String templateName) throws IOException {
var request = new Request("GET", "/_migration/deprecations");
var response = entityAsMap(client().performRequest(request));
if (response.containsKey("templates")) {
// Check the newer version of the deprecation API that contains the templates section
assertThat(response.containsKey("templates"), equalTo(true));
Map<?, ?> issuesByTemplate = (Map<?, ?>) response.get("templates");
assertThat(issuesByTemplate.containsKey(templateName), equalTo(true));
var templateIssues = (List<?>) issuesByTemplate.get(templateName);
assertThat(((Map<?, ?>) templateIssues.getFirst()).get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING));
} else {
// Bwc version with 8.18 until https://github.com/elastic/elasticsearch/pull/120505/ gets backported, clean up after backport
var nodeSettings = (Map<?, ?>) ((List<?>) response.get("node_settings")).getFirst();
assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING));
assertThat(
(String) nodeSettings.get("details"),
containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]")
);
}
}
}

View file

@ -57,7 +57,7 @@ setup:
another_vector: [-0.5, 11.0, 0, 12]
- do:
indices.refresh: {}
indices.flush: { }
# For added test reliability, pending the resolution of https://github.com/elastic/elasticsearch/issues/109416.
- do:
@ -66,10 +66,6 @@ setup:
index: int4_flat
- do:
indices.refresh: {}
- do:
indices.forcemerge:
max_num_segments: 1
index: int4_flat
---
"kNN search only":
- do:
@ -203,13 +199,14 @@ setup:
num_candidates: 3
k: 3
field: vector
similarity: 10.3
# Set a high allowed similarity; reduce it once we can update the underlying quantization algorithm
similarity: 110
query_vector: [-0.5, 90.0, -10, 14.8]
- length: {hits.hits: 1}
- is_true: hits.hits.0
- match: {hits.hits.0._id: "2"}
- match: {hits.hits.0.fields.name.0: "moose.jpg"}
#- match: {hits.hits.0._id: "2"}
#- match: {hits.hits.0.fields.name.0: "moose.jpg"}
---
"Vector similarity with filter only":
- do:
@ -221,7 +218,8 @@ setup:
num_candidates: 3
k: 3
field: vector
similarity: 11
# Set a high allowed similarity; reduce it once we can update the underlying quantization algorithm
similarity: 110
query_vector: [-0.5, 90.0, -10, 14.8]
filter: {"term": {"name": "moose.jpg"}}

View file

@ -179,7 +179,7 @@ final class BulkRequestModifier implements Iterator<DocWriteRequest<?>> {
* @param slot the slot in the bulk request to mark as failed.
* @param e the failure encountered.
*/
synchronized void markItemAsFailed(int slot, Exception e) {
synchronized void markItemAsFailed(int slot, Exception e, IndexDocFailureStoreStatus failureStoreStatus) {
final DocWriteRequest<?> docWriteRequest = bulkRequest.requests().get(slot);
final String id = Objects.requireNonNullElse(docWriteRequest.id(), DROPPED_OR_FAILED_ITEM_WITH_AUTO_GENERATED_ID);
// We hit an error while preprocessing a request, so we:
@ -187,7 +187,7 @@ final class BulkRequestModifier implements Iterator<DocWriteRequest<?>> {
// 2) Add a bulk item failure for this request
// 3) Continue with the next request in the bulk.
failedSlots.set(slot);
BulkItemResponse.Failure failure = new BulkItemResponse.Failure(docWriteRequest.index(), id, e);
BulkItemResponse.Failure failure = new BulkItemResponse.Failure(docWriteRequest.index(), id, e, failureStoreStatus);
itemResponses.add(BulkItemResponse.failure(slot, docWriteRequest.opType(), failure));
}
@ -223,7 +223,7 @@ final class BulkRequestModifier implements Iterator<DocWriteRequest<?>> {
assert false
: "Attempting to route a failed write request type to a failure store but the failure store is not enabled! "
+ "This should be guarded against in TransportBulkAction#shouldStoreFailure()";
markItemAsFailed(slot, e);
markItemAsFailed(slot, e, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN);
} else {
// We get the index write request to find the source of the failed document
IndexRequest indexRequest = TransportBulkAction.getIndexWriteRequest(bulkRequest.requests().get(slot));
@ -238,7 +238,7 @@ final class BulkRequestModifier implements Iterator<DocWriteRequest<?>> {
+ "], index: ["
+ targetIndexName
+ "]";
markItemAsFailed(slot, e);
markItemAsFailed(slot, e, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN);
logger.debug(
() -> "Attempted to redirect an invalid write operation after ingest failure - type: ["
+ bulkRequest.requests().get(slot).getClass().getName()
@ -267,7 +267,7 @@ final class BulkRequestModifier implements Iterator<DocWriteRequest<?>> {
+ "]",
ioException
);
markItemAsFailed(slot, e);
markItemAsFailed(slot, e, IndexDocFailureStoreStatus.FAILED);
}
}
}
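Pieced together from the call sites in this file and in IngestService below, the new status argument encodes where a failed document ended up; a hedged sketch of the decision (the helper and its parameters are hypothetical, the enum constants are the real ones):
// Hypothetical summary of how the constants are chosen at the call sites above.
static IndexDocFailureStoreStatus statusFor(boolean targetsDataStream, boolean failureStoreEnabled, boolean storedInFailureStore) {
if (targetsDataStream == false) {
return IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN; // no failure store in play
}
if (failureStoreEnabled == false) {
return IndexDocFailureStoreStatus.NOT_ENABLED; // data stream without a failure store
}
return storedInFailureStore ? IndexDocFailureStoreStatus.USED : IndexDocFailureStoreStatus.FAILED;
}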

View file

@ -23,8 +23,6 @@ import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.ClusterStateTaskExecutor;
import org.elasticsearch.cluster.ClusterStateTaskListener;
import org.elasticsearch.cluster.NotMasterException;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
@ -75,7 +73,6 @@ import java.util.Set;
import static org.apache.logging.log4j.Level.DEBUG;
import static org.apache.logging.log4j.Level.ERROR;
import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_REFRESH_BLOCK;
import static org.elasticsearch.cluster.service.MasterService.isPublishFailureException;
import static org.elasticsearch.core.Strings.format;
@ -627,7 +624,6 @@ public class ShardStateAction {
List<TaskContext<StartedShardUpdateTask>> tasksToBeApplied = new ArrayList<>();
List<ShardRouting> shardRoutingsToBeApplied = new ArrayList<>(batchExecutionContext.taskContexts().size());
Set<ShardRouting> seenShardRoutings = new HashSet<>(); // to prevent duplicates
Set<Index> indicesWithUnpromotableShardsStarted = null;
final Map<Index, ClusterStateTimeRanges> updatedTimestampRanges = new HashMap<>();
final ClusterState initialState = batchExecutionContext.initialState();
for (var taskContext : batchExecutionContext.taskContexts()) {
@ -751,14 +747,6 @@ public class ShardStateAction {
new ClusterStateTimeRanges(newTimestampMillisRange, newEventIngestedMillisRange)
);
}
if (matched.isPromotableToPrimary() == false
&& initialState.blocks().hasIndexBlock(projectId, index.getName(), INDEX_REFRESH_BLOCK)) {
if (indicesWithUnpromotableShardsStarted == null) {
indicesWithUnpromotableShardsStarted = new HashSet<>();
}
indicesWithUnpromotableShardsStarted.add(index);
}
}
}
}
@ -785,10 +773,7 @@ public class ShardStateAction {
maybeUpdatedState = ClusterState.builder(maybeUpdatedState).metadata(metadataBuilder).build();
}
maybeUpdatedState = maybeRemoveIndexRefreshBlocks(maybeUpdatedState, indicesWithUnpromotableShardsStarted);
assert assertStartedIndicesHaveCompleteTimestampRanges(maybeUpdatedState);
assert assertRefreshBlockIsNotPresentWhenTheIndexIsSearchable(maybeUpdatedState);
for (final var taskContext : tasksToBeApplied) {
final var task = taskContext.getTask();
@ -804,37 +789,6 @@ public class ShardStateAction {
return maybeUpdatedState;
}
private static ClusterState maybeRemoveIndexRefreshBlocks(
ClusterState clusterState,
@Nullable Set<Index> indicesWithUnpromotableShardsStarted
) {
// The provided cluster state must include the newly STARTED unpromotable shards
if (indicesWithUnpromotableShardsStarted == null) {
return clusterState;
}
ClusterBlocks.Builder clusterBlocksBuilder = null;
for (Index indexWithUnpromotableShardsStarted : indicesWithUnpromotableShardsStarted) {
String indexName = indexWithUnpromotableShardsStarted.getName();
var projectId = clusterState.metadata().projectFor(indexWithUnpromotableShardsStarted).id();
assert clusterState.blocks().hasIndexBlock(projectId, indexName, INDEX_REFRESH_BLOCK) : indexWithUnpromotableShardsStarted;
var indexRoutingTable = clusterState.routingTable(projectId).index(indexWithUnpromotableShardsStarted);
if (indexRoutingTable.readyForSearch()) {
if (clusterBlocksBuilder == null) {
clusterBlocksBuilder = ClusterBlocks.builder(clusterState.blocks());
}
clusterBlocksBuilder.removeIndexBlock(projectId, indexName, INDEX_REFRESH_BLOCK);
}
}
if (clusterBlocksBuilder == null) {
return clusterState;
}
return ClusterState.builder(clusterState).blocks(clusterBlocksBuilder).build();
}
private static boolean assertStartedIndicesHaveCompleteTimestampRanges(ClusterState clusterState) {
for (ProjectId projectId : clusterState.metadata().projects().keySet()) {
for (Map.Entry<String, IndexRoutingTable> cursor : clusterState.routingTable(projectId).getIndicesRouting().entrySet()) {
@ -860,18 +814,6 @@ public class ShardStateAction {
return true;
}
private static boolean assertRefreshBlockIsNotPresentWhenTheIndexIsSearchable(ClusterState clusterState) {
for (var projectId : clusterState.metadata().projects().keySet()) {
for (Map.Entry<String, Set<ClusterBlock>> indexBlock : clusterState.blocks().indices(projectId).entrySet()) {
if (indexBlock.getValue().contains(INDEX_REFRESH_BLOCK)) {
assert clusterState.routingTable(projectId).index(indexBlock.getKey()).readyForSearch() == false
: "Index [" + indexBlock.getKey() + "] is searchable but has an INDEX_REFRESH_BLOCK";
}
}
}
return true;
}
@Override
public void clusterStatePublished(ClusterState newClusterState) {
rerouteService.reroute(

View file

@ -3209,7 +3209,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
try {
doCheckIndex();
} catch (IOException e) {
if (ExceptionsHelper.unwrap(e, AlreadyClosedException.class) != null) {
// Cache-based read operations on Lucene files can throw an AlreadyClosedException wrapped in an IOException in the
// case of evictions. We don't want to mark the store as corrupted for this.
} else {
store.markStoreCorrupted(e);
}
throw e;
} finally {
store.decRef();

View file

@ -21,6 +21,7 @@ import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.bulk.FailureStoreMetrics;
import org.elasticsearch.action.bulk.IndexDocFailureStoreStatus;
import org.elasticsearch.action.bulk.TransportBulkAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
@ -767,12 +768,34 @@ public class IngestService implements ClusterStateApplier, ReportingService<Inge
ExceptionsHelper.rethrowAndSuppress(exceptions);
}
private record IngestPipelinesExecutionResult(boolean success, boolean shouldKeep, Exception exception, String failedIndex) {
private record IngestPipelinesExecutionResult(
boolean success,
boolean shouldKeep,
Exception exception,
String failedIndex,
IndexDocFailureStoreStatus failureStoreStatus
) {
private static final IngestPipelinesExecutionResult SUCCESSFUL_RESULT = new IngestPipelinesExecutionResult(true, true, null, null);
private static final IngestPipelinesExecutionResult DISCARD_RESULT = new IngestPipelinesExecutionResult(true, false, null, null);
private static final IngestPipelinesExecutionResult SUCCESSFUL_RESULT = new IngestPipelinesExecutionResult(
true,
true,
null,
null,
IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN
);
private static final IngestPipelinesExecutionResult DISCARD_RESULT = new IngestPipelinesExecutionResult(
true,
false,
null,
null,
IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN
);
private static IngestPipelinesExecutionResult failAndStoreFor(String index, Exception e) {
return new IngestPipelinesExecutionResult(false, true, e, index);
return new IngestPipelinesExecutionResult(false, true, e, index, IndexDocFailureStoreStatus.USED);
}
private static IngestPipelinesExecutionResult failWithoutStoringIn(String index, Exception e) {
return new IngestPipelinesExecutionResult(false, true, e, index, IndexDocFailureStoreStatus.NOT_ENABLED);
}
}
@ -804,7 +827,7 @@ public class IngestService implements ClusterStateApplier, ReportingService<Inge
final IntConsumer onDropped,
final Function<String, Boolean> resolveFailureStore,
final TriConsumer<Integer, String, Exception> onStoreFailure,
final BiConsumer<Integer, Exception> onFailure,
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> onFailure,
final BiConsumer<Thread, Exception> onCompletion,
final Executor executor
) {
@ -860,19 +883,27 @@ public class IngestService implements ClusterStateApplier, ReportingService<Inge
assert firstPipeline != null;
firstPipeline.getMetrics().postIngestBytes(indexRequest.ramBytesUsed());
}
} else {
totalMetrics.ingestFailed();
if (IndexDocFailureStoreStatus.NOT_ENABLED.equals(result.failureStoreStatus)) {
// A failure result: the target is a data stream, but it does not currently have
// failure storage enabled. Capture the status in the onFailure call and skip any
// further processing.
onFailure.apply(slot, result.exception, result.failureStoreStatus);
} else {
// We were given a failure result in the onResponse method, so we must store the failure
// Recover the original document state, track a failed ingest, and pass it along
updateIndexRequestMetadata(indexRequest, originalDocumentMetadata);
totalMetrics.ingestFailed();
onStoreFailure.apply(slot, result.failedIndex, result.exception);
}
}
}
@Override
public void onFailure(Exception e) {
// The target of the request does not allow failure storage, or it failed for an unforeseen reason
totalMetrics.ingestFailed();
onFailure.accept(slot, e);
onFailure.apply(slot, e, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN);
}
},
() -> {
@ -1000,15 +1031,15 @@ public class IngestService implements ClusterStateApplier, ReportingService<Inge
if (failureStoreResolution != null && failureStoreResolution) {
failureStoreMetrics.incrementFailureStore(originalIndex, errorType, FailureStoreMetrics.ErrorLocation.PIPELINE);
listener.onResponse(IngestPipelinesExecutionResult.failAndStoreFor(originalIndex, e));
} else {
if (failureStoreResolution != null) {
} else if (failureStoreResolution != null) {
// If this document targeted a data stream that didn't have the failure store enabled, we increment
// the rejected counter.
// We also increment the total counter because this request will not reach the code that increments
// the total counter for non-rejected documents.
failureStoreMetrics.incrementTotal(originalIndex);
failureStoreMetrics.incrementRejected(originalIndex, errorType, FailureStoreMetrics.ErrorLocation.PIPELINE, false);
}
listener.onResponse(IngestPipelinesExecutionResult.failWithoutStoringIn(originalIndex, e));
} else {
listener.onFailure(e);
}
};
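Since the failure callback is now a TriConsumer rather than a BiConsumer, callers adapt from the old two-argument shape; a minimal sketch of a handler (the logging is illustrative):
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> onFailure = (slot, e, status) -> logger.debug(
() -> "bulk slot [" + slot + "] failed with failure store status [" + status + "]",
e
);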

View file

@ -44,7 +44,7 @@ public class BulkRequestModifierTests extends ESTestCase {
for (int i = 0; modifier.hasNext(); i++) {
modifier.next();
if (randomBoolean()) {
modifier.markItemAsFailed(i, new RuntimeException());
modifier.markItemAsFailed(i, new RuntimeException(), randomFrom(IndexDocFailureStoreStatus.values()));
failedSlots.add(i);
}
}
@ -110,7 +110,7 @@ public class BulkRequestModifierTests extends ESTestCase {
// actually mark the failures
for (int i : failures) {
modifier.markItemAsFailed(i, new RuntimeException());
modifier.markItemAsFailed(i, new RuntimeException(), randomFrom(IndexDocFailureStoreStatus.values()));
}
// So half of the requests have "failed", so only the successful requests are left:

View file

@ -122,7 +122,7 @@ public class TransportBulkActionIngestTests extends ESTestCase {
@Captor
ArgumentCaptor<TriConsumer<Integer, String, Exception>> redirectHandler;
@Captor
ArgumentCaptor<BiConsumer<Integer, Exception>> failureHandler;
ArgumentCaptor<TriConsumer<Integer, Exception, IndexDocFailureStoreStatus>> failureHandler;
@Captor
ArgumentCaptor<BiConsumer<Thread, Exception>> completionHandler;
@Captor
@ -421,7 +421,8 @@ public class TransportBulkActionIngestTests extends ESTestCase {
// now check success
Iterator<DocWriteRequest<?>> req = bulkDocsItr.getValue().iterator();
failureHandler.getValue().accept(0, exception); // have an exception for our one index request
// have an exception for our one index request
failureHandler.getValue().apply(0, exception, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN);
indexRequest2.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing
// ensure redirects on failure store data stream
assertTrue(redirectPredicate.getValue().apply(WITH_FAILURE_STORE_ENABLED + "-1"));
@ -520,7 +521,8 @@ public class TransportBulkActionIngestTests extends ESTestCase {
// now check success
Iterator<DocWriteRequest<?>> req = bulkDocsItr.getValue().iterator();
failureHandler.getValue().accept(0, exception); // have an exception for our one index request
// have an exception for our one index request
failureHandler.getValue().apply(0, exception, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN);
indexRequest2.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing
completionHandler.getValue().accept(DUMMY_WRITE_THREAD, null);
assertTrue(action.isExecuted);

View file

@ -12,32 +12,25 @@ package org.elasticsearch.cluster.action.shard;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionTestUtils;
import org.elasticsearch.action.support.replication.ClusterStateCreationUtils;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ESAllocationTestCase;
import org.elasticsearch.cluster.action.shard.ShardStateAction.StartedShardEntry;
import org.elasticsearch.cluster.action.shard.ShardStateAction.StartedShardUpdateTask;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.metadata.ProjectMetadata;
import org.elasticsearch.cluster.routing.AllocationId;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.shard.IndexLongFieldRange;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardLongFieldRange;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
@ -45,7 +38,6 @@ import static org.elasticsearch.action.support.replication.ClusterStateCreationU
import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithActivePrimary;
import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithAssignedPrimariesAndReplicas;
import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithNoShard;
import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_REFRESH_BLOCK;
import static org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@ -502,114 +494,6 @@ public class ShardStartedClusterStateTaskExecutorTests extends ESAllocationTestC
assertThat(latestIndexMetadata.getEventIngestedRange(), sameInstance(IndexLongFieldRange.UNKNOWN));
}
public void testIndexRefreshBlockIsClearedOnceTheIndexIsReadyToBeSearched() throws Exception {
final var indexName = "test";
final var numberOfShards = randomIntBetween(1, 4);
final var numberOfReplicas = randomIntBetween(1, 4);
var clusterState = ClusterStateCreationUtils.stateWithAssignedPrimariesAndReplicasWithState(
new String[] { indexName },
numberOfShards,
ShardRouting.Role.INDEX_ONLY,
IntStream.range(0, numberOfReplicas)
.mapToObj(unused -> Tuple.tuple(ShardRoutingState.UNASSIGNED, ShardRouting.Role.SEARCH_ONLY))
.toList()
);
clusterState = ClusterState.builder(clusterState)
.metadata(Metadata.builder(clusterState.metadata()).put(withActiveShardsInSyncAllocationIds(clusterState, indexName)))
.blocks(ClusterBlocks.builder(clusterState.blocks()).addIndexBlock(indexName, INDEX_REFRESH_BLOCK))
.build();
while (clusterState.blocks().hasIndexBlock(indexName, INDEX_REFRESH_BLOCK)) {
clusterState = maybeInitializeUnassignedReplicaShard(clusterState);
final IndexMetadata indexMetadata = clusterState.metadata().getProject().index(indexName);
final var initializingReplicaShardOpt = clusterState.routingTable()
.allShards()
.filter(shardRouting -> shardRouting.isPromotableToPrimary() == false)
.filter(shardRouting -> shardRouting.state().equals(ShardRoutingState.INITIALIZING))
.findFirst();
assertThat(clusterState.routingTable().allShards().toList().toString(), initializingReplicaShardOpt.isPresent(), is(true));
var initializingReplicaShard = initializingReplicaShardOpt.get();
final var shardId = initializingReplicaShard.shardId();
final var primaryTerm = indexMetadata.primaryTerm(shardId.id());
final var replicaAllocationId = initializingReplicaShard.allocationId().getId();
final var task = new StartedShardUpdateTask(
new StartedShardEntry(
shardId,
replicaAllocationId,
primaryTerm,
"test",
ShardLongFieldRange.UNKNOWN,
ShardLongFieldRange.UNKNOWN
),
createTestListener()
);
final var resultingState = executeTasks(clusterState, List.of(task));
assertNotSame(clusterState, resultingState);
clusterState = resultingState;
}
var indexRoutingTable = clusterState.routingTable().index(indexName);
assertThat(indexRoutingTable.readyForSearch(), is(true));
for (int i = 0; i < numberOfShards; i++) {
var shardRoutingTable = indexRoutingTable.shard(i);
assertThat(shardRoutingTable, is(notNullValue()));
// Ensure that at least one unpromotable shard is either STARTED or RELOCATING
assertThat(shardRoutingTable.unpromotableShards().isEmpty(), is(false));
}
assertThat(clusterState.blocks().hasIndexBlock(indexName, INDEX_REFRESH_BLOCK), is(false));
}
private static ClusterState maybeInitializeUnassignedReplicaShard(ClusterState clusterState) {
var unassignedShardRoutingOpt = clusterState.routingTable()
.allShards()
.filter(shardRouting -> shardRouting.state().equals(ShardRoutingState.UNASSIGNED))
.findFirst();
if (unassignedShardRoutingOpt.isEmpty()) {
return clusterState;
}
var unassignedShardRouting = unassignedShardRoutingOpt.get();
var initializedShard = unassignedShardRouting.initialize(randomUUID(), null, 1);
RoutingTable routingTable = clusterState.routingTable();
IndexRoutingTable indexRoutingTable = routingTable.index(unassignedShardRouting.getIndexName());
IndexRoutingTable.Builder newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex());
for (int shardId = 0; shardId < indexRoutingTable.size(); shardId++) {
IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId);
for (int copy = 0; copy < shardRoutingTable.size(); copy++) {
ShardRouting shardRouting = shardRoutingTable.shard(copy);
newIndexRoutingTable.addShard(shardRouting == unassignedShardRouting ? initializedShard : shardRouting);
}
}
routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build();
return ClusterState.builder(clusterState).routingTable(routingTable).build();
}
private static IndexMetadata.Builder withActiveShardsInSyncAllocationIds(ClusterState clusterState, String indexName) {
IndexMetadata.Builder indexMetadataBuilder = new IndexMetadata.Builder(clusterState.metadata().getProject().index(indexName));
var indexRoutingTable = clusterState.routingTable().index(indexName);
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable.allShards().toList()) {
indexMetadataBuilder.putInSyncAllocationIds(
indexShardRoutingTable.shardId().getId(),
indexShardRoutingTable.activeShards()
.stream()
.map(ShardRouting::allocationId)
.map(AllocationId::getId)
.collect(Collectors.toSet())
);
}
return indexMetadataBuilder;
}
private ClusterState executeTasks(final ClusterState state, final List<StartedShardUpdateTask> tasks) throws Exception {
return ClusterStateTaskExecutorUtils.executeAndAssertSuccessful(state, executor, tasks);
}

View file

@ -18,6 +18,7 @@ import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.FailureStoreMetrics;
import org.elasticsearch.action.bulk.IndexDocFailureStoreStatus;
import org.elasticsearch.action.bulk.TransportBulkAction;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
@ -123,7 +124,6 @@ import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
@ -206,12 +206,18 @@ public class IngestServiceTests extends ESTestCase {
.setPipeline("_id")
.setFinalPipeline("_none");
Boolean noRedirect = randomBoolean() ? false : null;
IndexDocFailureStoreStatus fsStatus = noRedirect == null
? IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN
: IndexDocFailureStoreStatus.NOT_ENABLED;
final SetOnce<Boolean> failure = new SetOnce<>();
final BiConsumer<Integer, Exception> failureHandler = (slot, e) -> {
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = (slot, e, status) -> {
failure.set(true);
assertThat(slot, equalTo(0));
assertThat(e, instanceOf(IllegalArgumentException.class));
assertThat(e.getMessage(), equalTo("pipeline with id [_id] does not exist"));
assertThat(status, equalTo(fsStatus));
};
@SuppressWarnings("unchecked")
@ -222,7 +228,7 @@ public class IngestServiceTests extends ESTestCase {
1,
List.of(indexRequest),
indexReq -> {},
(s) -> false,
(s) -> noRedirect,
(slot, targetIndex, e) -> fail("Should not be redirecting failures"),
failureHandler,
completionHandler,
@ -1173,11 +1179,17 @@ public class IngestServiceTests extends ESTestCase {
IndexRequest indexRequest2 = new IndexRequest("_index").id("_id2").source(Map.of()).setPipeline(id).setFinalPipeline("_none");
bulkRequest.add(indexRequest2);
final BiConsumer<Integer, Exception> failureHandler = (slot, e) -> {
Boolean noRedirect = randomBoolean() ? false : null;
IndexDocFailureStoreStatus fsStatus = noRedirect == null
? IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN
: IndexDocFailureStoreStatus.NOT_ENABLED;
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = (slot, e, status) -> {
assertThat(e.getCause(), instanceOf(IllegalStateException.class));
assertThat(e.getCause().getMessage(), equalTo("error"));
failure.set(true);
assertThat(slot, equalTo(1));
assertThat(status, equalTo(fsStatus));
};
@SuppressWarnings("unchecked")
@ -1188,7 +1200,7 @@ public class IngestServiceTests extends ESTestCase {
bulkRequest.numberOfActions(),
bulkRequest.requests(),
indexReq -> {},
(s) -> false,
(s) -> noRedirect,
(slot, targetIndex, e) -> fail("Should not be redirecting failures"),
failureHandler,
completionHandler,
@ -1225,23 +1237,30 @@ public class IngestServiceTests extends ESTestCase {
.setFinalPipeline("_none");
bulkRequest.add(indexRequest3);
@SuppressWarnings("unchecked")
BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
Boolean noRedirect = randomBoolean() ? false : null;
IndexDocFailureStoreStatus fsStatus = noRedirect == null
? IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN
: IndexDocFailureStoreStatus.NOT_ENABLED;
ingestService.executeBulkRequest(
projectId,
bulkRequest.numberOfActions(),
bulkRequest.requests(),
indexReq -> {},
(s) -> false,
(s) -> noRedirect,
(slot, targetIndex, e) -> fail("Should not be redirecting failures"),
failureHandler,
completionHandler,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
verify(failureHandler, times(1)).accept(
verify(failureHandler, times(1)).apply(
argThat(item -> item == 2),
argThat(iae -> "pipeline with id [does_not_exist] does not exist".equals(iae.getMessage()))
argThat(iae -> "pipeline with id [does_not_exist] does not exist".equals(iae.getMessage())),
argThat(fsStatus::equals)
);
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
}
@ -1263,7 +1282,7 @@ public class IngestServiceTests extends ESTestCase {
.setPipeline("_id")
.setFinalPipeline("_none");
@SuppressWarnings("unchecked")
final BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
ingestService.executeBulkRequest(
@ -1277,7 +1296,7 @@ public class IngestServiceTests extends ESTestCase {
completionHandler,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
verify(failureHandler, never()).accept(any(), any());
verifyNoInteractions(failureHandler);
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
}
@ -1306,7 +1325,9 @@ public class IngestServiceTests extends ESTestCase {
.setPipeline("_id")
.setFinalPipeline("_none");
CountDownLatch latch = new CountDownLatch(1);
final BiConsumer<Integer, Exception> failureHandler = (v, e) -> { throw new AssertionError("must never fail", e); };
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = (v, e, s) -> {
throw new AssertionError("must never fail", e);
};
final BiConsumer<Thread, Exception> completionHandler = (t, e) -> latch.countDown();
ingestService.executeBulkRequest(
projectId,
@ -1339,7 +1360,7 @@ public class IngestServiceTests extends ESTestCase {
.setPipeline("_id")
.setFinalPipeline("_none");
@SuppressWarnings("unchecked")
final BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
ingestService.executeBulkRequest(
@ -1353,7 +1374,7 @@ public class IngestServiceTests extends ESTestCase {
completionHandler,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
verify(failureHandler, never()).accept(any(), any());
verifyNoInteractions(failureHandler);
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
}
@ -1402,7 +1423,7 @@ public class IngestServiceTests extends ESTestCase {
.setPipeline("_id")
.setFinalPipeline("_none");
@SuppressWarnings("unchecked")
final BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
ingestService.executeBulkRequest(
@ -1417,7 +1438,7 @@ public class IngestServiceTests extends ESTestCase {
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
verify(processor).execute(any(), any());
verify(failureHandler, never()).accept(any(), any());
verifyNoInteractions(failureHandler);
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
assertThat(indexRequest.index(), equalTo("update_index"));
assertThat(indexRequest.id(), equalTo("update_id"));
@ -1456,22 +1477,27 @@ public class IngestServiceTests extends ESTestCase {
doThrow(new RuntimeException()).when(processor)
.execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any());
@SuppressWarnings("unchecked")
final BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
Boolean noRedirect = randomBoolean() ? false : null;
IndexDocFailureStoreStatus fsStatus = noRedirect == null
? IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN
: IndexDocFailureStoreStatus.NOT_ENABLED;
ingestService.executeBulkRequest(
projectId,
1,
List.of(indexRequest),
indexReq -> {},
(s) -> false,
(s) -> noRedirect,
(slot, targetIndex, e) -> fail("Should not be redirecting failures"),
failureHandler,
completionHandler,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any());
verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class));
verify(failureHandler, times(1)).apply(eq(0), any(RuntimeException.class), eq(fsStatus));
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
}
@ -1514,7 +1540,7 @@ public class IngestServiceTests extends ESTestCase {
.setPipeline("_id")
.setFinalPipeline("_none");
@SuppressWarnings("unchecked")
final BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
ingestService.executeBulkRequest(
@ -1528,7 +1554,7 @@ public class IngestServiceTests extends ESTestCase {
completionHandler,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
verify(failureHandler, never()).accept(eq(0), any(IngestProcessorException.class));
verifyNoInteractions(failureHandler);
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
}
@ -1566,22 +1592,28 @@ public class IngestServiceTests extends ESTestCase {
doThrow(new RuntimeException()).when(processor)
.execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any());
@SuppressWarnings("unchecked")
final BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
Boolean noRedirect = randomBoolean() ? false : null;
IndexDocFailureStoreStatus fsStatus = noRedirect == null
? IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN
: IndexDocFailureStoreStatus.NOT_ENABLED;
ingestService.executeBulkRequest(
projectId,
1,
List.of(indexRequest),
indexReq -> {},
(s) -> false,
(s) -> noRedirect,
(slot, targetIndex, e) -> fail("Should not be redirecting failures"),
failureHandler,
completionHandler,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any());
verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class));
verify(failureHandler, times(1)).apply(eq(0), any(RuntimeException.class), eq(fsStatus));
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
}
@ -1629,22 +1661,28 @@ public class IngestServiceTests extends ESTestCase {
ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
@SuppressWarnings("unchecked")
BiConsumer<Integer, Exception> requestItemErrorHandler = mock(BiConsumer.class);
TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> requestItemErrorHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
Boolean noRedirect = randomBoolean() ? false : null;
IndexDocFailureStoreStatus fsStatus = noRedirect == null
? IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN
: IndexDocFailureStoreStatus.NOT_ENABLED;
ingestService.executeBulkRequest(
projectId,
numRequest,
bulkRequest.requests(),
indexReq -> {},
(s) -> false,
(s) -> noRedirect,
(slot, targetIndex, e) -> fail("Should not be redirecting failures"),
requestItemErrorHandler,
completionHandler,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
verify(requestItemErrorHandler, times(numIndexRequests)).accept(anyInt(), argThat(e -> e.getCause().equals(error)));
verify(requestItemErrorHandler, times(numIndexRequests)).apply(anyInt(), argThat(e -> e.getCause().equals(error)), eq(fsStatus));
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
}
@ -1679,7 +1717,7 @@ public class IngestServiceTests extends ESTestCase {
@SuppressWarnings("unchecked")
final TriConsumer<Integer, String, Exception> redirectHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
ingestService.executeBulkRequest(
@ -1699,6 +1737,53 @@ public class IngestServiceTests extends ESTestCase {
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
}
public void testExecuteFailureStatusOnFailureWithoutRedirection() throws Exception {
final CompoundProcessor processor = mockCompoundProcessor();
IngestService ingestService = createWithProcessors(
Map.of(
"mock",
(factories, tag, description, config) -> processor,
"set",
(factories, tag, description, config) -> new FakeProcessor("set", "", "", (ingestDocument) -> fail())
)
);
PutPipelineRequest putRequest1 = putJsonPipelineRequest("_id1", "{\"processors\": [{\"mock\" : {}}]}");
// given that set -> fail() above, it's a failure if a document executes against this pipeline
PutPipelineRequest putRequest2 = putJsonPipelineRequest("_id2", "{\"processors\": [{\"set\" : {}}]}");
ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty
ClusterState previousClusterState = clusterState;
clusterState = executePut(putRequest1, clusterState);
clusterState = executePut(putRequest2, clusterState);
ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
final IndexRequest indexRequest = new IndexRequest("_index").id("_id")
.source(Map.of())
.setPipeline("_id1")
.setFinalPipeline("_id2");
doThrow(new RuntimeException()).when(processor)
.execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any());
final Function<String, Boolean> redirectCheck = (idx) -> indexRequest.index().equals(idx) ? false : null;
@SuppressWarnings("unchecked")
final TriConsumer<Integer, String, Exception> redirectHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
ingestService.executeBulkRequest(
projectId,
1,
List.of(indexRequest),
indexReq -> {},
redirectCheck,
redirectHandler,
failureHandler,
completionHandler,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any());
verifyNoInteractions(redirectHandler);
verify(failureHandler, times(1)).apply(eq(0), any(RuntimeException.class), eq(IndexDocFailureStoreStatus.NOT_ENABLED));
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
}
public void testExecuteFailureRedirectionWithNestedOnFailure() throws Exception {
final Processor processor = mock(Processor.class);
when(processor.isAsync()).thenReturn(true);
@ -1736,7 +1821,7 @@ public class IngestServiceTests extends ESTestCase {
@SuppressWarnings("unchecked")
final TriConsumer<Integer, String, Exception> redirectHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
ingestService.executeBulkRequest(
@ -1802,7 +1887,7 @@ public class IngestServiceTests extends ESTestCase {
@SuppressWarnings("unchecked")
TriConsumer<Integer, String, Exception> requestItemRedirectHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
BiConsumer<Integer, Exception> requestItemErrorHandler = mock(BiConsumer.class);
TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> requestItemErrorHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
ingestService.executeBulkRequest(
@ -1867,7 +1952,7 @@ public class IngestServiceTests extends ESTestCase {
ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
@SuppressWarnings("unchecked")
BiConsumer<Integer, Exception> requestItemErrorHandler = mock(BiConsumer.class);
TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> requestItemErrorHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
ingestService.executeBulkRequest(
@ -1882,7 +1967,7 @@ public class IngestServiceTests extends ESTestCase {
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
verify(requestItemErrorHandler, never()).accept(any(), any());
verifyNoInteractions(requestItemErrorHandler);
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
for (int i = 0; i < bulkRequest.requests().size(); i++) {
DocWriteRequest<?> docWriteRequest = bulkRequest.requests().get(i);
@ -1986,7 +2071,7 @@ public class IngestServiceTests extends ESTestCase {
indexReq -> {},
(s) -> false,
(slot, targetIndex, e) -> fail("Should not be redirecting failures"),
(integer, e) -> {},
(integer, e, status) -> {},
(thread, e) -> {},
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
@ -2054,7 +2139,7 @@ public class IngestServiceTests extends ESTestCase {
ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
@SuppressWarnings("unchecked")
final BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
@ -2280,7 +2365,7 @@ public class IngestServiceTests extends ESTestCase {
bulkRequest.add(indexRequest2);
@SuppressWarnings("unchecked")
final BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
final TriConsumer<Integer, Exception, IndexDocFailureStoreStatus> failureHandler = mock(TriConsumer.class);
@SuppressWarnings("unchecked")
final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
@SuppressWarnings("unchecked")
@ -2296,7 +2381,7 @@ public class IngestServiceTests extends ESTestCase {
completionHandler,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
verify(failureHandler, never()).accept(any(), any());
verifyNoInteractions(failureHandler);
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
verify(dropHandler, times(1)).accept(1);
}
@ -2382,7 +2467,7 @@ public class IngestServiceTests extends ESTestCase {
indexReq -> {},
(s) -> false,
(slot, targetIndex, e) -> fail("Should not be redirecting failures"),
(integer, e) -> {},
(integer, e, status) -> {},
(thread, e) -> {},
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
@ -2458,7 +2543,7 @@ public class IngestServiceTests extends ESTestCase {
indexReq -> {},
(s) -> false,
(slot, targetIndex, e) -> fail("Should not be redirecting failures"),
(integer, e) -> {},
(integer, e, status) -> {},
(thread, e) -> {},
EsExecutors.DIRECT_EXECUTOR_SERVICE
);

View file

@ -33,6 +33,7 @@ dependencies {
api "org.elasticsearch:mocksocket:${versions.mocksocket}"
testImplementation project(':x-pack:plugin:mapper-unsigned-long')
testImplementation project(':x-pack:plugin:mapper-counted-keyword')
testImplementation project(":modules:mapper-extras")
}

View file

@ -460,15 +460,11 @@ public class ClusterStateCreationUtils {
int numberOfShards,
int numberOfReplicas
) {
return stateWithAssignedPrimariesAndReplicasWithState(
return stateWithAssignedPrimariesAndReplicas(
projectId,
indices,
numberOfShards,
ShardRouting.Role.DEFAULT,
Collections.nCopies(numberOfReplicas, ShardRouting.Role.DEFAULT)
.stream()
.map(role -> Tuple.tuple(ShardRoutingState.STARTED, role))
.toList()
);
}
@ -480,53 +476,19 @@ public class ClusterStateCreationUtils {
int numberOfShards,
List<ShardRouting.Role> replicaRoles
) {
return stateWithAssignedPrimariesAndReplicasWithState(
indices,
numberOfShards,
replicaRoles.stream().map(role -> Tuple.tuple(ShardRoutingState.STARTED, role)).toList()
);
return stateWithAssignedPrimariesAndReplicas(Metadata.DEFAULT_PROJECT_ID, indices, numberOfShards, replicaRoles);
}
/**
* Creates cluster state with several indexes, shards and replicas (with given roles and state) and all primary shards STARTED.
* Creates cluster state with several indexes, shards and replicas (with given roles) and all shards STARTED.
*/
public static ClusterState stateWithAssignedPrimariesAndReplicasWithState(
String[] indices,
int numberOfShards,
List<Tuple<ShardRoutingState, ShardRouting.Role>> replicaRoleAndStates
) {
return stateWithAssignedPrimariesAndReplicasWithState(indices, numberOfShards, ShardRouting.Role.DEFAULT, replicaRoleAndStates);
}
/**
* Creates cluster state with several indexes, shards and replicas (with given roles and state) and all primary shards STARTED.
*/
public static ClusterState stateWithAssignedPrimariesAndReplicasWithState(
String[] indices,
int numberOfShards,
ShardRouting.Role primaryRole,
List<Tuple<ShardRoutingState, ShardRouting.Role>> replicasStateAndRoles
) {
return stateWithAssignedPrimariesAndReplicasWithState(
Metadata.DEFAULT_PROJECT_ID,
indices,
numberOfShards,
primaryRole,
replicasStateAndRoles
);
}
/**
* Creates cluster state with several indexes, shards and replicas (with given roles and state) and all primary shards STARTED.
*/
public static ClusterState stateWithAssignedPrimariesAndReplicasWithState(
public static ClusterState stateWithAssignedPrimariesAndReplicas(
ProjectId projectId,
String[] indices,
int numberOfShards,
ShardRouting.Role primaryRole,
List<Tuple<ShardRoutingState, ShardRouting.Role>> replicasStateAndRoles
List<ShardRouting.Role> replicaRoles
) {
int numberOfDataNodes = replicasStateAndRoles.size() + 1;
int numberOfDataNodes = replicaRoles.size() + 1;
DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
for (int i = 0; i < numberOfDataNodes + 1; i++) {
final DiscoveryNode node = newNode(i);
@ -546,7 +508,7 @@ public class ClusterStateCreationUtils {
for (String index : indices) {
IndexMetadata indexMetadata = IndexMetadata.builder(index)
.settings(
indexSettings(IndexVersion.current(), numberOfShards, replicasStateAndRoles.size()).put(
indexSettings(IndexVersion.current(), numberOfShards, replicaRoles.size()).put(
SETTING_CREATION_DATE,
System.currentTimeMillis()
)
@ -560,19 +522,14 @@ public class ClusterStateCreationUtils {
final ShardId shardId = new ShardId(index, "_na_", i);
IndexShardRoutingTable.Builder indexShardRoutingBuilder = IndexShardRoutingTable.builder(shardId);
indexShardRoutingBuilder.addShard(
shardRoutingBuilder(index, i, newNode(0).getId(), true, ShardRoutingState.STARTED).withRole(primaryRole).build()
TestShardRouting.newShardRouting(index, i, newNode(0).getId(), null, true, ShardRoutingState.STARTED)
);
for (int replica = 0; replica < replicasStateAndRoles.size(); replica++) {
var replicaStateAndRole = replicasStateAndRoles.get(replica);
ShardRoutingState shardRoutingState = replicaStateAndRole.v1();
String currentNodeId = shardRoutingState.equals(ShardRoutingState.UNASSIGNED) ? null : newNode(replica + 1).getId();
var shardRoutingBuilder = shardRoutingBuilder(index, i, currentNodeId, false, shardRoutingState).withRole(
replicaStateAndRole.v2()
for (int replica = 0; replica < replicaRoles.size(); replica++) {
indexShardRoutingBuilder.addShard(
shardRoutingBuilder(index, i, newNode(replica + 1).getId(), false, ShardRoutingState.STARTED).withRole(
replicaRoles.get(replica)
).build()
);
if (shardRoutingState.equals(ShardRoutingState.RELOCATING)) {
shardRoutingBuilder.withRelocatingNodeId(DiscoveryNodeUtils.create("relocating_" + replica).getId());
}
indexShardRoutingBuilder.addShard(shardRoutingBuilder.build());
}
indexRoutingTableBuilder.addIndexShard(indexShardRoutingBuilder);
}
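With the overloads collapsed, a caller builds a mixed-role state through the single remaining entry point; a sketch (index name and counts illustrative):
ClusterState state = ClusterStateCreationUtils.stateWithAssignedPrimariesAndReplicas(
Metadata.DEFAULT_PROJECT_ID,
new String[] { "test" },
2,
List.of(ShardRouting.Role.SEARCH_ONLY, ShardRouting.Role.SEARCH_ONLY)
);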

View file

@ -11,6 +11,7 @@ package org.elasticsearch.logsdb.datageneration;
import org.elasticsearch.logsdb.datageneration.datasource.DataSource;
import org.elasticsearch.logsdb.datageneration.fields.leaf.ByteFieldDataGenerator;
import org.elasticsearch.logsdb.datageneration.fields.leaf.CountedKeywordFieldDataGenerator;
import org.elasticsearch.logsdb.datageneration.fields.leaf.DoubleFieldDataGenerator;
import org.elasticsearch.logsdb.datageneration.fields.leaf.FloatFieldDataGenerator;
import org.elasticsearch.logsdb.datageneration.fields.leaf.HalfFloatFieldDataGenerator;
@@ -34,7 +35,8 @@ public enum FieldType {
DOUBLE("double"),
FLOAT("float"),
HALF_FLOAT("half_float"),
SCALED_FLOAT("scaled_float");
SCALED_FLOAT("scaled_float"),
COUNTED_KEYWORD("counted_keyword");
private final String name;
@@ -54,6 +56,7 @@ public enum FieldType {
case FLOAT -> new FloatFieldDataGenerator(fieldName, dataSource);
case HALF_FLOAT -> new HalfFloatFieldDataGenerator(fieldName, dataSource);
case SCALED_FLOAT -> new ScaledFloatFieldDataGenerator(fieldName, dataSource);
case COUNTED_KEYWORD -> new CountedKeywordFieldDataGenerator(fieldName, dataSource);
};
}

View file

@@ -54,6 +54,10 @@ public interface DataSourceHandler {
return null;
}
default DataSourceResponse.RepeatingWrapper handle(DataSourceRequest.RepeatingWrapper request) {
return null;
}
default DataSourceResponse.ChildFieldGenerator handle(DataSourceRequest.ChildFieldGenerator request) {
return null;
}

View file

@@ -85,6 +85,12 @@ public interface DataSourceRequest<TResponse extends DataSourceResponse> {
}
}
record RepeatingWrapper() implements DataSourceRequest<DataSourceResponse.RepeatingWrapper> {
public DataSourceResponse.RepeatingWrapper accept(DataSourceHandler handler) {
return handler.handle(this);
}
}
record ChildFieldGenerator(DataGeneratorSpecification specification)
implements
DataSourceRequest<DataSourceResponse.ChildFieldGenerator> {

View file

@@ -39,6 +39,8 @@ public interface DataSourceResponse {
record ArrayWrapper(Function<Supplier<Object>, Supplier<Object>> wrapper) implements DataSourceResponse {}
record RepeatingWrapper(Function<Supplier<Object>, Supplier<Object>> wrapper) implements DataSourceResponse {}
interface ChildFieldGenerator extends DataSourceResponse {
int generateChildFieldCount();

View file

@@ -33,6 +33,7 @@ public class DefaultMappingParametersHandler implements DataSourceHandler {
case KEYWORD -> keywordMapping(request, map);
case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, HALF_FLOAT, UNSIGNED_LONG -> plain(map);
case SCALED_FLOAT -> scaledFloatMapping(map);
case COUNTED_KEYWORD -> plain(Map.of("index", ESTestCase.randomBoolean()));
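// Note: for counted_keyword only the "index" flag is randomized for now; all other
// mapping parameters use their defaults.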
});
}

View file

@@ -11,6 +11,7 @@ package org.elasticsearch.logsdb.datageneration.datasource;
import org.elasticsearch.test.ESTestCase;
import java.util.HashSet;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.IntStream;
@@ -26,6 +27,11 @@ public class DefaultWrappersHandler implements DataSourceHandler {
return new DataSourceResponse.ArrayWrapper(wrapInArray());
}
@Override
public DataSourceResponse.RepeatingWrapper handle(DataSourceRequest.RepeatingWrapper ignored) {
return new DataSourceResponse.RepeatingWrapper(repeatValues());
}
private static Function<Supplier<Object>, Supplier<Object>> injectNulls() {
// Inject some nulls, but the majority of the data should be non-null (as it likely is in reality).
return (values) -> () -> ESTestCase.randomDouble() <= 0.05 ? null : values.get();
@@ -41,4 +47,19 @@ public class DefaultWrappersHandler implements DataSourceHandler {
return values.get();
};
}
private static Function<Supplier<Object>, Supplier<Object>> repeatValues() {
return (values) -> {
HashSet<Object> previousValues = new HashSet<>();
return () -> {
if (previousValues.size() > 0 && ESTestCase.randomBoolean()) {
return ESTestCase.randomFrom(previousValues);
} else {
var value = values.get();
previousValues.add(value);
return value;
}
};
};
}
}
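// Standalone sketch (assumed equivalent) of the supplier that repeatValues() builds:
// once at least one value has been produced, roughly half of all calls return a
// previously generated value instead of a fresh one.
Set<Object> seen = new HashSet<>();
Supplier<Object> fresh = () -> ESTestCase.randomAlphaOfLength(5);
Supplier<Object> repeating = () -> {
    if (seen.isEmpty() == false && ESTestCase.randomBoolean()) {
        return ESTestCase.randomFrom(seen);
    }
    var value = fresh.get();
    seen.add(value);
    return value;
};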

View file

@@ -0,0 +1,37 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.logsdb.datageneration.fields.leaf;
import org.elasticsearch.logsdb.datageneration.FieldDataGenerator;
import org.elasticsearch.logsdb.datageneration.datasource.DataSource;
import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest;
import java.util.HashSet;
import java.util.Set;
import java.util.function.Supplier;
public class CountedKeywordFieldDataGenerator implements FieldDataGenerator {
private final Supplier<Object> valueGenerator;
private final Set<String> previousStrings = new HashSet<>();
public CountedKeywordFieldDataGenerator(String fieldName, DataSource dataSource) {
var strings = dataSource.get(new DataSourceRequest.StringGenerator());
var nulls = dataSource.get(new DataSourceRequest.NullWrapper());
var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper());
var repeats = dataSource.get(new DataSourceRequest.RepeatingWrapper());
this.valueGenerator = arrays.wrapper().compose(nulls.wrapper().compose(repeats.wrapper())).apply(() -> strings.generator().get());
}
@Override
public Object generateValue() {
return valueGenerator.get();
}
}
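// Ordering note: Function.compose applies inside-out, so the chain above is equivalent to
// applying the repeating wrapper first, then nulls, then arrays:
//   Supplier<Object> withRepeats = repeats.wrapper().apply(() -> strings.generator().get());
//   Supplier<Object> withNulls   = nulls.wrapper().apply(withRepeats);
//   Supplier<Object> generator   = arrays.wrapper().apply(withNulls);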

View file

@@ -15,6 +15,7 @@ import org.elasticsearch.logsdb.datageneration.matchers.MatchResult;
import org.elasticsearch.xcontent.XContentBuilder;
import java.math.BigInteger;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -223,4 +224,68 @@ interface FieldSpecificMatcher {
return (BigInteger) value;
}
}
class CountedKeywordMatcher implements FieldSpecificMatcher {
private final XContentBuilder actualMappings;
private final Settings.Builder actualSettings;
private final XContentBuilder expectedMappings;
private final Settings.Builder expectedSettings;
CountedKeywordMatcher(
XContentBuilder actualMappings,
Settings.Builder actualSettings,
XContentBuilder expectedMappings,
Settings.Builder expectedSettings
) {
this.actualMappings = actualMappings;
this.actualSettings = actualSettings;
this.expectedMappings = expectedMappings;
this.expectedSettings = expectedSettings;
}
private static List<String> normalize(List<Object> values) {
return values.stream().filter(Objects::nonNull).map(it -> (String) it).toList();
}
private static boolean matchCountsEqualExact(List<String> actualNormalized, List<String> expectedNormalized) {
HashMap<String, Integer> counts = new HashMap<>();
for (String value : actualNormalized) {
counts.put(value, counts.getOrDefault(value, 0) + 1);
}
for (String value : expectedNormalized) {
int newCount = counts.getOrDefault(value, 0) - 1;
if (newCount == 0) {
counts.remove(value);
} else {
counts.put(value, newCount);
}
}
return counts.isEmpty();
}
@Override
public MatchResult match(
List<Object> actual,
List<Object> expected,
Map<String, Object> actualMapping,
Map<String, Object> expectedMapping
) {
var actualNormalized = normalize(actual);
var expectedNormalized = normalize(expected);
return matchCountsEqualExact(actualNormalized, expectedNormalized)
? MatchResult.match()
: MatchResult.noMatch(
formatErrorMessage(
actualMappings,
actualSettings,
expectedMappings,
expectedSettings,
"Values of type [counted_keyword] don't match after normalization, normalized"
+ prettyPrintCollections(actualNormalized, expectedNormalized)
)
);
}
}
}
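// Worked example (illustrative): matchCountsEqualExact is an order-insensitive multiset check.
//   actual   = [a, b, a, c, b, a] builds {a: 3, b: 2, c: 1}
//   expected = [a, b, a, c, b, a] decrements every entry to zero -> match
//   expected = [a, b, c, a]       leaves {a: 1, b: 1} behind     -> no match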

View file

@@ -57,7 +57,9 @@ public class SourceMatcher extends GenericEqualsMatcher<List<Map<String, Object>
"scaled_float",
new FieldSpecificMatcher.ScaledFloatMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings),
"unsigned_long",
new FieldSpecificMatcher.UnsignedLongMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings)
new FieldSpecificMatcher.UnsignedLongMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings),
"counted_keyword",
new FieldSpecificMatcher.CountedKeywordMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings)
);
this.dynamicFieldMatcher = new DynamicFieldMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings);
}
@@ -100,17 +102,8 @@ public class SourceMatcher extends GenericEqualsMatcher<List<Map<String, Object>
var actualValues = actual.get(name);
var expectedValues = expectedFieldEntry.getValue();
// There are cases when field values are stored in ignored source
// so we try to match them as is first and then apply field specific matcher.
// This is temporary, we should be able to tell when source is exact using mappings.
// See #111916.
var genericMatchResult = matchWithGenericMatcher(actualValues, expectedValues);
if (genericMatchResult.isMatch()) {
continue;
}
var matchIncludingFieldSpecificMatchers = matchWithFieldSpecificMatcher(name, actualValues, expectedValues).orElse(
genericMatchResult
var matchIncludingFieldSpecificMatchers = matchWithFieldSpecificMatcher(name, actualValues, expectedValues).orElseGet(
() -> matchWithGenericMatcher(actualValues, expectedValues)
);
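// Field-specific matchers now take precedence; the generic matcher only runs as a lazy
// fallback when no field-specific matcher is registered for the type.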
if (matchIncludingFieldSpecificMatchers.isMatch() == false) {
var message = "Source documents don't match for field [" + name + "]: " + matchIncludingFieldSpecificMatchers.getMessage();

View file

@@ -145,8 +145,21 @@ public abstract class AbstractXContentTestCase<T extends ToXContent> extends EST
public void test() throws IOException {
for (int runs = 0; runs < numberOfTestRuns; runs++) {
XContentType xContentType = randomFrom(XContentType.values()).canonical();
T testInstance = instanceSupplier.apply(xContentType);
T testInstance = null;
try {
if (xContentType.equals(XContentType.YAML)) {
testInstance = randomValueOtherThanMany(instance -> {
// unicode character U+0085 (NEXT LINE (NEL)) doesn't survive YAML round trip tests (see #97716)
// get a new random instance if we detect this character in the xContent output
try {
return toXContent.apply(instance, xContentType).utf8ToString().contains("\u0085");
} catch (IOException e) {
throw new AssertionError(e);
}
}, () -> instanceSupplier.apply(xContentType));
} else {
testInstance = instanceSupplier.apply(xContentType);
}
BytesReference originalXContent = toXContent.apply(testInstance, xContentType);
BytesReference shuffledContent = insertRandomFieldsAndShuffle(
originalXContent,
@@ -173,10 +186,12 @@ public abstract class AbstractXContentTestCase<T extends ToXContent> extends EST
dispose.accept(parsed);
}
} finally {
if (testInstance != null) {
dispose.accept(testInstance);
}
}
}
}
public XContentTester<T> numberOfTestRuns(int numberOfTestRuns) {
this.numberOfTestRuns = numberOfTestRuns;

View file

@@ -20,6 +20,7 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.countedkeyword.CountedKeywordMapperPlugin;
import org.elasticsearch.xpack.unsignedlong.UnsignedLongMapperPlugin;
import java.io.IOException;
@@ -110,7 +111,7 @@ public class DataGenerationTests extends ESTestCase {
var mappingService = new MapperServiceTestCase() {
@Override
protected Collection<? extends Plugin> getPlugins() {
return List.of(new UnsignedLongMapperPlugin(), new MapperExtrasPlugin());
return List.of(new UnsignedLongMapperPlugin(), new MapperExtrasPlugin(), new CountedKeywordMapperPlugin());
}
}.createMapperService(mappingXContent);

View file

@@ -105,4 +105,38 @@ public class SourceMatcherTests extends ESTestCase {
var sut = new SourceMatcher(mapping, Settings.builder(), mapping, Settings.builder(), actual, expected, false);
assertFalse(sut.match().isMatch());
}
public void testCountedKeywordMatch() throws IOException {
List<Map<String, Object>> actual = List.of(Map.of("field", List.of("a", "b", "a", "c", "b", "a")));
List<Map<String, Object>> expected = List.of(Map.of("field", List.of("a", "b", "a", "c", "b", "a")));
var mapping = XContentBuilder.builder(XContentType.JSON.xContent());
mapping.startObject();
mapping.startObject("_doc");
{
mapping.startObject("field").field("type", "counted_keyword").endObject();
}
mapping.endObject();
mapping.endObject();
var sut = new SourceMatcher(mapping, Settings.builder(), mapping, Settings.builder(), actual, expected, false);
assertTrue(sut.match().isMatch());
}
public void testCountedKeywordMismatch() throws IOException {
List<Map<String, Object>> actual = List.of(Map.of("field", List.of("a", "b", "a", "c", "b", "a")));
List<Map<String, Object>> expected = List.of(Map.of("field", List.of("a", "b", "c", "a")));
var mapping = XContentBuilder.builder(XContentType.JSON.xContent());
mapping.startObject();
mapping.startObject("_doc");
{
mapping.startObject("field").field("type", "counted_keyword").endObject();
}
mapping.endObject();
mapping.endObject();
var sut = new SourceMatcher(mapping, Settings.builder(), mapping, Settings.builder(), actual, expected, false);
assertFalse(sut.match().isMatch());
}
}

View file

@@ -12,11 +12,13 @@ package org.elasticsearch.test;
import com.carrotsearch.randomizedtesting.RandomizedContext;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
@@ -49,4 +51,42 @@ public class AbstractXContentTestCaseTests extends ESTestCase {
assertThat(mapOrdered.keySet().iterator().next(), not(equalTo("field")));
}
}
private record TestToXContent(String field, String value) implements ToXContentFragment {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.field(field, value);
}
}
public void testYamlXContentRoundtripSanitization() throws Exception {
var test = new AbstractXContentTestCase<TestToXContent>() {
@Override
protected TestToXContent createTestInstance() {
// we need to randomly create both a "problematic" and an okay version in order to ensure that the sanitization code
// can draw at least one okay version if polled often enough
return randomBoolean() ? new TestToXContent("a\u0085b", "def") : new TestToXContent("a b", "def");
}
@Override
protected TestToXContent doParseInstance(XContentParser parser) throws IOException {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
String name = parser.currentName();
assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken());
String value = parser.text();
assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
return new TestToXContent(name, value);
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
};
// testFromXContent runs 20 repetitions, which makes it very likely that the YAML xContent type is hit at least once
test.testFromXContent();
}
}
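// Minimal reproduction sketch (assumed) of the problem the sanitization works around:
// YAML treats U+0085 (NEL) as a line break, so the character does not survive a round
// trip through the YAML xContent type (see #97716).
XContentBuilder builder = XContentFactory.yamlBuilder();
builder.startObject().field("field", "a\u0085b").endObject();
// Parsing the emitted YAML yields a string that no longer equals "a\u0085b".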

View file

@@ -13,6 +13,7 @@ module org.elasticsearch.deprecation {
requires org.apache.logging.log4j;
requires org.apache.logging.log4j.core;
requires log4j2.ecs.layout;
requires org.apache.lucene.core;
exports org.elasticsearch.xpack.deprecation to org.elasticsearch.server;
exports org.elasticsearch.xpack.deprecation.logging to org.elasticsearch.server;

View file

@@ -0,0 +1,56 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.deprecation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.TriConsumer;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Cluster-specific deprecation checks; these are used to populate the {@code cluster_settings} field.
*/
public class ClusterDeprecationChecker {
private static final Logger logger = LogManager.getLogger(ClusterDeprecationChecker.class);
private final List<TriConsumer<ClusterState, List<TransformConfig>, List<DeprecationIssue>>> CHECKS = List.of(
this::checkTransformSettings
);
private final NamedXContentRegistry xContentRegistry;
ClusterDeprecationChecker(NamedXContentRegistry xContentRegistry) {
this.xContentRegistry = xContentRegistry;
}
public List<DeprecationIssue> check(ClusterState clusterState, List<TransformConfig> transformConfigs) {
List<DeprecationIssue> allIssues = new ArrayList<>();
CHECKS.forEach(check -> check.apply(clusterState, transformConfigs, allIssues));
return allIssues;
}
private void checkTransformSettings(
ClusterState clusterState,
List<TransformConfig> transformConfigs,
List<DeprecationIssue> allIssues
) {
for (var config : transformConfigs) {
try {
allIssues.addAll(config.checkForDeprecations(xContentRegistry));
} catch (IOException e) {
logger.warn("failed to check transformation settings for '" + config.getId() + "'", e);
}
}
}
}
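// Hypothetical caller sketch: the transport action is expected to fetch the transform
// configs up front and then run the cluster-level checks synchronously.
ClusterDeprecationChecker checker = new ClusterDeprecationChecker(xContentRegistry);
List<DeprecationIssue> clusterIssues = checker.check(clusterState, transformConfigs);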

View file

@@ -18,13 +18,13 @@ import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import static java.util.Map.entry;
import static java.util.Map.ofEntries;
import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks;
/**
* Checks the data streams for deprecation warnings.
@@ -44,10 +44,24 @@ public class DataStreamDeprecationChecker implements ResourceDeprecationChecker
/**
* @param clusterState The cluster state provided for the checker
* @param request not used yet in these checks
* @param precomputedData not used yet in these checks
* @return the names of the data streams that have violated the checks with their respective warnings.
*/
@Override
public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) {
public Map<String, List<DeprecationIssue>> check(
ClusterState clusterState,
DeprecationInfoAction.Request request,
TransportDeprecationInfoAction.PrecomputedData precomputedData
) {
return check(clusterState);
}
/**
* @param clusterState The cluster state provided for the checker
* @return the names of the data streams that have violated the checks with their respective warnings.
*/
public Map<String, List<DeprecationIssue>> check(ClusterState clusterState) {
List<String> dataStreamNames = indexNameExpressionResolver.dataStreamNames(
clusterState,
IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN
@@ -58,7 +72,10 @@ public class DataStreamDeprecationChecker implements ResourceDeprecationChecker
Map<String, List<DeprecationIssue>> dataStreamIssues = new HashMap<>();
for (String dataStreamName : dataStreamNames) {
DataStream dataStream = clusterState.metadata().getProject().dataStreams().get(dataStreamName);
List<DeprecationIssue> issuesForSingleDataStream = filterChecks(DATA_STREAM_CHECKS, c -> c.apply(dataStream, clusterState));
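// The stream below inlines the removed filterChecks helper: run every registered check
// against the data stream and keep only the non-null issues.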
List<DeprecationIssue> issuesForSingleDataStream = DATA_STREAM_CHECKS.stream()
.map(c -> c.apply(dataStream, clusterState))
.filter(Objects::nonNull)
.toList();
if (issuesForSingleDataStream.isEmpty() == false) {
dataStreamIssues.put(dataStreamName, issuesForSingleDataStream);
}

View file

@@ -33,7 +33,7 @@ import java.util.List;
import java.util.function.Predicate;
import java.util.function.Supplier;
import static org.elasticsearch.xpack.deprecation.DeprecationChecks.SKIP_DEPRECATIONS_SETTING;
import static org.elasticsearch.xpack.deprecation.TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING;
import static org.elasticsearch.xpack.deprecation.logging.DeprecationIndexingComponent.DEPRECATION_INDEXING_FLUSH_INTERVAL;
/**

View file

@@ -1,107 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.deprecation;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* Class containing all the cluster, node, and index deprecation checks that will be served
* by the {@link DeprecationInfoAction}.
*/
public class DeprecationChecks {
public static final Setting<List<String>> SKIP_DEPRECATIONS_SETTING = Setting.stringListSetting(
"deprecation.skip_deprecated_settings",
Setting.Property.NodeScope,
Setting.Property.Dynamic
);
private DeprecationChecks() {}
static List<Function<ClusterState, DeprecationIssue>> CLUSTER_SETTINGS_CHECKS = List.of();
static final List<
NodeDeprecationCheck<Settings, PluginsAndModules, ClusterState, XPackLicenseState, DeprecationIssue>> NODE_SETTINGS_CHECKS = List
.of(
NodeDeprecationChecks::checkMultipleDataPaths,
NodeDeprecationChecks::checkDataPathsList,
NodeDeprecationChecks::checkSharedDataPathSetting,
NodeDeprecationChecks::checkReservedPrefixedRealmNames,
NodeDeprecationChecks::checkExporterUseIngestPipelineSettings,
NodeDeprecationChecks::checkExporterPipelineMasterTimeoutSetting,
NodeDeprecationChecks::checkExporterCreateLegacyTemplateSetting,
NodeDeprecationChecks::checkMonitoringSettingHistoryDuration,
NodeDeprecationChecks::checkMonitoringSettingHistoryDuration,
NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecovery,
NodeDeprecationChecks::checkMonitoringSettingCollectIndices,
NodeDeprecationChecks::checkMonitoringSettingCollectCcrTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectEnrichStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecoveryStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectIndexStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectMlJobStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectNodeStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectClusterStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersHost,
NodeDeprecationChecks::checkMonitoringSettingExportersBulkTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersConnectionTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersConnectionReadTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersAuthUsername,
NodeDeprecationChecks::checkMonitoringSettingExportersAuthPass,
NodeDeprecationChecks::checkMonitoringSettingExportersSSL,
NodeDeprecationChecks::checkMonitoringSettingExportersProxyBase,
NodeDeprecationChecks::checkMonitoringSettingExportersSniffEnabled,
NodeDeprecationChecks::checkMonitoringSettingExportersHeaders,
NodeDeprecationChecks::checkMonitoringSettingExportersTemplateTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersMasterTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersEnabled,
NodeDeprecationChecks::checkMonitoringSettingExportersType,
NodeDeprecationChecks::checkMonitoringSettingExportersAlertsEnabled,
NodeDeprecationChecks::checkMonitoringSettingExportersAlertsBlacklist,
NodeDeprecationChecks::checkMonitoringSettingExportersIndexNameTimeFormat,
NodeDeprecationChecks::checkMonitoringSettingDecommissionAlerts,
NodeDeprecationChecks::checkMonitoringSettingEsCollectionEnabled,
NodeDeprecationChecks::checkMonitoringSettingCollectionEnabled,
NodeDeprecationChecks::checkMonitoringSettingCollectionInterval,
NodeDeprecationChecks::checkScriptContextCache,
NodeDeprecationChecks::checkScriptContextCompilationsRateLimitSetting,
NodeDeprecationChecks::checkScriptContextCacheSizeSetting,
NodeDeprecationChecks::checkScriptContextCacheExpirationSetting,
NodeDeprecationChecks::checkEnforceDefaultTierPreferenceSetting,
NodeDeprecationChecks::checkLifecyleStepMasterTimeoutSetting,
NodeDeprecationChecks::checkEqlEnabledSetting,
NodeDeprecationChecks::checkNodeAttrData,
NodeDeprecationChecks::checkWatcherBulkConcurrentRequestsSetting,
NodeDeprecationChecks::checkTracingApmSettings
);
/**
* Helper utility function to reduce repetition when running a specific {@link List} of checks.
*
* @param checks The functional checks to execute using the mapper function
* @param mapper The function that executes the lambda check with the appropriate arguments
* @param <T> The signature of the check (BiFunction, Function, including the appropriate arguments)
* @return The list of {@link DeprecationIssue} that were found in the cluster
*/
static <T> List<DeprecationIssue> filterChecks(List<T> checks, Function<T, DeprecationIssue> mapper) {
return checks.stream().map(mapper).filter(Objects::nonNull).collect(Collectors.toList());
}
@FunctionalInterface
public interface NodeDeprecationCheck<A, B, C, D, R> {
R apply(A first, B second, C third, D fourth);
}
}

View file

@@ -12,41 +12,25 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.ComponentTemplate;
import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.metadata.Template;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.transport.Transports;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.elasticsearch.action.ValidateActions.addValidationError;
@@ -59,93 +43,6 @@ public class DeprecationInfoAction extends ActionType<DeprecationInfoAction.Resp
super(NAME);
}
/**
* Helper utility function to reduce repetition when running a specific {@link List} of checks.
*
* @param checks The functional checks to execute using the mapper function
* @param mapper The function that executes the lambda check with the appropriate arguments
* @param <T> The signature of the check (BiFunction, Function, including the appropriate arguments)
* @return The list of {@link DeprecationIssue} that were found in the cluster
*/
public static <T> List<DeprecationIssue> filterChecks(List<T> checks, Function<T, DeprecationIssue> mapper) {
return checks.stream().map(mapper).filter(Objects::nonNull).collect(Collectors.toList());
}
/**
* This method rolls up DeprecationIssues that are identical but on different nodes. It also rolls up DeprecationIssues that are
* identical (and on different nodes) except that they differ in the removable settings listed in their meta object. We roll these up
* by taking the intersection of all removable settings in otherwise identical DeprecationIssues. That way we don't claim that a
* setting can be automatically removed if any node has it in its elasticsearch.yml.
* @param response the response that contains the deprecation issues of single nodes
* @return a list of deprecation issues rolled up across nodes
*/
private static List<DeprecationIssue> mergeNodeIssues(NodesDeprecationCheckResponse response) {
// A collection whose values are lists of DeprecationIssues that differ only by meta values (if that):
Collection<List<Tuple<DeprecationIssue, String>>> issuesToMerge = getDeprecationIssuesThatDifferOnlyByMeta(response.getNodes());
// A map of DeprecationIssues (containing only the intersection of removable settings) to the nodes they are seen on
Map<DeprecationIssue, List<String>> issueToListOfNodesMap = getMergedIssuesToNodesMap(issuesToMerge);
return issueToListOfNodesMap.entrySet().stream().map(entry -> {
DeprecationIssue issue = entry.getKey();
String details = issue.getDetails() != null ? issue.getDetails() + " " : "";
return new DeprecationIssue(
issue.getLevel(),
issue.getMessage(),
issue.getUrl(),
details + "(nodes impacted: " + entry.getValue() + ")",
issue.isResolveDuringRollingUpgrade(),
issue.getMeta()
);
}).collect(Collectors.toList());
}
/*
* This method pulls all the DeprecationIssues from the given nodeResponses, and buckets them into lists of DeprecationIssues that
* differ at most by meta values (if that). The returned tuples also contain the node name the deprecation issue was found on. If all
* nodes in the cluster were configured identically then all tuples in a list will differ only by the node name.
*/
private static Collection<List<Tuple<DeprecationIssue, String>>> getDeprecationIssuesThatDifferOnlyByMeta(
List<NodesDeprecationCheckAction.NodeResponse> nodeResponses
) {
Map<DeprecationIssue, List<Tuple<DeprecationIssue, String>>> issuesToMerge = new HashMap<>();
for (NodesDeprecationCheckAction.NodeResponse resp : nodeResponses) {
for (DeprecationIssue issue : resp.getDeprecationIssues()) {
issuesToMerge.computeIfAbsent(
new DeprecationIssue(
issue.getLevel(),
issue.getMessage(),
issue.getUrl(),
issue.getDetails(),
issue.isResolveDuringRollingUpgrade(),
null // Intentionally removing meta from the key so that it's not taken into account for equality
),
(key) -> new ArrayList<>()
).add(new Tuple<>(issue, resp.getNode().getName()));
}
}
return issuesToMerge.values();
}
/*
* At this point we have one DeprecationIssue per node for a given deprecation. This method rolls them up into a single DeprecationIssue
* with a list of nodes that they appear on. If two DeprecationIssues on two different nodes differ only by the set of removable
* settings (i.e. they have different elasticsearch.yml configurations) then this method takes the intersection of those settings when
* it rolls them up.
*/
private static Map<DeprecationIssue, List<String>> getMergedIssuesToNodesMap(
Collection<List<Tuple<DeprecationIssue, String>>> issuesToMerge
) {
Map<DeprecationIssue, List<String>> issueToListOfNodesMap = new HashMap<>();
for (List<Tuple<DeprecationIssue, String>> similarIssues : issuesToMerge) {
DeprecationIssue leastCommonDenominator = DeprecationIssue.getIntersectionOfRemovableSettings(
similarIssues.stream().map(Tuple::v1).toList()
);
issueToListOfNodesMap.computeIfAbsent(leastCommonDenominator, (key) -> new ArrayList<>())
.addAll(similarIssues.stream().map(Tuple::v2).toList());
}
return issueToListOfNodesMap;
}
public static class Response extends ActionResponse implements ToXContentObject {
static final Set<String> RESERVED_NAMES = Set.of(
"cluster_settings",
@@ -289,143 +186,6 @@ public class DeprecationInfoAction extends ActionType<DeprecationInfoAction.Resp
return Objects.hash(clusterSettingsIssues, nodeSettingsIssues, resourceDeprecationIssues, pluginSettingsIssues);
}
/**
* This is the function that does the bulk of the logic of taking the appropriate ES dependencies
* like {@link NodeInfo}, {@link ClusterState}. Alongside these objects and the list of deprecation checks,
* this function will run through all the checks and build out the final list of issues that exist in the
* cluster.
*
* @param state The cluster state
* @param indexNameExpressionResolver Used to resolve indices into their concrete names
* @param request The originating request containing the index expressions to evaluate
* @param nodeDeprecationResponse The response containing the deprecation issues found on each node
* @param clusterSettingsChecks The list of cluster-level checks
* @param pluginSettingIssues this map gets modified to move transform deprecation issues into cluster_settings
* @param skipTheseDeprecatedSettings the settings that will be removed from cluster metadata and the index metadata of all the
* indexes specified by indexNames
* @param resourceDeprecationCheckers these are checkers that take as input the cluster state and return a map from resource type
* to issues grouped by the resource name.
* @return The list of deprecation issues found in the cluster
*/
public static DeprecationInfoAction.Response from(
ClusterState state,
IndexNameExpressionResolver indexNameExpressionResolver,
Request request,
NodesDeprecationCheckResponse nodeDeprecationResponse,
List<Function<ClusterState, DeprecationIssue>> clusterSettingsChecks,
Map<String, List<DeprecationIssue>> pluginSettingIssues,
List<String> skipTheseDeprecatedSettings,
List<ResourceDeprecationChecker> resourceDeprecationCheckers
) {
assert Transports.assertNotTransportThread("walking mappings in indexSettingsChecks is expensive");
// Allow system index access here to prevent deprecation warnings when we call this API
String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
ClusterState stateWithSkippedSettingsRemoved = removeSkippedSettings(state, concreteIndexNames, skipTheseDeprecatedSettings);
List<DeprecationIssue> clusterSettingsIssues = filterChecks(
clusterSettingsChecks,
(c) -> c.apply(stateWithSkippedSettingsRemoved)
);
List<DeprecationIssue> nodeSettingsIssues = mergeNodeIssues(nodeDeprecationResponse);
Map<String, Map<String, List<DeprecationIssue>>> resourceDeprecationIssues = new HashMap<>();
for (ResourceDeprecationChecker resourceDeprecationChecker : resourceDeprecationCheckers) {
Map<String, List<DeprecationIssue>> issues = resourceDeprecationChecker.check(stateWithSkippedSettingsRemoved, request);
if (issues.isEmpty() == false) {
resourceDeprecationIssues.put(resourceDeprecationChecker.getName(), issues);
}
}
// WORKAROUND: move transform deprecation issues into cluster_settings
List<DeprecationIssue> transformDeprecations = pluginSettingIssues.remove(
TransformDeprecationChecker.TRANSFORM_DEPRECATION_KEY
);
if (transformDeprecations != null) {
clusterSettingsIssues.addAll(transformDeprecations);
}
return new DeprecationInfoAction.Response(
clusterSettingsIssues,
nodeSettingsIssues,
resourceDeprecationIssues,
pluginSettingIssues
);
}
}
/**
* Removes the skipped settings from the selected indices and the component and index templates.
* @param state The cluster state to modify
* @param indexNames The names of the indexes whose settings need to be filtered
* @param skipTheseDeprecatedSettings The settings that will be removed from cluster metadata and the index metadata of all the
* indexes specified by indexNames
* @return A modified cluster state with the given settings removed
*/
private static ClusterState removeSkippedSettings(ClusterState state, String[] indexNames, List<String> skipTheseDeprecatedSettings) {
// Short-circuit, no need to reconstruct the cluster state if there are no settings to remove
if (skipTheseDeprecatedSettings == null || skipTheseDeprecatedSettings.isEmpty()) {
return state;
}
ClusterState.Builder clusterStateBuilder = new ClusterState.Builder(state);
Metadata.Builder metadataBuilder = Metadata.builder(state.metadata());
metadataBuilder.transientSettings(
metadataBuilder.transientSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false)
);
metadataBuilder.persistentSettings(
metadataBuilder.persistentSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false)
);
Map<String, IndexMetadata> indicesBuilder = new HashMap<>(state.getMetadata().getProject().indices());
for (String indexName : indexNames) {
IndexMetadata indexMetadata = state.getMetadata().getProject().index(indexName);
IndexMetadata.Builder filteredIndexMetadataBuilder = new IndexMetadata.Builder(indexMetadata);
Settings filteredSettings = indexMetadata.getSettings()
.filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false);
filteredIndexMetadataBuilder.settings(filteredSettings);
indicesBuilder.put(indexName, filteredIndexMetadataBuilder.build());
}
metadataBuilder.componentTemplates(state.metadata().getProject().componentTemplates().entrySet().stream().map(entry -> {
String templateName = entry.getKey();
ComponentTemplate componentTemplate = entry.getValue();
Template template = componentTemplate.template();
if (template.settings() == null || template.settings().isEmpty()) {
return Tuple.tuple(templateName, componentTemplate);
}
return Tuple.tuple(
templateName,
new ComponentTemplate(
Template.builder(template)
.settings(template.settings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false))
.build(),
componentTemplate.version(),
componentTemplate.metadata(),
componentTemplate.deprecated()
)
);
}).collect(Collectors.toMap(Tuple::v1, Tuple::v2)));
metadataBuilder.indexTemplates(state.metadata().getProject().templatesV2().entrySet().stream().map(entry -> {
String templateName = entry.getKey();
ComposableIndexTemplate indexTemplate = entry.getValue();
Template template = indexTemplate.template();
if (template == null || template.settings() == null || template.settings().isEmpty()) {
return Tuple.tuple(templateName, indexTemplate);
}
return Tuple.tuple(
templateName,
indexTemplate.toBuilder()
.template(
Template.builder(indexTemplate.template())
.settings(
indexTemplate.template()
.settings()
.filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false)
)
)
.build()
);
}).collect(Collectors.toMap(Tuple::v1, Tuple::v2)));
metadataBuilder.indices(indicesBuilder);
clusterStateBuilder.metadata(metadataBuilder);
return clusterStateBuilder.build();
}
public static class Request extends MasterNodeReadRequest<Request> implements IndicesRequest.Replaceable {

View file

@@ -19,9 +19,9 @@ import org.elasticsearch.xpack.core.ilm.Phase;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks;
import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL;
import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL;
import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE;
@@ -33,17 +33,28 @@ import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.containsD
public class IlmPolicyDeprecationChecker implements ResourceDeprecationChecker {
public static final String NAME = "ilm_policies";
private static final List<Function<LifecyclePolicy, DeprecationIssue>> CHECKS = List.of(
IlmPolicyDeprecationChecker::checkLegacyTiers,
IlmPolicyDeprecationChecker::checkFrozenAction
);
private final List<Function<LifecyclePolicy, DeprecationIssue>> checks = List.of(this::checkLegacyTiers, this::checkFrozenAction);
/**
* @param clusterState The cluster state provided for the checker
* @param request not used yet in these checks
* @param precomputedData not used yet in these checks
* @return the names of the ILM policies that have violated the checks with their respective warnings.
*/
@Override
public Map<String, List<DeprecationIssue>> check(
ClusterState clusterState,
DeprecationInfoAction.Request request,
TransportDeprecationInfoAction.PrecomputedData precomputedData
) {
return check(clusterState);
}
/**
* @param clusterState The cluster state provided for the checker
* @return the names of the ILM policies that have violated the checks with their respective warnings.
*/
@Override
public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) {
Map<String, List<DeprecationIssue>> check(ClusterState clusterState) {
IndexLifecycleMetadata lifecycleMetadata = clusterState.metadata().getProject().custom(IndexLifecycleMetadata.TYPE);
if (lifecycleMetadata == null || lifecycleMetadata.getPolicyMetadatas().isEmpty()) {
return Map.of();
@@ -53,7 +64,10 @@ public class IlmPolicyDeprecationChecker implements ResourceDeprecationChecker {
String name = entry.getKey();
LifecyclePolicyMetadata policyMetadata = entry.getValue();
List<DeprecationIssue> issuesForSinglePolicy = filterChecks(CHECKS, c -> c.apply(policyMetadata.getPolicy()));
List<DeprecationIssue> issuesForSinglePolicy = checks.stream()
.map(c -> c.apply(policyMetadata.getPolicy()))
.filter(Objects::nonNull)
.toList();
if (issuesForSinglePolicy.isEmpty() == false) {
issues.put(name, issuesForSinglePolicy);
}
@@ -61,7 +75,7 @@ public class IlmPolicyDeprecationChecker implements ResourceDeprecationChecker {
return issues.isEmpty() ? Map.of() : issues;
}
static DeprecationIssue checkLegacyTiers(LifecyclePolicy policy) {
private DeprecationIssue checkLegacyTiers(LifecyclePolicy policy) {
for (Phase phase : policy.getPhases().values()) {
AllocateAction allocateAction = (AllocateAction) phase.getActions().get(AllocateAction.NAME);
if (allocateAction != null) {
@@ -82,7 +96,7 @@ public class IlmPolicyDeprecationChecker implements ResourceDeprecationChecker {
return null;
}
static DeprecationIssue checkFrozenAction(LifecyclePolicy policy) {
private DeprecationIssue checkFrozenAction(LifecyclePolicy policy) {
for (Phase phase : policy.getPhases().values()) {
if (phase.getActions().containsKey(FreezeAction.NAME)) {
return new DeprecationIssue(

View file

@@ -10,6 +10,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.TriFunction;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.LegacyFormatNames;
import org.elasticsearch.index.IndexModule;
@@ -17,17 +18,19 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.xpack.core.deprecation.DeprecatedIndexPredicate;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks;
import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL;
import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL;
@@ -39,20 +42,35 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
public static final String NAME = "index_settings";
private final IndexNameExpressionResolver indexNameExpressionResolver;
private final Map<String, List<String>> indexToTransformIds;
private final List<TriFunction<IndexMetadata, ClusterState, Map<String, List<String>>, DeprecationIssue>> checks = List.of(
this::oldIndicesCheck,
this::ignoredOldIndicesCheck,
this::translogRetentionSettingCheck,
this::checkIndexDataPath,
this::storeTypeSettingCheck,
this::deprecatedCamelCasePattern,
this::legacyRoutingSettingCheck
);
public IndexDeprecationChecker(IndexNameExpressionResolver indexNameExpressionResolver, Map<String, List<String>> indexToTransformIds) {
public IndexDeprecationChecker(IndexNameExpressionResolver indexNameExpressionResolver) {
this.indexNameExpressionResolver = indexNameExpressionResolver;
this.indexToTransformIds = indexToTransformIds;
}
@Override
public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) {
public Map<String, List<DeprecationIssue>> check(
ClusterState clusterState,
DeprecationInfoAction.Request request,
TransportDeprecationInfoAction.PrecomputedData precomputedData
) {
Map<String, List<DeprecationIssue>> indexSettingsIssues = new HashMap<>();
String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(clusterState, request);
Map<String, List<String>> indexToTransformIds = indexToTransformIds(precomputedData.transformConfigs());
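// The transform configs are fetched once by the transport action and handed over via
// precomputedData, so the index-to-transforms lookup map is built a single time here.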
for (String concreteIndex : concreteIndexNames) {
IndexMetadata indexMetadata = clusterState.getMetadata().getProject().index(concreteIndex);
List<DeprecationIssue> singleIndexIssues = filterChecks(indexSettingsChecks(), c -> c.apply(indexMetadata, clusterState));
List<DeprecationIssue> singleIndexIssues = checks.stream()
.map(c -> c.apply(indexMetadata, clusterState, indexToTransformIds))
.filter(Objects::nonNull)
.toList();
if (singleIndexIssues.isEmpty() == false) {
indexSettingsIssues.put(concreteIndex, singleIndexIssues);
}
@@ -63,24 +81,16 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
return indexSettingsIssues;
}
private List<BiFunction<IndexMetadata, ClusterState, DeprecationIssue>> indexSettingsChecks() {
return List.of(
this::oldIndicesCheck,
this::ignoredOldIndicesCheck,
IndexDeprecationChecker::translogRetentionSettingCheck,
IndexDeprecationChecker::checkIndexDataPath,
IndexDeprecationChecker::storeTypeSettingCheck,
IndexDeprecationChecker::deprecatedCamelCasePattern,
IndexDeprecationChecker::legacyRoutingSettingCheck
);
}
@Override
public String getName() {
return NAME;
}
private DeprecationIssue oldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) {
private DeprecationIssue oldIndicesCheck(
IndexMetadata indexMetadata,
ClusterState clusterState,
Map<String, List<String>> indexToTransformIds
) {
// TODO: this check needs to be revised. It's trivially true right now.
IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion();
// We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks
@@ -91,13 +101,13 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
"https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html",
"This index has version: " + currentCompatibilityVersion.toReleaseVersion(),
false,
meta(indexMetadata)
meta(indexMetadata, indexToTransformIds)
);
}
return null;
}
private Map<String, Object> meta(IndexMetadata indexMetadata) {
private Map<String, Object> meta(IndexMetadata indexMetadata, Map<String, List<String>> indexToTransformIds) {
var transforms = indexToTransformIds.getOrDefault(indexMetadata.getIndex().getName(), List.of());
if (transforms.isEmpty()) {
return Map.of("reindex_required", true);
@@ -106,7 +116,11 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
}
}
private DeprecationIssue ignoredOldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) {
private DeprecationIssue ignoredOldIndicesCheck(
IndexMetadata indexMetadata,
ClusterState clusterState,
Map<String, List<String>> indexToTransformIds
) {
IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion();
// We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks
if (DeprecatedIndexPredicate.reindexRequired(indexMetadata, true) && isNotDataStreamIndex(indexMetadata, clusterState)) {
@@ -118,17 +132,21 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
+ currentCompatibilityVersion.toReleaseVersion()
+ " and will be supported as read-only in 9.0",
false,
meta(indexMetadata)
meta(indexMetadata, indexToTransformIds)
);
}
return null;
}
private static boolean isNotDataStreamIndex(IndexMetadata indexMetadata, ClusterState clusterState) {
private boolean isNotDataStreamIndex(IndexMetadata indexMetadata, ClusterState clusterState) {
return clusterState.metadata().getProject().findDataStreams(indexMetadata.getIndex().getName()).isEmpty();
}
private static DeprecationIssue translogRetentionSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) {
private DeprecationIssue translogRetentionSettingCheck(
IndexMetadata indexMetadata,
ClusterState clusterState,
Map<String, List<String>> ignored
) {
final boolean softDeletesEnabled = IndexSettings.INDEX_SOFT_DELETES_SETTING.get(indexMetadata.getSettings());
if (softDeletesEnabled) {
if (IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.exists(indexMetadata.getSettings())
@@ -155,7 +173,7 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
return null;
}
private static DeprecationIssue checkIndexDataPath(IndexMetadata indexMetadata, ClusterState clusterState) {
private DeprecationIssue checkIndexDataPath(IndexMetadata indexMetadata, ClusterState clusterState, Map<String, List<String>> ignored) {
if (IndexMetadata.INDEX_DATA_PATH_SETTING.exists(indexMetadata.getSettings())) {
final String message = String.format(
Locale.ROOT,
@@ -170,7 +188,11 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
return null;
}
private static DeprecationIssue storeTypeSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) {
private DeprecationIssue storeTypeSettingCheck(
IndexMetadata indexMetadata,
ClusterState clusterState,
Map<String, List<String>> ignored
) {
final String storeType = IndexModule.INDEX_STORE_TYPE_SETTING.get(indexMetadata.getSettings());
if (IndexModule.Type.SIMPLEFS.match(storeType)) {
return new DeprecationIssue(
@@ -187,7 +209,11 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
return null;
}
private static DeprecationIssue legacyRoutingSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) {
private DeprecationIssue legacyRoutingSettingCheck(
IndexMetadata indexMetadata,
ClusterState clusterState,
Map<String, List<String>> ignored
) {
List<String> deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(indexMetadata.getSettings());
if (deprecatedSettings.isEmpty()) {
return null;
@@ -203,7 +229,7 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
);
}
private static void fieldLevelMappingIssue(IndexMetadata indexMetadata, BiConsumer<MappingMetadata, Map<String, Object>> checker) {
private void fieldLevelMappingIssue(IndexMetadata indexMetadata, BiConsumer<MappingMetadata, Map<String, Object>> checker) {
if (indexMetadata.mapping() != null) {
Map<String, Object> sourceAsMap = indexMetadata.mapping().sourceAsMap();
checker.accept(indexMetadata.mapping(), sourceAsMap);
@ -221,7 +247,7 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
* @return a list of issues found in fields
*/
@SuppressWarnings("unchecked")
private static List<String> findInPropertiesRecursively(
private List<String> findInPropertiesRecursively(
String type,
Map<String, Object> parentMap,
Function<Map<?, ?>, Boolean> predicate,
@ -275,7 +301,11 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
return issues;
}
private static DeprecationIssue deprecatedCamelCasePattern(IndexMetadata indexMetadata, ClusterState clusterState) {
private DeprecationIssue deprecatedCamelCasePattern(
IndexMetadata indexMetadata,
ClusterState clusterState,
Map<String, List<String>> ignored
) {
List<String> fields = new ArrayList<>();
fieldLevelMappingIssue(
indexMetadata,
@ -283,8 +313,8 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
findInPropertiesRecursively(
mappingMetadata.type(),
sourceAsMap,
IndexDeprecationChecker::isDateFieldWithCamelCasePattern,
IndexDeprecationChecker::changeFormatToSnakeCase,
this::isDateFieldWithCamelCasePattern,
this::changeFormatToSnakeCase,
"",
""
)
@ -305,7 +335,7 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
return null;
}
private static boolean isDateFieldWithCamelCasePattern(Map<?, ?> property) {
private boolean isDateFieldWithCamelCasePattern(Map<?, ?> property) {
if ("date".equals(property.get("type")) && property.containsKey("format")) {
String[] patterns = DateFormatter.splitCombinedPatterns((String) property.get("format"));
for (String pattern : patterns) {
@ -316,7 +346,7 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
return false;
}
private static String changeFormatToSnakeCase(String type, Map.Entry<?, ?> entry) {
private String changeFormatToSnakeCase(String type, Map.Entry<?, ?> entry) {
Map<?, ?> value = (Map<?, ?>) entry.getValue();
final String formatFieldValue = (String) value.get("format");
String[] patterns = DateFormatter.splitCombinedPatterns(formatFieldValue);
@ -332,4 +362,14 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker {
sb.deleteCharAt(sb.length() - 1);
return sb.toString();
}
private Map<String, List<String>> indexToTransformIds(List<TransformConfig> transformConfigs) {
return transformConfigs.stream()
.collect(
Collectors.groupingBy(
config -> config.getDestination().getIndex(),
Collectors.mapping(TransformConfig::getId, Collectors.toList())
)
);
}
}
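// Illustrative result of indexToTransformIds (assumed data): transforms t1 and t2 writing
// to "dest-1" and t3 writing to "dest-2" yield {dest-1=[t1, t2], dest-2=[t3]}, which
// meta() uses to flag indices that transforms still depend on.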

View file

@@ -1,77 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.deprecation;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexTemplateMetadata;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks;
import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL;
import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL;
import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE;
/**
* Checks the legacy index templates for deprecation warnings.
*/
public class LegacyIndexTemplateDeprecationChecker implements ResourceDeprecationChecker {
public static final String NAME = "legacy_templates";
private static final List<Function<IndexTemplateMetadata, DeprecationIssue>> CHECKS = List.of(
LegacyIndexTemplateDeprecationChecker::checkIndexTemplates
);
/**
* @param clusterState The cluster state provided for the checker
* @return the name of the data streams that have violated the checks with their respective warnings.
*/
@Override
public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) {
var templates = clusterState.metadata().getProject().templates().entrySet();
if (templates.isEmpty()) {
return Map.of();
}
Map<String, List<DeprecationIssue>> issues = new HashMap<>();
for (Map.Entry<String, IndexTemplateMetadata> entry : templates) {
String name = entry.getKey();
IndexTemplateMetadata template = entry.getValue();
List<DeprecationIssue> issuesForSingleIndexTemplate = filterChecks(CHECKS, c -> c.apply(template));
if (issuesForSingleIndexTemplate.isEmpty() == false) {
issues.put(name, issuesForSingleIndexTemplate);
}
}
return issues.isEmpty() ? Map.of() : issues;
}
static DeprecationIssue checkIndexTemplates(IndexTemplateMetadata indexTemplateMetadata) {
List<String> deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(indexTemplateMetadata.settings());
if (deprecatedSettings.isEmpty()) {
return null;
}
return new DeprecationIssue(
DeprecationIssue.Level.WARNING,
DEPRECATION_MESSAGE,
DEPRECATION_HELP_URL,
"One or more of your legacy index templates is configured with 'index.routing.allocation.*.data' settings. "
+ DEPRECATION_COMMON_DETAIL,
false,
DeprecationIssue.createMetaMapForRemovableSettings(deprecatedSettings)
);
}
@Override
public String getName() {
return NAME;
}
}

View file

@@ -0,0 +1,138 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.deprecation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.ThreadedActionListener;
import org.elasticsearch.client.internal.Client;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Retrieves the deprecation issues from each individual node and reduces them to a single list of deprecation warnings
*/
public class NodeDeprecationChecker {
private static final Logger logger = LogManager.getLogger(NodeDeprecationChecker.class);
private final ThreadPool threadPool;
public NodeDeprecationChecker(ThreadPool threadPool) {
this.threadPool = threadPool;
}
public void check(Client client, ActionListener<List<DeprecationIssue>> listener) {
NodesDeprecationCheckRequest nodeDepReq = new NodesDeprecationCheckRequest("_all");
ClientHelper.executeAsyncWithOrigin(
client,
ClientHelper.DEPRECATION_ORIGIN,
NodesDeprecationCheckAction.INSTANCE,
nodeDepReq,
new ThreadedActionListener<>(threadPool.generic(), listener.delegateFailureAndWrap((l, response) -> {
if (response.hasFailures()) {
List<String> failedNodeIds = response.failures()
.stream()
.map(failure -> failure.nodeId() + ": " + failure.getMessage())
.collect(Collectors.toList());
logger.warn("nodes failed to run deprecation checks: {}", failedNodeIds);
for (FailedNodeException failure : response.failures()) {
logger.debug("node {} failed to run deprecation checks: {}", failure.nodeId(), failure);
}
}
l.onResponse(reduceToDeprecationIssues(response));
}))
);
}
/**
* This method rolls up DeprecationIssues that are identical but on different nodes. It also rolls up DeprecationIssues that are
* identical (and on different nodes) except that they differ in the removable settings listed in their meta object. We roll these up
* by taking the intersection of all removable settings in otherwise identical DeprecationIssues. That way we don't claim that a
* setting can be automatically removed if any node has it in its elasticsearch.yml.
* @param response the response that contains the deprecation issues of single nodes
* @return a list of deprecation issues grouped accordingly.
*/
static List<DeprecationIssue> reduceToDeprecationIssues(NodesDeprecationCheckResponse response) {
// A collection whose values are lists of DeprecationIssues that differ only by meta values (if at all):
Collection<List<Tuple<DeprecationIssue, String>>> issuesToMerge = getDeprecationIssuesThatDifferOnlyByMeta(response.getNodes());
// A map of DeprecationIssues (containing only the intersection of removable settings) to the nodes they are seen on
Map<DeprecationIssue, List<String>> issueToListOfNodesMap = getMergedIssuesToNodesMap(issuesToMerge);
return issueToListOfNodesMap.entrySet().stream().map(entry -> {
DeprecationIssue issue = entry.getKey();
String details = issue.getDetails() != null ? issue.getDetails() + " " : "";
return new DeprecationIssue(
issue.getLevel(),
issue.getMessage(),
issue.getUrl(),
details + "(nodes impacted: " + entry.getValue() + ")",
issue.isResolveDuringRollingUpgrade(),
issue.getMeta()
);
}).collect(Collectors.toList());
}
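To make the intersection semantics of the javadoc above concrete: if node1 can remove settings 1 through 3 but node2 can only remove 2 and 3, the merged issue may only claim 2 and 3. A stand-alone sketch under that assumption, using plain lists instead of the real DeprecationIssue meta objects:

import java.util.ArrayList;
import java.util.List;

class RemovableSettingsIntersectionSketch {
    public static void main(String[] args) {
        List<List<String>> perNodeRemovable = List.of(
            List.of("setting.1", "setting.2", "setting.3"), // from node1's elasticsearch.yml
            List.of("setting.2", "setting.3")               // from node2's elasticsearch.yml
        );
        List<String> intersection = new ArrayList<>(perNodeRemovable.get(0));
        for (List<String> nodeSettings : perNodeRemovable.subList(1, perNodeRemovable.size())) {
            intersection.retainAll(nodeSettings); // keep only settings removable on every node
        }
        System.out.println(intersection + " (nodes impacted: [node1, node2])"); // [setting.2, setting.3] ...
    }
}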
/*
* This method pulls all the DeprecationIssues from the given nodeResponses, and buckets them into lists of DeprecationIssues that
* differ at most by meta values (if at all). The returned tuples also contain the name of the node the deprecation issue was found on. If all
* nodes in the cluster were configured identically then all tuples in a list will differ only by the node name.
*/
private static Collection<List<Tuple<DeprecationIssue, String>>> getDeprecationIssuesThatDifferOnlyByMeta(
List<NodesDeprecationCheckAction.NodeResponse> nodeResponses
) {
Map<DeprecationIssue, List<Tuple<DeprecationIssue, String>>> issuesToMerge = new HashMap<>();
for (NodesDeprecationCheckAction.NodeResponse resp : nodeResponses) {
for (DeprecationIssue issue : resp.getDeprecationIssues()) {
issuesToMerge.computeIfAbsent(
new DeprecationIssue(
issue.getLevel(),
issue.getMessage(),
issue.getUrl(),
issue.getDetails(),
issue.isResolveDuringRollingUpgrade(),
null // Intentionally removing meta from the key so that it's not taken into account for equality
),
(key) -> new ArrayList<>()
).add(new Tuple<>(issue, resp.getNode().getName()));
}
}
return issuesToMerge.values();
}
/*
* At this point we have one DeprecationIssue per node for a given deprecation. This method rolls them up into a single DeprecationIssue
* with a list of the nodes it appears on. If two DeprecationIssues on two different nodes differ only by the set of removable
* settings (i.e. they have different elasticsearch.yml configurations) then this method takes the intersection of those settings when
* it rolls them up.
*/
private static Map<DeprecationIssue, List<String>> getMergedIssuesToNodesMap(
Collection<List<Tuple<DeprecationIssue, String>>> issuesToMerge
) {
Map<DeprecationIssue, List<String>> issueToListOfNodesMap = new HashMap<>();
for (List<Tuple<DeprecationIssue, String>> similarIssues : issuesToMerge) {
DeprecationIssue leastCommonDenominator = DeprecationIssue.getIntersectionOfRemovableSettings(
similarIssues.stream().map(Tuple::v1).toList()
);
issueToListOfNodesMap.computeIfAbsent(leastCommonDenominator, (key) -> new ArrayList<>())
.addAll(similarIssues.stream().map(Tuple::v2).toList());
}
return issueToListOfNodesMap;
}
}

View file

@@ -41,6 +41,60 @@ import static org.elasticsearch.xpack.core.security.authc.RealmSettings.RESERVED
public class NodeDeprecationChecks {
// Visible for testing
static final List<
NodeDeprecationCheck<Settings, PluginsAndModules, ClusterState, XPackLicenseState, DeprecationIssue>> SINGLE_NODE_CHECKS = List.of(
NodeDeprecationChecks::checkMultipleDataPaths,
NodeDeprecationChecks::checkDataPathsList,
NodeDeprecationChecks::checkSharedDataPathSetting,
NodeDeprecationChecks::checkReservedPrefixedRealmNames,
NodeDeprecationChecks::checkExporterUseIngestPipelineSettings,
NodeDeprecationChecks::checkExporterPipelineMasterTimeoutSetting,
NodeDeprecationChecks::checkExporterCreateLegacyTemplateSetting,
NodeDeprecationChecks::checkMonitoringSettingHistoryDuration,
NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecovery,
NodeDeprecationChecks::checkMonitoringSettingCollectIndices,
NodeDeprecationChecks::checkMonitoringSettingCollectCcrTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectEnrichStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecoveryStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectIndexStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectMlJobStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectNodeStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingCollectClusterStatsTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersHost,
NodeDeprecationChecks::checkMonitoringSettingExportersBulkTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersConnectionTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersConnectionReadTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersAuthUsername,
NodeDeprecationChecks::checkMonitoringSettingExportersAuthPass,
NodeDeprecationChecks::checkMonitoringSettingExportersSSL,
NodeDeprecationChecks::checkMonitoringSettingExportersProxyBase,
NodeDeprecationChecks::checkMonitoringSettingExportersSniffEnabled,
NodeDeprecationChecks::checkMonitoringSettingExportersHeaders,
NodeDeprecationChecks::checkMonitoringSettingExportersTemplateTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersMasterTimeout,
NodeDeprecationChecks::checkMonitoringSettingExportersEnabled,
NodeDeprecationChecks::checkMonitoringSettingExportersType,
NodeDeprecationChecks::checkMonitoringSettingExportersAlertsEnabled,
NodeDeprecationChecks::checkMonitoringSettingExportersAlertsBlacklist,
NodeDeprecationChecks::checkMonitoringSettingExportersIndexNameTimeFormat,
NodeDeprecationChecks::checkMonitoringSettingDecommissionAlerts,
NodeDeprecationChecks::checkMonitoringSettingEsCollectionEnabled,
NodeDeprecationChecks::checkMonitoringSettingCollectionEnabled,
NodeDeprecationChecks::checkMonitoringSettingCollectionInterval,
NodeDeprecationChecks::checkScriptContextCache,
NodeDeprecationChecks::checkScriptContextCompilationsRateLimitSetting,
NodeDeprecationChecks::checkScriptContextCacheSizeSetting,
NodeDeprecationChecks::checkScriptContextCacheExpirationSetting,
NodeDeprecationChecks::checkEnforceDefaultTierPreferenceSetting,
NodeDeprecationChecks::checkLifecyleStepMasterTimeoutSetting,
NodeDeprecationChecks::checkEqlEnabledSetting,
NodeDeprecationChecks::checkNodeAttrData,
NodeDeprecationChecks::checkWatcherBulkConcurrentRequestsSetting,
NodeDeprecationChecks::checkTracingApmSettings
);
static DeprecationIssue checkDeprecatedSetting(
final Settings clusterSettings,
final Settings nodeSettings,
@@ -77,15 +131,6 @@ public class NodeDeprecationChecks {
return canAutoRemoveSetting ? DeprecationIssue.createMetaMapForRemovableSettings(removableSettings) : null;
}
static DeprecationIssue checkRemovedSetting(
final Settings clusterSettings,
final Settings nodeSettings,
final Setting<?> removedSetting,
final String url
) {
return checkRemovedSetting(clusterSettings, nodeSettings, removedSetting, url, null, DeprecationIssue.Level.CRITICAL);
}
static DeprecationIssue checkRemovedSetting(
final Settings clusterSettings,
final Settings nodeSettings,
@@ -1012,4 +1057,9 @@ public class NodeDeprecationChecks {
DeprecationIssue.Level.CRITICAL
);
}
@FunctionalInterface
public interface NodeDeprecationCheck<A, B, C, D, R> {
R apply(A first, B second, C third, D fourth);
}
}
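Each SINGLE_NODE_CHECKS entry is a four-argument function of this interface that returns an issue or null. A self-contained sketch of how such checks are applied and the nulls dropped, with toy String stand-ins for the real Settings, PluginsAndModules, ClusterState, and XPackLicenseState parameters:

import java.util.List;
import java.util.Objects;

class NodeCheckSketch {
    @FunctionalInterface
    interface NodeDeprecationCheck<A, B, C, D, R> {
        R apply(A first, B second, C third, D fourth);
    }

    public static void main(String[] args) {
        // Two toy checks over String "settings": one fires, one does not.
        List<NodeDeprecationCheck<String, String, String, String, String>> checks = List.of(
            (settings, plugins, state, license) -> settings.contains("deprecated") ? "issue: deprecated setting" : null,
            (settings, plugins, state, license) -> null
        );
        List<String> issues = checks.stream()
            .map(c -> c.apply("some.deprecated.property", "plugins", "state", "license"))
            .filter(Objects::nonNull)
            .toList();
        System.out.println(issues); // [issue: deprecated setting]
    }
}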

View file

@@ -24,8 +24,14 @@ public interface ResourceDeprecationChecker {
* This runs the checks for the current deprecation checker.
*
* @param clusterState The cluster state provided for the checker
* @param request The deprecation request that triggered this check
* @param precomputedData Data that has been retrieved via remote requests and might be useful to the checks
*/
Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request);
Map<String, List<DeprecationIssue>> check(
ClusterState clusterState,
DeprecationInfoAction.Request request,
TransportDeprecationInfoAction.PrecomputedData precomputedData
);
/**
* @return The name of the checker

View file

@@ -19,9 +19,9 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks;
import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL;
import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL;
import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE;
@@ -32,20 +32,34 @@ import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATI
public class TemplateDeprecationChecker implements ResourceDeprecationChecker {
public static final String NAME = "templates";
private static final List<Function<ComposableIndexTemplate, DeprecationIssue>> INDEX_TEMPLATE_CHECKS = List.of(
TemplateDeprecationChecker::checkLegacyTiersInIndexTemplate
private final List<Function<ComposableIndexTemplate, DeprecationIssue>> indexTemplateChecks = List.of(
this::checkLegacyTiersInIndexTemplate
);
private static final List<Function<ComponentTemplate, DeprecationIssue>> COMPONENT_TEMPLATE_CHECKS = List.of(
TemplateDeprecationChecker::checkSourceModeInComponentTemplates,
TemplateDeprecationChecker::checkLegacyTiersInComponentTemplates
private final List<Function<ComponentTemplate, DeprecationIssue>> componentTemplateChecks = List.of(
this::checkSourceModeInComponentTemplates,
this::checkLegacyTiersInComponentTemplates
);
/**
* @param clusterState The cluster state provided for the checker
* @param request not used yet in these checks
* @param precomputedData not used yet in these checks
* @return the names of the index and component templates that have violated the checks, with their respective warnings.
*/
@Override
public Map<String, List<DeprecationIssue>> check(
ClusterState clusterState,
DeprecationInfoAction.Request request,
TransportDeprecationInfoAction.PrecomputedData precomputedData
) {
return check(clusterState);
}
/**
* @param clusterState The cluster state provided for the checker
* @return the names of the index and component templates that have violated the checks, with their respective warnings.
*/
@Override
public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) {
Map<String, List<DeprecationIssue>> check(ClusterState clusterState) {
var indexTemplates = clusterState.metadata().getProject().templatesV2().entrySet();
var componentTemplates = clusterState.metadata().getProject().componentTemplates().entrySet();
if (indexTemplates.isEmpty() && componentTemplates.isEmpty()) {
@@ -56,7 +70,10 @@ public class TemplateDeprecationChecker implements ResourceDeprecationChecker {
String name = entry.getKey();
ComposableIndexTemplate template = entry.getValue();
List<DeprecationIssue> issuesForSingleIndexTemplate = filterChecks(INDEX_TEMPLATE_CHECKS, c -> c.apply(template));
List<DeprecationIssue> issuesForSingleIndexTemplate = indexTemplateChecks.stream()
.map(c -> c.apply(template))
.filter(Objects::nonNull)
.toList();
if (issuesForSingleIndexTemplate.isEmpty() == false) {
issues.computeIfAbsent(name, ignored -> new ArrayList<>()).addAll(issuesForSingleIndexTemplate);
}
@@ -65,7 +82,10 @@ public class TemplateDeprecationChecker implements ResourceDeprecationChecker {
String name = entry.getKey();
ComponentTemplate template = entry.getValue();
List<DeprecationIssue> issuesForSingleIndexTemplate = filterChecks(COMPONENT_TEMPLATE_CHECKS, c -> c.apply(template));
List<DeprecationIssue> issuesForSingleIndexTemplate = componentTemplateChecks.stream()
.map(c -> c.apply(template))
.filter(Objects::nonNull)
.toList();
if (issuesForSingleIndexTemplate.isEmpty() == false) {
issues.computeIfAbsent(name, ignored -> new ArrayList<>()).addAll(issuesForSingleIndexTemplate);
}
@@ -73,7 +93,7 @@ public class TemplateDeprecationChecker implements ResourceDeprecationChecker {
return issues.isEmpty() ? Map.of() : issues;
}
static DeprecationIssue checkLegacyTiersInIndexTemplate(ComposableIndexTemplate composableIndexTemplate) {
private DeprecationIssue checkLegacyTiersInIndexTemplate(ComposableIndexTemplate composableIndexTemplate) {
Template template = composableIndexTemplate.template();
if (template != null) {
List<String> deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(template.settings());
@@ -93,7 +113,7 @@ public class TemplateDeprecationChecker implements ResourceDeprecationChecker {
return null;
}
static DeprecationIssue checkSourceModeInComponentTemplates(ComponentTemplate template) {
private DeprecationIssue checkSourceModeInComponentTemplates(ComponentTemplate template) {
if (template.template().mappings() != null) {
var sourceAsMap = (Map<?, ?>) XContentHelper.convertToMap(template.template().mappings().uncompressed(), true).v2().get("_doc");
if (sourceAsMap != null) {
@@ -115,7 +135,7 @@ public class TemplateDeprecationChecker implements ResourceDeprecationChecker {
return null;
}
static DeprecationIssue checkLegacyTiersInComponentTemplates(ComponentTemplate componentTemplate) {
private DeprecationIssue checkLegacyTiersInComponentTemplates(ComponentTemplate componentTemplate) {
Template template = componentTemplate.template();
List<String> deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(template.settings());
if (deprecatedSettings.isEmpty()) {

View file

@@ -1,48 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.deprecation;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import java.util.ArrayList;
import java.util.List;
class TransformDeprecationChecker implements DeprecationChecker {
public static final String TRANSFORM_DEPRECATION_KEY = "transform_settings";
private final List<TransformConfig> transformConfigs;
TransformDeprecationChecker(List<TransformConfig> transformConfigs) {
this.transformConfigs = transformConfigs;
}
@Override
public boolean enabled(Settings settings) {
// always enabled
return true;
}
@Override
public void check(Components components, ActionListener<CheckResult> deprecationIssueListener) {
ActionListener.completeWith(deprecationIssueListener, () -> {
List<DeprecationIssue> allIssues = new ArrayList<>();
for (var config : transformConfigs) {
allIssues.addAll(config.checkForDeprecations(components.xContentRegistry()));
}
return new CheckResult(getName(), allIssues);
});
}
@Override
public String getName() {
return TRANSFORM_DEPRECATION_KEY;
}
}

View file

@@ -6,12 +6,11 @@
*/
package org.elasticsearch.xpack.deprecation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.GroupedActionListener;
import org.elasticsearch.action.support.RefCountingListener;
import org.elasticsearch.action.support.ThreadedActionListener;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.client.internal.OriginSettingClient;
@@ -19,14 +18,22 @@ import org.elasticsearch.client.internal.node.NodeClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.ComponentTemplate;
import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.metadata.Template;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.injection.guice.Inject;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.transport.Transports;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.action.util.PageParams;
@@ -35,24 +42,30 @@ import org.elasticsearch.xpack.core.transform.action.GetTransformAction;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.xpack.deprecation.DeprecationChecks.CLUSTER_SETTINGS_CHECKS;
public class TransportDeprecationInfoAction extends TransportMasterNodeReadAction<
DeprecationInfoAction.Request,
DeprecationInfoAction.Response> {
private static final DeprecationChecker ML_CHECKER = new MlDeprecationChecker();
private static final Logger logger = LogManager.getLogger(TransportDeprecationInfoAction.class);
public static final Setting<List<String>> SKIP_DEPRECATIONS_SETTING = Setting.stringListSetting(
"deprecation.skip_deprecated_settings",
Setting.Property.NodeScope,
Setting.Property.Dynamic
);
private static final List<DeprecationChecker> PLUGIN_CHECKERS = List.of(new MlDeprecationChecker());
private final NodeClient client;
private final IndexNameExpressionResolver indexNameExpressionResolver;
private final Settings settings;
private final NamedXContentRegistry xContentRegistry;
private volatile List<String> skipTheseDeprecations;
private final NodeDeprecationChecker nodeDeprecationChecker;
private final ClusterDeprecationChecker clusterDeprecationChecker;
private final List<ResourceDeprecationChecker> resourceDeprecationCheckers;
@Inject
public TransportDeprecationInfoAction(
@@ -79,10 +92,17 @@ public class TransportDeprecationInfoAction extends TransportMasterNodeReadActio
this.indexNameExpressionResolver = indexNameExpressionResolver;
this.settings = settings;
this.xContentRegistry = xContentRegistry;
skipTheseDeprecations = DeprecationChecks.SKIP_DEPRECATIONS_SETTING.get(settings);
skipTheseDeprecations = SKIP_DEPRECATIONS_SETTING.get(settings);
nodeDeprecationChecker = new NodeDeprecationChecker(threadPool);
clusterDeprecationChecker = new ClusterDeprecationChecker(xContentRegistry);
resourceDeprecationCheckers = List.of(
new IndexDeprecationChecker(indexNameExpressionResolver),
new DataStreamDeprecationChecker(indexNameExpressionResolver),
new TemplateDeprecationChecker(),
new IlmPolicyDeprecationChecker()
);
// Safe to register this here because it happens synchronously before the cluster service is started:
clusterService.getClusterSettings()
.addSettingsUpdateConsumer(DeprecationChecks.SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations);
clusterService.getClusterSettings().addSettingsUpdateConsumer(SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations);
}
private void setSkipDeprecations(List<String> skipDeprecations) {
@@ -102,56 +122,222 @@ public class TransportDeprecationInfoAction extends TransportMasterNodeReadActio
ClusterState state,
final ActionListener<DeprecationInfoAction.Response> listener
) {
NodesDeprecationCheckRequest nodeDepReq = new NodesDeprecationCheckRequest("_all");
ClientHelper.executeAsyncWithOrigin(
client,
ClientHelper.DEPRECATION_ORIGIN,
NodesDeprecationCheckAction.INSTANCE,
nodeDepReq,
listener.delegateFailureAndWrap((l, response) -> {
if (response.hasFailures()) {
List<String> failedNodeIds = response.failures()
.stream()
.map(failure -> failure.nodeId() + ": " + failure.getMessage())
.collect(Collectors.toList());
logger.warn("nodes failed to run deprecation checks: {}", failedNodeIds);
for (FailedNodeException failure : response.failures()) {
logger.debug("node {} failed to run deprecation checks: {}", failure.nodeId(), failure);
}
}
transformConfigs(l.delegateFailureAndWrap((ll, transformConfigs) -> {
PrecomputedData precomputedData = new PrecomputedData();
try (var refs = new RefCountingListener(checkAndCreateResponse(state, request, precomputedData, listener))) {
nodeDeprecationChecker.check(client, refs.acquire(precomputedData::setOnceNodeSettingsIssues));
transformConfigs(refs.acquire(precomputedData::setOnceTransformConfigs));
DeprecationChecker.Components components = new DeprecationChecker.Components(
xContentRegistry,
settings,
new OriginSettingClient(client, ClientHelper.DEPRECATION_ORIGIN)
);
pluginSettingIssues(
List.of(ML_CHECKER, new TransformDeprecationChecker(transformConfigs)),
components,
new ThreadedActionListener<>(
client.threadPool().generic(),
ll.map(
deprecationIssues -> DeprecationInfoAction.Response.from(
pluginSettingIssues(PLUGIN_CHECKERS, components, refs.acquire(precomputedData::setOncePluginIssues));
}
}
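The try-with-resources block completes the response listener only after all three acquired sub-listeners have been resolved. A rough stand-alone analogue of that fan-out and fan-in coordination, using CompletableFuture rather than the actual RefCountingListener API:

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicReference;

class RefCountingSketch {
    public static void main(String[] args) {
        AtomicReference<List<String>> nodeIssues = new AtomicReference<>();
        AtomicReference<List<String>> pluginIssues = new AtomicReference<>();

        CompletableFuture<Void> nodes = CompletableFuture
            .supplyAsync(() -> List.of("node issue")).thenAccept(nodeIssues::set);
        CompletableFuture<Void> plugins = CompletableFuture
            .supplyAsync(() -> List.of("plugin issue")).thenAccept(pluginIssues::set);

        // Runs once both branches have completed, like the final listener firing
        // after the try-with-resources releases the last reference.
        CompletableFuture.allOf(nodes, plugins)
            .thenRun(() -> System.out.println(nodeIssues.get() + " " + pluginIssues.get()))
            .join();
    }
}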
/**
* This is the function that combines the necessary dependencies, including the cluster state, the precomputed information in
* {@code precomputedData}, and the remaining checkers such as the cluster settings checker and the resource checkers. It runs a
* significant part of the checks and builds out the final list of issues that exist in the cluster. Because of that, it must not
* run on the transport thread, which is why it is wrapped with {@link #executeInGenericThreadpool(ActionListener)}.
*
* @param state The cluster state
* @param request The originating request containing the index expressions to evaluate
* @param precomputedData Data from remote requests necessary to construct the response
* @param responseListener The listener expecting the {@link DeprecationInfoAction.Response}
* @return The listener that should be executed after all the remote requests have completed and the {@link PrecomputedData}
* is initialised.
*/
public ActionListener<Void> checkAndCreateResponse(
ClusterState state,
DeprecationInfoAction.Request request,
PrecomputedData precomputedData,
ActionListener<DeprecationInfoAction.Response> responseListener
) {
return executeInGenericThreadpool(
ActionListener.running(
() -> responseListener.onResponse(
checkAndCreateResponse(
state,
indexNameExpressionResolver,
request,
response,
CLUSTER_SETTINGS_CHECKS,
deprecationIssues,
skipTheseDeprecations,
List.of(
new IndexDeprecationChecker(indexNameExpressionResolver, indexToTransformIds(transformConfigs)),
new DataStreamDeprecationChecker(indexNameExpressionResolver),
new TemplateDeprecationChecker(),
new IlmPolicyDeprecationChecker()
)
clusterDeprecationChecker,
resourceDeprecationCheckers,
precomputedData
)
)
)
);
}));
})
}
/**
* This is the function that combines the necessary dependencies, including the cluster state, the precomputed information in
* {@code precomputedData}, and the remaining checkers such as the cluster settings checker and the resource checkers. It runs a
* significant part of the checks and builds out the final list of issues that exist in the cluster. It must not run on the
* transport thread, which is why it is wrapped by
* {@link #checkAndCreateResponse(ClusterState, DeprecationInfoAction.Request, PrecomputedData, ActionListener)}. We keep this
* separated for testing purposes.
*
* @param state The cluster state
* @param indexNameExpressionResolver Used to resolve indices into their concrete names
* @param request The originating request containing the index expressions to evaluate
* @param skipTheseDeprecatedSettings the settings that will be removed from the cluster metadata and from the index metadata of
* all the indices targeted by the request
* @param clusterDeprecationChecker The checker that provides the cluster settings deprecation warnings
* @param resourceDeprecationCheckers checkers that take the cluster state as input and return a map from resource type to issues
* grouped by the resource name.
* @param precomputedData data from remote requests necessary to construct the response
* @return The list of deprecation issues found in the cluster
*/
static DeprecationInfoAction.Response checkAndCreateResponse(
ClusterState state,
IndexNameExpressionResolver indexNameExpressionResolver,
DeprecationInfoAction.Request request,
List<String> skipTheseDeprecatedSettings,
ClusterDeprecationChecker clusterDeprecationChecker,
List<ResourceDeprecationChecker> resourceDeprecationCheckers,
PrecomputedData precomputedData
) {
assert Transports.assertNotTransportThread("walking mappings in indexSettingsChecks is expensive");
// Allow system index access here to prevent deprecation warnings when we call this API
String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
ClusterState stateWithSkippedSettingsRemoved = removeSkippedSettings(state, concreteIndexNames, skipTheseDeprecatedSettings);
List<DeprecationIssue> clusterSettingsIssues = clusterDeprecationChecker.check(
stateWithSkippedSettingsRemoved,
precomputedData.transformConfigs()
);
Map<String, Map<String, List<DeprecationIssue>>> resourceDeprecationIssues = new HashMap<>();
for (ResourceDeprecationChecker resourceDeprecationChecker : resourceDeprecationCheckers) {
Map<String, List<DeprecationIssue>> issues = resourceDeprecationChecker.check(
stateWithSkippedSettingsRemoved,
request,
precomputedData
);
if (issues.isEmpty() == false) {
resourceDeprecationIssues.put(resourceDeprecationChecker.getName(), issues);
}
}
return new DeprecationInfoAction.Response(
clusterSettingsIssues,
precomputedData.nodeSettingsIssues(),
resourceDeprecationIssues,
precomputedData.pluginIssues()
);
}
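The loop above files each checker's non-empty result map under the checker's name and omits checkers that found nothing. A small sketch of the same aggregation with stand-in types:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

class ResourceAggregationSketch {
    record Checker(String name, Supplier<Map<String, List<String>>> check) {}

    public static void main(String[] args) {
        List<Checker> checkers = List.of(
            new Checker("templates", () -> Map.of("my-template", List.of("legacy tier setting"))),
            new Checker("ilm_policies", Map::of) // no issues: omitted from the result
        );
        Map<String, Map<String, List<String>>> byResourceType = new HashMap<>();
        for (Checker checker : checkers) {
            Map<String, List<String>> issues = checker.check().get();
            if (issues.isEmpty() == false) {
                byResourceType.put(checker.name(), issues);
            }
        }
        System.out.println(byResourceType); // {templates={my-template=[legacy tier setting]}}
    }
}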
/**
* This class holds the results of remote requests. These are either issues from checks that require remote requests, such as
* {@code nodeSettingsIssues} and {@code pluginIssues}, or metadata needed by more than one type of check, such as
* {@code transformConfigs}.
*/
public static class PrecomputedData {
private final SetOnce<List<DeprecationIssue>> nodeSettingsIssues = new SetOnce<>();
private final SetOnce<Map<String, List<DeprecationIssue>>> pluginIssues = new SetOnce<>();
private final SetOnce<List<TransformConfig>> transformConfigs = new SetOnce<>();
public void setOnceNodeSettingsIssues(List<DeprecationIssue> nodeSettingsIssues) {
this.nodeSettingsIssues.set(nodeSettingsIssues);
}
public void setOncePluginIssues(Map<String, List<DeprecationIssue>> pluginIssues) {
this.pluginIssues.set(pluginIssues);
}
public void setOnceTransformConfigs(List<TransformConfig> transformConfigs) {
this.transformConfigs.set(transformConfigs);
}
public List<DeprecationIssue> nodeSettingsIssues() {
return nodeSettingsIssues.get();
}
public Map<String, List<DeprecationIssue>> pluginIssues() {
return pluginIssues.get();
}
public List<TransformConfig> transformConfigs() {
return transformConfigs.get();
}
}
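Each field above is written exactly once by one of the asynchronous branches and then read when the response is built. A minimal analogue of that write-once contract (assuming, as with Lucene's SetOnce, that a second write is a programming error):

import java.util.concurrent.atomic.AtomicReference;

class SetOnceSketch<T> {
    private final AtomicReference<T> ref = new AtomicReference<>();

    void set(T value) {
        if (ref.compareAndSet(null, value) == false) {
            throw new IllegalStateException("already set");
        }
    }

    T get() {
        return ref.get(); // null until set
    }

    public static void main(String[] args) {
        SetOnceSketch<String> transformConfigs = new SetOnceSketch<>();
        transformConfigs.set("configs");
        System.out.println(transformConfigs.get()); // configs
        try {
            transformConfigs.set("again");
        } catch (IllegalStateException e) {
            System.out.println("second set rejected: " + e.getMessage());
        }
    }
}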
/**
* Removes the skipped settings from the cluster metadata, the selected indices, and the component and index templates.
* @param state The cluster state to modify
* @param indexNames The names of the indices whose settings need to be filtered
* @param skipTheseDeprecatedSettings The settings that will be removed from the cluster metadata and from the index metadata of
* all the indices specified by indexNames
* @return A modified cluster state with the given settings removed
*/
private static ClusterState removeSkippedSettings(ClusterState state, String[] indexNames, List<String> skipTheseDeprecatedSettings) {
// Short-circuit, no need to reconstruct the cluster state if there are no settings to remove
if (skipTheseDeprecatedSettings == null || skipTheseDeprecatedSettings.isEmpty()) {
return state;
}
ClusterState.Builder clusterStateBuilder = new ClusterState.Builder(state);
Metadata.Builder metadataBuilder = Metadata.builder(state.metadata());
metadataBuilder.transientSettings(
metadataBuilder.transientSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false)
);
metadataBuilder.persistentSettings(
metadataBuilder.persistentSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false)
);
Map<String, IndexMetadata> indicesBuilder = new HashMap<>(state.getMetadata().indices());
for (String indexName : indexNames) {
IndexMetadata indexMetadata = state.getMetadata().index(indexName);
IndexMetadata.Builder filteredIndexMetadataBuilder = new IndexMetadata.Builder(indexMetadata);
Settings filteredSettings = indexMetadata.getSettings()
.filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false);
filteredIndexMetadataBuilder.settings(filteredSettings);
indicesBuilder.put(indexName, filteredIndexMetadataBuilder.build());
}
metadataBuilder.componentTemplates(state.metadata().componentTemplates().entrySet().stream().map(entry -> {
String templateName = entry.getKey();
ComponentTemplate componentTemplate = entry.getValue();
Template template = componentTemplate.template();
if (template.settings() == null || template.settings().isEmpty()) {
return Tuple.tuple(templateName, componentTemplate);
}
return Tuple.tuple(
templateName,
new ComponentTemplate(
Template.builder(template)
.settings(template.settings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false))
.build(),
componentTemplate.version(),
componentTemplate.metadata(),
componentTemplate.deprecated()
)
);
}).collect(Collectors.toMap(Tuple::v1, Tuple::v2)));
metadataBuilder.indexTemplates(state.metadata().templatesV2().entrySet().stream().map(entry -> {
String templateName = entry.getKey();
ComposableIndexTemplate indexTemplate = entry.getValue();
Template template = indexTemplate.template();
if (template == null || template.settings() == null || template.settings().isEmpty()) {
return Tuple.tuple(templateName, indexTemplate);
}
return Tuple.tuple(
templateName,
indexTemplate.toBuilder()
.template(
Template.builder(indexTemplate.template())
.settings(
indexTemplate.template()
.settings()
.filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false)
)
)
.build()
);
}).collect(Collectors.toMap(Tuple::v1, Tuple::v2)));
metadataBuilder.indices(indicesBuilder);
clusterStateBuilder.metadata(metadataBuilder);
return clusterStateBuilder.build();
}
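The filtering above uses Regex.simpleMatch, where * is the only wildcard. A stand-alone sketch of the same skip-pattern filtering, with a simplified matcher standing in for the Elasticsearch utility and a plain map standing in for Settings:

import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

class SkipSettingsSketch {
    // Simplified simple-pattern matcher: '*' matches any run of characters.
    static boolean simpleMatch(List<String> patterns, String key) {
        return patterns.stream()
            .map(p -> Pattern.quote(p).replace("*", "\\E.*\\Q"))
            .anyMatch(key::matches);
    }

    public static void main(String[] args) {
        List<String> skip = List.of("some.deprecated.property", "some.other.*.deprecated.property");
        Map<String, String> settings = Map.of(
            "some.deprecated.property", "v1",
            "some.other.bad.deprecated.property", "v2",
            "some.undeprecated.property", "v3"
        );
        Map<String, String> filtered = settings.entrySet().stream()
            .filter(e -> simpleMatch(skip, e.getKey()) == false)
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
        System.out.println(filtered); // {some.undeprecated.property=v3}
    }
}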
static void pluginSettingIssues(
@@ -192,34 +378,21 @@ public class TransportDeprecationInfoAction extends TransportMasterNodeReadActio
client.execute(
GetTransformAction.INSTANCE,
request,
new ThreadedActionListener<>(
threadPool.generic(),
currentPageListener.delegateFailureAndWrap((delegate, getTransformConfigResponse) -> {
executeInGenericThreadpool(currentPageListener.delegateFailureAndWrap((delegate, getTransformConfigResponse) -> {
var currentPageOfConfigs = getTransformConfigResponse.getTransformConfigurations().stream();
var currentPageSize = currentPage.getFrom() + currentPage.getSize();
var totalTransformConfigCount = getTransformConfigResponse.getTransformConfigurationCount();
if (totalTransformConfigCount >= currentPageSize) {
var nextPage = new PageParams(currentPageSize, PageParams.DEFAULT_SIZE);
transformConfigs(
nextPage,
delegate.map(nextPageOfConfigs -> Stream.concat(currentPageOfConfigs, nextPageOfConfigs))
);
transformConfigs(nextPage, delegate.map(nextPageOfConfigs -> Stream.concat(currentPageOfConfigs, nextPageOfConfigs)));
} else {
delegate.onResponse(currentPageOfConfigs);
}
})
)
}))
);
}
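The recursion above keeps fetching pages and concatenating their streams until the reported total count no longer reaches past the range already requested. A self-contained sketch of that termination logic against a toy in-memory list (the real code pages with PageParams through the get-transforms action):

import java.util.List;
import java.util.stream.Stream;

class TransformPagingSketch {
    static final int PAGE_SIZE = 2; // stands in for PageParams.DEFAULT_SIZE
    static final List<String> ALL = List.of("t1", "t2", "t3", "t4", "t5");

    static Stream<String> fetchFrom(int from) {
        List<String> page = ALL.subList(from, Math.min(from + PAGE_SIZE, ALL.size()));
        int nextFrom = from + PAGE_SIZE;
        if (ALL.size() >= nextFrom) {
            // More configs may remain: fetch the next page and concatenate.
            return Stream.concat(page.stream(), fetchFrom(nextFrom));
        }
        return page.stream();
    }

    public static void main(String[] args) {
        System.out.println(fetchFrom(0).toList()); // [t1, t2, t3, t4, t5]
    }
}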
private Map<String, List<String>> indexToTransformIds(List<TransformConfig> transformConfigs) {
return transformConfigs.stream()
.collect(
Collectors.groupingBy(
config -> config.getDestination().getIndex(),
Collectors.mapping(TransformConfig::getId, Collectors.toList())
)
);
private <T> ActionListener<T> executeInGenericThreadpool(ActionListener<T> listener) {
return new ThreadedActionListener<>(threadPool.generic(), listener);
}
}
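Wrapping a listener this way moves its completion onto the generic pool so that response handling never runs on the transport thread. A stand-alone analogue of that wrapper, using a plain ExecutorService in place of ThreadedActionListener:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

class ThreadedListenerSketch {
    // Returns a callback that re-dispatches completion onto the given executor.
    static <T> Consumer<T> onExecutor(ExecutorService executor, Consumer<T> listener) {
        return response -> executor.execute(() -> listener.accept(response));
    }

    public static void main(String[] args) throws InterruptedException {
        ExecutorService generic = Executors.newSingleThreadExecutor();
        Consumer<String> listener = onExecutor(
            generic,
            response -> System.out.println(Thread.currentThread().getName() + " handled " + response)
        );
        listener.accept("deprecation response"); // handled on the pool thread, not the caller's
        generic.shutdown();
        generic.awaitTermination(5, TimeUnit.SECONDS);
    }
}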

View file

@@ -36,6 +36,7 @@ import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING;
@@ -75,10 +76,10 @@ public class TransportNodeDeprecationCheckAction extends TransportNodesAction<
this.pluginsService = pluginsService;
this.licenseState = licenseState;
this.clusterInfoService = clusterInfoService;
skipTheseDeprecations = DeprecationChecks.SKIP_DEPRECATIONS_SETTING.get(settings);
skipTheseDeprecations = TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.get(settings);
// Safe to register this here because it happens synchronously before the cluster service is started:
clusterService.getClusterSettings()
.addSettingsUpdateConsumer(DeprecationChecks.SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations);
.addSettingsUpdateConsumer(TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations);
}
private void setSkipDeprecations(List<String> skipDeprecations) {
@@ -106,13 +107,13 @@ public class TransportNodeDeprecationCheckAction extends TransportNodesAction<
@Override
protected NodesDeprecationCheckAction.NodeResponse nodeOperation(NodesDeprecationCheckAction.NodeRequest request, Task task) {
return nodeOperation(request, DeprecationChecks.NODE_SETTINGS_CHECKS);
return nodeOperation(request, NodeDeprecationChecks.SINGLE_NODE_CHECKS);
}
NodesDeprecationCheckAction.NodeResponse nodeOperation(
NodesDeprecationCheckAction.NodeRequest request,
List<
DeprecationChecks.NodeDeprecationCheck<
NodeDeprecationChecks.NodeDeprecationCheck<
Settings,
PluginsAndModules,
ClusterState,
@@ -130,10 +131,10 @@ public class TransportNodeDeprecationCheckAction extends TransportNodesAction<
.metadata(Metadata.builder(metadata).transientSettings(transientSettings).persistentSettings(persistentSettings).build())
.build();
List<DeprecationIssue> issues = DeprecationInfoAction.filterChecks(
nodeSettingsChecks,
(c) -> c.apply(filteredNodeSettings, pluginsService.info(), filteredClusterState, licenseState)
);
List<DeprecationIssue> issues = nodeSettingsChecks.stream()
.map(c -> c.apply(filteredNodeSettings, pluginsService.info(), filteredClusterState, licenseState))
.filter(Objects::nonNull)
.toList();
DeprecationIssue watermarkIssue = checkDiskLowWatermark(
filteredNodeSettings,
filteredClusterState.metadata().settings(),

View file

@@ -69,7 +69,7 @@ public class DataStreamDeprecationCheckerTests extends ESTestCase {
);
// We know that the data stream checks ignore the request.
Map<String, List<DeprecationIssue>> issuesByDataStream = checker.check(clusterState, null);
Map<String, List<DeprecationIssue>> issuesByDataStream = checker.check(clusterState);
assertThat(issuesByDataStream.size(), equalTo(1));
assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true));
assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected)));
@@ -91,7 +91,7 @@ public class DataStreamDeprecationCheckerTests extends ESTestCase {
.build();
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
Map<String, List<DeprecationIssue>> issuesByDataStream = checker.check(clusterState, null);
Map<String, List<DeprecationIssue>> issuesByDataStream = checker.check(clusterState);
assertThat(issuesByDataStream.size(), equalTo(0));
}
@@ -137,7 +137,7 @@ public class DataStreamDeprecationCheckerTests extends ESTestCase {
)
);
Map<String, List<DeprecationIssue>> issuesByDataStream = checker.check(clusterState, null);
Map<String, List<DeprecationIssue>> issuesByDataStream = checker.check(clusterState);
assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true));
assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected)));
}
@@ -297,7 +297,7 @@ public class DataStreamDeprecationCheckerTests extends ESTestCase {
)
);
Map<String, List<DeprecationIssue>> issuesByDataStream = checker.check(clusterState, null);
Map<String, List<DeprecationIssue>> issuesByDataStream = checker.check(clusterState);
assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true));
assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected)));
}

View file

@@ -1,48 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.deprecation;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.equalTo;
public class DeprecationChecksTests extends ESTestCase {
public void testFilterChecks() {
DeprecationIssue issue = createRandomDeprecationIssue();
int numChecksPassed = randomIntBetween(0, 5);
int numChecksFailed = 10 - numChecksPassed;
List<Supplier<DeprecationIssue>> checks = new ArrayList<>();
for (int i = 0; i < numChecksFailed; i++) {
checks.add(() -> issue);
}
for (int i = 0; i < numChecksPassed; i++) {
checks.add(() -> null);
}
List<DeprecationIssue> filteredIssues = DeprecationInfoAction.filterChecks(checks, Supplier::get);
assertThat(filteredIssues.size(), equalTo(numChecksFailed));
}
private static DeprecationIssue createRandomDeprecationIssue() {
String details = randomBoolean() ? randomAlphaOfLength(10) : null;
return new DeprecationIssue(
randomFrom(DeprecationIssue.Level.values()),
randomAlphaOfLength(10),
randomAlphaOfLength(10),
details,
randomBoolean(),
randomMap(1, 5, () -> Tuple.tuple(randomAlphaOfLength(4), randomAlphaOfLength(4)))
);
}
}

View file

@@ -6,50 +6,17 @@
*/
package org.elasticsearch.xpack.deprecation;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.ComponentTemplate;
import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.metadata.Template;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.indices.TestIndexNameExpressionResolver;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue.Level;
import org.junit.Assert;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.Response.RESERVED_NAMES;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.core.IsEqual.equalTo;
public class DeprecationInfoActionResponseTests extends AbstractWireSerializingTestCase<DeprecationInfoAction.Response> {
@Override
@@ -153,330 +120,11 @@ public class DeprecationInfoActionResponseTests extends AbstractWireSerializingT
return DeprecationInfoAction.Response::new;
}
public void testFrom() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all");
mapping.field("enabled", false);
mapping.endObject().endObject();
Metadata metadata = Metadata.builder()
.put(
IndexMetadata.builder("test")
.putMapping(Strings.toString(mapping))
.settings(settings(IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
)
.build();
DiscoveryNode discoveryNode = DiscoveryNodeUtils.create("test", new TransportAddress(TransportAddress.META_ADDRESS, 9300));
ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance();
boolean clusterIssueFound = randomBoolean();
boolean nodeIssueFound = randomBoolean();
boolean indexIssueFound = randomBoolean();
boolean dataStreamIssueFound = randomBoolean();
boolean indexTemplateIssueFound = randomBoolean();
boolean componentTemplateIssueFound = randomBoolean();
boolean ilmPolicyIssueFound = randomBoolean();
DeprecationIssue foundIssue = createTestDeprecationIssue();
List<Function<ClusterState, DeprecationIssue>> clusterSettingsChecks = List.of((s) -> clusterIssueFound ? foundIssue : null);
List<ResourceDeprecationChecker> resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> {
if (indexIssueFound) {
return Map.of("test", List.of(foundIssue));
}
return Map.of();
}), createResourceChecker("data_streams", (cs, req) -> {
if (dataStreamIssueFound) {
return Map.of("my-ds", List.of(foundIssue));
}
return Map.of();
}), createResourceChecker("templates", (cs, req) -> {
Map<String, List<DeprecationIssue>> issues = new HashMap<>();
if (componentTemplateIssueFound) {
issues.put("my-component-template", List.of(foundIssue));
}
if (indexTemplateIssueFound) {
issues.put("my-index-template", List.of(foundIssue));
}
return issues;
}), createResourceChecker("ilm_policies", (cs, req) -> {
if (ilmPolicyIssueFound) {
return Map.of("my-policy", List.of(foundIssue));
}
return Map.of();
}));
NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse(
new ClusterName(randomAlphaOfLength(5)),
nodeIssueFound ? List.of(new NodesDeprecationCheckAction.NodeResponse(discoveryNode, List.of(foundIssue))) : List.of(),
List.of()
);
DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY);
DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from(
state,
resolver,
request,
nodeDeprecationIssues,
clusterSettingsChecks,
new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings
List.of(),
resourceCheckers
);
if (clusterIssueFound) {
assertThat(response.getClusterSettingsIssues(), equalTo(List.of(foundIssue)));
} else {
assertThat(response.getClusterSettingsIssues(), empty());
}
if (nodeIssueFound) {
String details = foundIssue.getDetails() != null ? foundIssue.getDetails() + " " : "";
DeprecationIssue mergedFoundIssue = new DeprecationIssue(
foundIssue.getLevel(),
foundIssue.getMessage(),
foundIssue.getUrl(),
details + "(nodes impacted: [" + discoveryNode.getName() + "])",
foundIssue.isResolveDuringRollingUpgrade(),
foundIssue.getMeta()
);
assertThat(response.getNodeSettingsIssues(), equalTo(List.of(mergedFoundIssue)));
} else {
assertTrue(response.getNodeSettingsIssues().isEmpty());
}
if (indexIssueFound) {
assertThat(response.getIndexSettingsIssues(), equalTo(Map.of("test", List.of(foundIssue))));
} else {
assertTrue(response.getIndexSettingsIssues().isEmpty());
}
if (dataStreamIssueFound) {
assertThat(response.getDataStreamDeprecationIssues(), equalTo(Map.of("my-ds", List.of(foundIssue))));
} else {
assertTrue(response.getDataStreamDeprecationIssues().isEmpty());
}
if (ilmPolicyIssueFound) {
assertThat(response.getIlmPolicyDeprecationIssues(), equalTo(Map.of("my-policy", List.of(foundIssue))));
} else {
assertTrue(response.getIlmPolicyDeprecationIssues().isEmpty());
}
if (componentTemplateIssueFound == false && indexTemplateIssueFound == false) {
assertTrue(response.getTemplateDeprecationIssues().isEmpty());
} else {
if (componentTemplateIssueFound) {
assertThat(response.getTemplateDeprecationIssues().get("my-component-template"), equalTo(List.of(foundIssue)));
}
if (indexTemplateIssueFound) {
assertThat(response.getTemplateDeprecationIssues().get("my-index-template"), equalTo(List.of(foundIssue)));
}
}
}
public void testFromWithMergeableNodeIssues() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all");
mapping.field("enabled", false);
mapping.endObject().endObject();
Metadata metadata = Metadata.builder()
.put(
IndexMetadata.builder("test")
.putMapping(Strings.toString(mapping))
.settings(settings(IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
)
.build();
DiscoveryNode node1 = DiscoveryNodeUtils.builder("nodeId1")
.name("node1")
.ephemeralId("ephemeralId1")
.address("hostName1", "hostAddress1", new TransportAddress(TransportAddress.META_ADDRESS, 9300))
.roles(Set.of())
.build();
DiscoveryNode node2 = DiscoveryNodeUtils.builder("nodeId2")
.name("node2")
.ephemeralId("ephemeralId2")
.address("hostName2", "hostAddress2", new TransportAddress(TransportAddress.META_ADDRESS, 9500))
.roles(Set.of())
.build();
ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance();
Map<String, Object> metaMap1 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.1", "setting.2", "setting.3"));
Map<String, Object> metaMap2 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.2", "setting.3"));
DeprecationIssue foundIssue1 = createTestDeprecationIssue(metaMap1);
DeprecationIssue foundIssue2 = createTestDeprecationIssue(foundIssue1, metaMap2);
List<Function<ClusterState, DeprecationIssue>> clusterSettingsChecks = List.of();
List<ResourceDeprecationChecker> resourceCheckers = List.of();
NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse(
new ClusterName(randomAlphaOfLength(5)),
Arrays.asList(
new NodesDeprecationCheckAction.NodeResponse(node1, List.of(foundIssue1)),
new NodesDeprecationCheckAction.NodeResponse(node2, List.of(foundIssue2))
),
List.of()
);
DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY);
DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from(
state,
resolver,
request,
nodeDeprecationIssues,
clusterSettingsChecks,
new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings
List.of(),
resourceCheckers
);
String details = foundIssue1.getDetails() != null ? foundIssue1.getDetails() + " " : "";
DeprecationIssue mergedFoundIssue = new DeprecationIssue(
foundIssue1.getLevel(),
foundIssue1.getMessage(),
foundIssue1.getUrl(),
details + "(nodes impacted: [" + node1.getName() + ", " + node2.getName() + "])",
foundIssue1.isResolveDuringRollingUpgrade(),
foundIssue2.getMeta()
);
assertThat(response.getNodeSettingsIssues(), equalTo(List.of(mergedFoundIssue)));
}
public void testRemoveSkippedSettings() {
Settings.Builder settingsBuilder = settings(IndexVersion.current());
settingsBuilder.put("some.deprecated.property", "someValue1");
settingsBuilder.put("some.other.bad.deprecated.property", "someValue2");
settingsBuilder.put("some.undeprecated.property", "someValue3");
settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5"));
Settings inputSettings = settingsBuilder.build();
IndexMetadata dataStreamIndexMetadata = IndexMetadata.builder("ds-test-index-1")
.settings(inputSettings)
.numberOfShards(1)
.numberOfReplicas(0)
.build();
ComponentTemplate componentTemplate = new ComponentTemplate(Template.builder().settings(inputSettings).build(), null, null);
ComposableIndexTemplate indexTemplate = ComposableIndexTemplate.builder()
.template(Template.builder().settings(inputSettings))
.build();
Metadata metadata = Metadata.builder()
.put(IndexMetadata.builder("test").settings(inputSettings).numberOfShards(1).numberOfReplicas(0))
.put(dataStreamIndexMetadata, true)
.put(DataStream.builder("ds-test", List.of(dataStreamIndexMetadata.getIndex())).build())
.indexTemplates(
Map.of(
"my-index-template",
indexTemplate,
"empty-template",
ComposableIndexTemplate.builder().indexPatterns(List.of("random")).build()
)
)
.componentTemplates(Map.of("my-component-template", componentTemplate))
.persistentSettings(inputSettings)
.build();
ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance();
AtomicReference<Settings> visibleClusterSettings = new AtomicReference<>();
List<Function<ClusterState, DeprecationIssue>> clusterSettingsChecks = List.of((s) -> {
visibleClusterSettings.set(s.getMetadata().settings());
return null;
});
AtomicReference<Settings> visibleIndexSettings = new AtomicReference<>();
AtomicReference<Settings> visibleComponentTemplateSettings = new AtomicReference<>();
AtomicReference<Settings> visibleIndexTemplateSettings = new AtomicReference<>();
AtomicInteger backingIndicesCount = new AtomicInteger(0);
List<ResourceDeprecationChecker> resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> {
for (String indexName : resolver.concreteIndexNames(cs, req)) {
visibleIndexSettings.set(cs.metadata().getProject().index(indexName).getSettings());
}
return Map.of();
}), createResourceChecker("data_streams", (cs, req) -> {
cs.metadata().getProject().dataStreams().values().forEach(ds -> backingIndicesCount.set(ds.getIndices().size()));
return Map.of();
}), createResourceChecker("templates", (cs, req) -> {
cs.metadata()
.getProject()
.componentTemplates()
.values()
.forEach(template -> visibleComponentTemplateSettings.set(template.template().settings()));
cs.metadata().getProject().templatesV2().values().forEach(template -> {
if (template.template() != null && template.template().settings() != null) {
visibleIndexTemplateSettings.set(template.template().settings());
}
});
return Map.of();
}));
NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse(
new ClusterName(randomAlphaOfLength(5)),
List.of(),
List.of()
);
DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY);
DeprecationInfoAction.Response.from(
state,
resolver,
request,
nodeDeprecationIssues,
clusterSettingsChecks,
new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings
List.of("some.deprecated.property", "some.other.*.deprecated.property"),
resourceCheckers
);
settingsBuilder = settings(IndexVersion.current());
settingsBuilder.put("some.undeprecated.property", "someValue3");
settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5"));
Settings expectedSettings = settingsBuilder.build();
Settings resultClusterSettings = visibleClusterSettings.get();
Assert.assertNotNull(resultClusterSettings);
Assert.assertEquals(expectedSettings, visibleClusterSettings.get());
Settings resultIndexSettings = visibleIndexSettings.get();
Assert.assertNotNull(resultIndexSettings);
Assert.assertEquals("someValue3", resultIndexSettings.get("some.undeprecated.property"));
Assert.assertEquals(resultIndexSettings.getAsList("some.undeprecated.list.property"), List.of("someValue4", "someValue5"));
Assert.assertFalse(resultIndexSettings.hasValue("some.deprecated.property"));
Assert.assertFalse(resultIndexSettings.hasValue("some.other.bad.deprecated.property"));
assertThat(backingIndicesCount.get(), equalTo(1));
Assert.assertNotNull(visibleComponentTemplateSettings.get());
Assert.assertEquals(expectedSettings, visibleComponentTemplateSettings.get());
Assert.assertNotNull(visibleIndexTemplateSettings.get());
Assert.assertEquals(expectedSettings, visibleIndexTemplateSettings.get());
}
public void testCtorFailure() {
Map<String, List<DeprecationIssue>> indexNames = Stream.generate(() -> randomAlphaOfLength(10))
.limit(10)
.collect(Collectors.toMap(Function.identity(), (_k) -> List.of()));
Map<String, List<DeprecationIssue>> dataStreamNames = Stream.generate(() -> randomAlphaOfLength(10))
.limit(10)
.collect(Collectors.toMap(Function.identity(), (_k) -> List.of()));
Set<String> shouldCauseFailure = new HashSet<>(RESERVED_NAMES);
for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) {
Map<String, List<DeprecationIssue>> pluginSettingsIssues = randomSubsetOf(3, shouldCauseFailure).stream()
.collect(Collectors.toMap(Function.identity(), (_k) -> List.of()));
expectThrows(
ElasticsearchStatusException.class,
() -> new DeprecationInfoAction.Response(
List.of(),
List.of(),
Map.of("data_streams", dataStreamNames, "index_settings", indexNames),
pluginSettingsIssues
)
);
}
}
private static DeprecationIssue createTestDeprecationIssue() {
static DeprecationIssue createTestDeprecationIssue() {
return createTestDeprecationIssue(randomMap(1, 5, () -> Tuple.tuple(randomAlphaOfLength(4), randomAlphaOfLength(4))));
}
private static DeprecationIssue createTestDeprecationIssue(Map<String, Object> metaMap) {
static DeprecationIssue createTestDeprecationIssue(Map<String, Object> metaMap) {
String details = randomBoolean() ? randomAlphaOfLength(10) : null;
return new DeprecationIssue(
randomFrom(Level.values()),
@@ -488,7 +136,7 @@ public class DeprecationInfoActionResponseTests extends AbstractWireSerializingT
);
}
private static DeprecationIssue createTestDeprecationIssue(DeprecationIssue seedIssue, Map<String, Object> metaMap) {
static DeprecationIssue createTestDeprecationIssue(DeprecationIssue seedIssue, Map<String, Object> metaMap) {
return new DeprecationIssue(
seedIssue.getLevel(),
seedIssue.getMessage(),
@@ -499,27 +147,9 @@ public class DeprecationInfoActionResponseTests extends AbstractWireSerializingT
);
}
private static List<DeprecationIssue> randomDeprecationIssues() {
static List<DeprecationIssue> randomDeprecationIssues() {
return Stream.generate(DeprecationInfoActionResponseTests::createTestDeprecationIssue)
.limit(randomIntBetween(0, 10))
.collect(Collectors.toList());
}
private static ResourceDeprecationChecker createResourceChecker(
String name,
BiFunction<ClusterState, DeprecationInfoAction.Request, Map<String, List<DeprecationIssue>>> check
) {
return new ResourceDeprecationChecker() {
@Override
public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) {
return check.apply(clusterState, request);
}
@Override
public String getName() {
return name;
}
};
}
}

View file

@@ -90,7 +90,7 @@ public class IlmPolicyDeprecationCheckerTests extends ESTestCase {
)
.build();
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState, null);
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState);
final DeprecationIssue expected = new DeprecationIssue(
DeprecationIssue.Level.WARNING,
"Configuring tiers via filtered allocation is not recommended.",
@@ -136,7 +136,7 @@ public class IlmPolicyDeprecationCheckerTests extends ESTestCase {
)
.build();
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState, null);
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState);
final DeprecationIssue expected = new DeprecationIssue(
DeprecationIssue.Level.WARNING,
"ILM policy [deprecated-action] contains the action 'freeze' that is deprecated and will be removed in a future version.",

View file

@@ -29,7 +29,11 @@ import org.elasticsearch.indices.TestIndexNameExpressionResolver;
import org.elasticsearch.snapshots.SearchableSnapshotsSettings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import org.elasticsearch.xpack.core.transform.transforms.DestConfig;
import org.elasticsearch.xpack.core.transform.transforms.SourceConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -42,10 +46,18 @@ import static org.hamcrest.Matchers.hasItem;
public class IndexDeprecationCheckerTests extends ESTestCase {
private static final IndexVersion OLD_VERSION = IndexVersion.fromId(7170099);
private final IndexNameExpressionResolver indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance();
private final IndexDeprecationChecker checker = new IndexDeprecationChecker(indexNameExpressionResolver);
private final TransportDeprecationInfoAction.PrecomputedData emptyPrecomputedData =
new TransportDeprecationInfoAction.PrecomputedData();
private final IndexMetadata.State indexMetadataState;
public IndexDeprecationCheckerTests(@Name("indexMetadataState") IndexMetadata.State indexMetadataState) {
this.indexMetadataState = indexMetadataState;
emptyPrecomputedData.setOnceNodeSettingsIssues(List.of());
emptyPrecomputedData.setOncePluginIssues(Map.of());
emptyPrecomputedData.setOnceTransformConfigs(List.of());
}
@ParametersFactory
@@ -53,11 +65,6 @@
return List.of(new Object[] { IndexMetadata.State.OPEN }, new Object[] { IndexMetadata.State.CLOSE });
}
private static final IndexVersion OLD_VERSION = IndexVersion.fromId(7170099);
private final IndexNameExpressionResolver indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance();
private final IndexDeprecationChecker checker = new IndexDeprecationChecker(indexNameExpressionResolver, Map.of());
public void testOldIndicesCheck() {
IndexMetadata indexMetadata = IndexMetadata.builder("test")
.settings(settings(OLD_VERSION))
@@ -79,14 +86,15 @@
);
Map<String, List<DeprecationIssue>> issuesByIndex = checker.check(
clusterState,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
emptyPrecomputedData
);
List<DeprecationIssue> issues = issuesByIndex.get("test");
assertEquals(singletonList(expected), issues);
}
public void testOldTransformIndicesCheck() {
var checker = new IndexDeprecationChecker(indexNameExpressionResolver, Map.of("test", List.of("test-transform")));
var checker = new IndexDeprecationChecker(indexNameExpressionResolver);
var indexMetadata = indexMetadata("test", OLD_VERSION);
var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE)
.metadata(Metadata.builder().put(indexMetadata, true))
@@ -100,15 +108,15 @@
false,
Map.of("reindex_required", true, "transform_ids", List.of("test-transform"))
);
var issuesByIndex = checker.check(clusterState, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS));
var issuesByIndex = checker.check(
clusterState,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
createContextWithTransformConfigs(Map.of("test", List.of("test-transform")))
);
assertEquals(singletonList(expected), issuesByIndex.get("test"));
}
public void testOldIndicesCheckWithMultipleTransforms() {
var checker = new IndexDeprecationChecker(
indexNameExpressionResolver,
Map.of("test", List.of("test-transform1", "test-transform2"))
);
var indexMetadata = indexMetadata("test", OLD_VERSION);
var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE)
.metadata(Metadata.builder().put(indexMetadata, true))
@@ -122,15 +130,15 @@
false,
Map.of("reindex_required", true, "transform_ids", List.of("test-transform1", "test-transform2"))
);
var issuesByIndex = checker.check(clusterState, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS));
var issuesByIndex = checker.check(
clusterState,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
createContextWithTransformConfigs(Map.of("test", List.of("test-transform1", "test-transform2")))
);
assertEquals(singletonList(expected), issuesByIndex.get("test"));
}
public void testMultipleOldIndicesCheckWithTransforms() {
var checker = new IndexDeprecationChecker(
indexNameExpressionResolver,
Map.of("test1", List.of("test-transform1"), "test2", List.of("test-transform2"))
);
var indexMetadata1 = indexMetadata("test1", OLD_VERSION);
var indexMetadata2 = indexMetadata("test2", OLD_VERSION);
var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE)
@@ -161,7 +169,11 @@
)
)
);
var issuesByIndex = checker.check(clusterState, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS));
var issuesByIndex = checker.check(
clusterState,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
createContextWithTransformConfigs(Map.of("test1", List.of("test-transform1"), "test2", List.of("test-transform2")))
);
assertEquals(expected, issuesByIndex);
}
@@ -215,7 +227,8 @@
.build();
Map<String, List<DeprecationIssue>> issuesByIndex = checker.check(
clusterState,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
emptyPrecomputedData
);
assertThat(issuesByIndex.size(), equalTo(0));
}
@@ -236,7 +249,8 @@
Map<String, List<DeprecationIssue>> issuesByIndex = checker.check(
clusterState,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
emptyPrecomputedData
);
assertThat(issuesByIndex.size(), equalTo(0));
}
@@ -263,7 +277,8 @@
);
Map<String, List<DeprecationIssue>> issuesByIndex = checker.check(
clusterState,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
emptyPrecomputedData
);
assertTrue(issuesByIndex.containsKey("test"));
assertEquals(List.of(expected), issuesByIndex.get("test"));
@@ -285,7 +300,8 @@
.build();
Map<String, List<DeprecationIssue>> issuesByIndex = checker.check(
state,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
emptyPrecomputedData
);
List<DeprecationIssue> issues = issuesByIndex.get("test");
assertThat(
@@ -328,7 +344,8 @@
.build();
Map<String, List<DeprecationIssue>> issuesByIndex = checker.check(
state,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
emptyPrecomputedData
);
assertThat(issuesByIndex.size(), equalTo(0));
}
@@ -348,7 +365,8 @@
.build();
Map<String, List<DeprecationIssue>> issuesByIndex = checker.check(
state,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
emptyPrecomputedData
);
final String expectedUrl =
"https://www.elastic.co/guide/en/elasticsearch/reference/7.13/breaking-changes-7.13.html#deprecate-shared-data-path-setting";
@@ -382,7 +400,8 @@
.build();
Map<String, List<DeprecationIssue>> issuesByIndex = checker.check(
state,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
emptyPrecomputedData
);
assertThat(
issuesByIndex.get("test"),
@@ -425,7 +444,8 @@
.build();
Map<String, List<DeprecationIssue>> issuesByIndex = checker.check(
state,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
emptyPrecomputedData
);
DeprecationIssue expected = new DeprecationIssue(
DeprecationIssue.Level.CRITICAL,
@@ -456,7 +476,8 @@
.build();
Map<String, List<DeprecationIssue>> issuesByIndex = checker.check(
state,
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)
new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
emptyPrecomputedData
);
assertThat(
issuesByIndex.get("test"),
@@ -484,4 +505,23 @@
}
return builder.build();
}
private TransportDeprecationInfoAction.PrecomputedData createContextWithTransformConfigs(Map<String, List<String>> indexToTransform) {
List<TransformConfig> transforms = new ArrayList<>();
for (Map.Entry<String, List<String>> entry : indexToTransform.entrySet()) {
String index = entry.getKey();
for (String transform : entry.getValue()) {
transforms.add(
TransformConfig.builder()
.setId(transform)
.setSource(new SourceConfig(randomAlphaOfLength(10)))
.setDest(new DestConfig(index, List.of(), null))
.build()
);
}
}
TransportDeprecationInfoAction.PrecomputedData precomputedData = new TransportDeprecationInfoAction.PrecomputedData();
precomputedData.setOnceTransformConfigs(transforms);
return precomputedData;
}
}
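The setOnce* mutators used by these tests imply single-assignment semantics for PrecomputedData. A minimal sketch of that pattern, assuming a plain holder class (the shipped implementation is not shown in this diff and may differ, e.g. by building on Lucene's SetOnce):

final class SetOnceHolder<T> {
    private T value;
    private boolean set;

    // Store the value exactly once; a second call is a programming error.
    synchronized void set(T v) {
        if (set) {
            throw new IllegalStateException("value already set");
        }
        value = v;
        set = true;
    }

    // Read the value; callers must set it before any checker runs.
    synchronized T get() {
        if (set == false) {
            throw new IllegalStateException("value not set yet");
        }
        return value;
    }
}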

View file

@@ -0,0 +1,74 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.deprecation;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.xpack.deprecation.DeprecationInfoActionResponseTests.createTestDeprecationIssue;
import static org.hamcrest.core.IsEqual.equalTo;
public class NodeDeprecationCheckerTests extends ESTestCase {
public void testMergingNodeIssues() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all");
mapping.field("enabled", false);
mapping.endObject().endObject();
DiscoveryNode node1 = DiscoveryNodeUtils.builder("nodeId1")
.name("node1")
.ephemeralId("ephemeralId1")
.address("hostName1", "hostAddress1", new TransportAddress(TransportAddress.META_ADDRESS, 9300))
.roles(Set.of())
.build();
DiscoveryNode node2 = DiscoveryNodeUtils.builder("nodeId2")
.name("node2")
.ephemeralId("ephemeralId2")
.address("hostName2", "hostAddress2", new TransportAddress(TransportAddress.META_ADDRESS, 9500))
.roles(Set.of())
.build();
Map<String, Object> metaMap1 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.1", "setting.2", "setting.3"));
Map<String, Object> metaMap2 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.2", "setting.3"));
DeprecationIssue foundIssue1 = createTestDeprecationIssue(metaMap1);
DeprecationIssue foundIssue2 = createTestDeprecationIssue(foundIssue1, metaMap2);
NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse(
new ClusterName(randomAlphaOfLength(5)),
Arrays.asList(
new NodesDeprecationCheckAction.NodeResponse(node1, List.of(foundIssue1)),
new NodesDeprecationCheckAction.NodeResponse(node2, List.of(foundIssue2))
),
List.of()
);
List<DeprecationIssue> result = NodeDeprecationChecker.reduceToDeprecationIssues(nodeDeprecationIssues);
String details = foundIssue1.getDetails() != null ? foundIssue1.getDetails() + " " : "";
DeprecationIssue mergedFoundIssue = new DeprecationIssue(
foundIssue1.getLevel(),
foundIssue1.getMessage(),
foundIssue1.getUrl(),
details + "(nodes impacted: [" + node1.getName() + ", " + node2.getName() + "])",
foundIssue1.isResolveDuringRollingUpgrade(),
foundIssue2.getMeta()
);
assertThat(result, equalTo(List.of(mergedFoundIssue)));
}
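For readability, here is the merge this assertion encodes, sketched as a helper of the implied shape (the actual reduceToDeprecationIssues body is not shown in this diff; DiscoveryNode and List are imported above, Collectors is fully qualified):

// Illustrative only: fold one issue reported by several nodes into a single issue
// whose details name the impacted nodes, exactly as asserted in this test.
private static DeprecationIssue mergeAcrossNodes(DeprecationIssue issue, List<DiscoveryNode> nodes) {
    String impacted = nodes.stream().map(DiscoveryNode::getName).collect(java.util.stream.Collectors.joining(", "));
    String details = issue.getDetails() != null ? issue.getDetails() + " " : "";
    return new DeprecationIssue(
        issue.getLevel(),
        issue.getMessage(),
        issue.getUrl(),
        details + "(nodes impacted: [" + impacted + "])",
        issue.isResolveDuringRollingUpgrade(),
        issue.getMeta()
    );
}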
}

View file

@@ -30,9 +30,11 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.elasticsearch.xpack.deprecation.DeprecationChecks.NODE_SETTINGS_CHECKS;
import static org.elasticsearch.xpack.deprecation.NodeDeprecationChecks.SINGLE_NODE_CHECKS;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.not;
@@ -154,8 +156,8 @@ public class NodeDeprecationChecksTests extends ESTestCase {
.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), createTempDir())
.build();
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
final String expectedUrl =
@@ -209,8 +211,8 @@
}
final Settings settings = builder.build();
final List<DeprecationIssue> deprecationIssues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
final List<DeprecationIssue> deprecationIssues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
@@ -235,8 +237,8 @@
void monitoringSetting(String settingKey, String value) {
Settings settings = Settings.builder().put(settingKey, value).build();
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings";
@@ -259,8 +261,8 @@
String settingKey = "xpack.monitoring.exporters.test." + suffix;
Settings settings = Settings.builder().put(settingKey, value).build();
final XPackLicenseState licenseState = new XPackLicenseState(() -> 0);
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, licenseState)
);
final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings";
@@ -284,8 +286,8 @@
String subSettingKey = settingKey + ".subsetting";
Settings settings = Settings.builder().put(subSettingKey, value).build();
final XPackLicenseState licenseState = new XPackLicenseState(() -> 0);
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, licenseState)
);
final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings";
@@ -310,8 +312,8 @@
secureSettings.setString(settingKey, value);
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
final XPackLicenseState licenseState = new XPackLicenseState(() -> 0);
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, licenseState)
);
final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings";
@@ -457,8 +459,8 @@
public void testExporterUseIngestPipelineSettings() {
Settings settings = Settings.builder().put("xpack.monitoring.exporters.test.use_ingest", true).build();
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
@@ -483,8 +485,8 @@
.put("xpack.monitoring.exporters.test.index.pipeline.master_timeout", TimeValue.timeValueSeconds(10))
.build();
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
@@ -508,8 +510,8 @@
public void testExporterCreateLegacyTemplateSetting() {
Settings settings = Settings.builder().put("xpack.monitoring.exporters.test.index.template.create_legacy_templates", true).build();
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
@@ -535,8 +537,8 @@
.put(ScriptService.SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.getKey(), "use-context")
.build();
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
@@ -564,8 +566,8 @@
.put(ScriptService.SCRIPT_MAX_COMPILATIONS_RATE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), "456/7m")
.build();
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
@@ -601,8 +603,8 @@
.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), "2453")
.build();
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
@@ -639,8 +641,8 @@
.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), 200)
.build();
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
@@ -676,8 +678,8 @@
.put(ScriptService.SCRIPT_CACHE_EXPIRE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), "2d")
.build();
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
@@ -708,8 +710,8 @@
public void testEnforceDefaultTierPreferenceSetting() {
Settings settings = Settings.builder().put(DataTier.ENFORCE_DEFAULT_TIER_PREFERENCE_SETTING.getKey(), randomBoolean()).build();
List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
NODE_SETTINGS_CHECKS,
List<DeprecationIssue> issues = filterChecks(
SINGLE_NODE_CHECKS,
c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
@@ -731,8 +733,8 @@
}
private List<DeprecationIssue> getDeprecationIssues(Settings settings, PluginsAndModules pluginsAndModules) {
final List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
DeprecationChecks.NODE_SETTINGS_CHECKS,
final List<DeprecationIssue> issues = filterChecks(
NodeDeprecationChecks.SINGLE_NODE_CHECKS,
c -> c.apply(settings, pluginsAndModules, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
);
@@ -799,8 +801,8 @@
}
Metadata metadata = metadataBuilder.build();
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
final List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
DeprecationChecks.NODE_SETTINGS_CHECKS,
final List<DeprecationIssue> issues = filterChecks(
NodeDeprecationChecks.SINGLE_NODE_CHECKS,
c -> c.apply(nodettings, pluginsAndModules, clusterState, licenseState)
);
@@ -832,4 +834,8 @@
);
assertThat(issues, hasItem(expected));
}
static <T> List<DeprecationIssue> filterChecks(List<T> checks, Function<T, DeprecationIssue> mapper) {
return checks.stream().map(mapper).filter(Objects::nonNull).toList();
}
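Each entry in SINGLE_NODE_CHECKS is applied as a quad-function over node settings, plugins, cluster state and license state, and may return null. A hypothetical check, sketched only to show the shape filterChecks expects (name, setting key, message and URL are placeholders, not shipped checks):

static DeprecationIssue checkSomeLegacySetting(
    Settings settings,
    PluginsAndModules pluginsAndModules,
    ClusterState clusterState,
    XPackLicenseState licenseState
) {
    // Returning null means "no issue"; filterChecks drops nulls.
    if (settings.hasValue("some.legacy.setting") == false) {
        return null;
    }
    return new DeprecationIssue(
        DeprecationIssue.Level.WARNING,
        "some.legacy.setting is deprecated",
        "https://ela.st/example-deprecation",
        null,
        false,
        null
    );
}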
}

View file

@@ -48,7 +48,7 @@ public class TemplateDeprecationCheckerTests extends ESTestCase {
)
.build();
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState, null);
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState);
final DeprecationIssue expected = new DeprecationIssue(
DeprecationIssue.Level.CRITICAL,
SourceFieldMapper.DEPRECATION_WARNING,
@@ -81,7 +81,7 @@
)
.build();
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState, null);
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState);
final DeprecationIssue expected = new DeprecationIssue(
DeprecationIssue.Level.WARNING,
"Configuring tiers via filtered allocation is not recommended.",
@@ -121,7 +121,7 @@
)
.build();
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState, null);
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState);
final DeprecationIssue expected = new DeprecationIssue(
DeprecationIssue.Level.WARNING,
"Configuring tiers via filtered allocation is not recommended.",
@@ -164,7 +164,7 @@
)
.build();
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState, null);
Map<String, List<DeprecationIssue>> issuesByComponentTemplate = checker.check(clusterState);
final DeprecationIssue expectedIndexTemplateIssue = new DeprecationIssue(
DeprecationIssue.Level.WARNING,
"Configuring tiers via filtered allocation is not recommended.",

View file

@@ -6,22 +6,295 @@
*/
package org.elasticsearch.xpack.deprecation;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.ComponentTemplate;
import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.metadata.Template;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.indices.TestIndexNameExpressionResolver;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
import org.hamcrest.core.IsEqual;
import org.junit.Assert;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.Response.RESERVED_NAMES;
import static org.elasticsearch.xpack.deprecation.DeprecationInfoActionResponseTests.createTestDeprecationIssue;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TransportDeprecationInfoActionTests extends ESTestCase {
public void testCheckAndCreateResponse() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all");
mapping.field("enabled", false);
mapping.endObject().endObject();
Metadata metadata = Metadata.builder()
.put(
IndexMetadata.builder("test")
.putMapping(Strings.toString(mapping))
.settings(settings(IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
)
.build();
DiscoveryNode discoveryNode = DiscoveryNodeUtils.create("test", new TransportAddress(TransportAddress.META_ADDRESS, 9300));
ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance();
boolean clusterIssueFound = randomBoolean();
boolean nodeIssueFound = randomBoolean();
boolean indexIssueFound = randomBoolean();
boolean dataStreamIssueFound = randomBoolean();
boolean indexTemplateIssueFound = randomBoolean();
boolean componentTemplateIssueFound = randomBoolean();
boolean ilmPolicyIssueFound = randomBoolean();
DeprecationIssue foundIssue = createTestDeprecationIssue();
ClusterDeprecationChecker clusterDeprecationChecker = mock(ClusterDeprecationChecker.class);
when(clusterDeprecationChecker.check(any(), any())).thenReturn(clusterIssueFound ? List.of(foundIssue) : List.of());
List<ResourceDeprecationChecker> resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> {
if (indexIssueFound) {
return Map.of("test", List.of(foundIssue));
}
return Map.of();
}), createResourceChecker("data_streams", (cs, req) -> {
if (dataStreamIssueFound) {
return Map.of("my-ds", List.of(foundIssue));
}
return Map.of();
}), createResourceChecker("templates", (cs, req) -> {
Map<String, List<DeprecationIssue>> issues = new HashMap<>();
if (componentTemplateIssueFound) {
issues.put("my-component-template", List.of(foundIssue));
}
if (indexTemplateIssueFound) {
issues.put("my-index-template", List.of(foundIssue));
}
return issues;
}), createResourceChecker("ilm_policies", (cs, req) -> {
if (ilmPolicyIssueFound) {
return Map.of("my-policy", List.of(foundIssue));
}
return Map.of();
}));
List<DeprecationIssue> nodeDeprecationIssues = nodeIssueFound ? List.of(foundIssue) : List.of();
DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY);
TransportDeprecationInfoAction.PrecomputedData precomputedData = new TransportDeprecationInfoAction.PrecomputedData();
precomputedData.setOnceTransformConfigs(List.of());
precomputedData.setOncePluginIssues(Map.of());
precomputedData.setOnceNodeSettingsIssues(nodeDeprecationIssues);
DeprecationInfoAction.Response response = TransportDeprecationInfoAction.checkAndCreateResponse(
state,
resolver,
request,
List.of(),
clusterDeprecationChecker,
resourceCheckers,
precomputedData
);
if (clusterIssueFound) {
assertThat(response.getClusterSettingsIssues(), IsEqual.equalTo(List.of(foundIssue)));
} else {
assertThat(response.getClusterSettingsIssues(), empty());
}
if (nodeIssueFound) {
assertThat(response.getNodeSettingsIssues(), IsEqual.equalTo(List.of(foundIssue)));
} else {
assertTrue(response.getNodeSettingsIssues().isEmpty());
}
if (indexIssueFound) {
assertThat(response.getIndexSettingsIssues(), IsEqual.equalTo(Map.of("test", List.of(foundIssue))));
} else {
assertTrue(response.getIndexSettingsIssues().isEmpty());
}
if (dataStreamIssueFound) {
assertThat(response.getDataStreamDeprecationIssues(), IsEqual.equalTo(Map.of("my-ds", List.of(foundIssue))));
} else {
assertTrue(response.getDataStreamDeprecationIssues().isEmpty());
}
if (ilmPolicyIssueFound) {
assertThat(response.getIlmPolicyDeprecationIssues(), IsEqual.equalTo(Map.of("my-policy", List.of(foundIssue))));
} else {
assertTrue(response.getIlmPolicyDeprecationIssues().isEmpty());
}
if (componentTemplateIssueFound == false && indexTemplateIssueFound == false) {
assertTrue(response.getTemplateDeprecationIssues().isEmpty());
} else {
if (componentTemplateIssueFound) {
assertThat(response.getTemplateDeprecationIssues().get("my-component-template"), IsEqual.equalTo(List.of(foundIssue)));
}
if (indexTemplateIssueFound) {
assertThat(response.getTemplateDeprecationIssues().get("my-index-template"), IsEqual.equalTo(List.of(foundIssue)));
}
}
}
public void testRemoveSkippedSettings() {
Settings.Builder settingsBuilder = settings(IndexVersion.current());
settingsBuilder.put("some.deprecated.property", "someValue1");
settingsBuilder.put("some.other.bad.deprecated.property", "someValue2");
settingsBuilder.put("some.undeprecated.property", "someValue3");
settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5"));
Settings inputSettings = settingsBuilder.build();
IndexMetadata dataStreamIndexMetadata = IndexMetadata.builder("ds-test-index-1")
.settings(inputSettings)
.numberOfShards(1)
.numberOfReplicas(0)
.build();
ComponentTemplate componentTemplate = new ComponentTemplate(Template.builder().settings(inputSettings).build(), null, null);
ComposableIndexTemplate indexTemplate = ComposableIndexTemplate.builder()
.template(Template.builder().settings(inputSettings))
.build();
Metadata metadata = Metadata.builder()
.put(IndexMetadata.builder("test").settings(inputSettings).numberOfShards(1).numberOfReplicas(0))
.put(dataStreamIndexMetadata, true)
.put(DataStream.builder("ds-test", List.of(dataStreamIndexMetadata.getIndex())).build())
.indexTemplates(
Map.of(
"my-index-template",
indexTemplate,
"empty-template",
ComposableIndexTemplate.builder().indexPatterns(List.of("random")).build()
)
)
.componentTemplates(Map.of("my-component-template", componentTemplate))
.persistentSettings(inputSettings)
.build();
ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance();
AtomicReference<Settings> visibleClusterSettings = new AtomicReference<>();
ClusterDeprecationChecker clusterDeprecationChecker = mock(ClusterDeprecationChecker.class);
when(clusterDeprecationChecker.check(any(), any())).thenAnswer(invocationOnMock -> {
ClusterState observedState = invocationOnMock.getArgument(0);
visibleClusterSettings.set(observedState.getMetadata().settings());
return List.of();
});
AtomicReference<Settings> visibleIndexSettings = new AtomicReference<>();
AtomicReference<Settings> visibleComponentTemplateSettings = new AtomicReference<>();
AtomicReference<Settings> visibleIndexTemplateSettings = new AtomicReference<>();
AtomicInteger backingIndicesCount = new AtomicInteger(0);
List<ResourceDeprecationChecker> resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> {
for (String indexName : resolver.concreteIndexNames(cs, req)) {
visibleIndexSettings.set(cs.metadata().index(indexName).getSettings());
}
return Map.of();
}), createResourceChecker("data_streams", (cs, req) -> {
cs.metadata().dataStreams().values().forEach(ds -> backingIndicesCount.set(ds.getIndices().size()));
return Map.of();
}), createResourceChecker("templates", (cs, req) -> {
cs.metadata()
.componentTemplates()
.values()
.forEach(template -> visibleComponentTemplateSettings.set(template.template().settings()));
cs.metadata().templatesV2().values().forEach(template -> {
if (template.template() != null && template.template().settings() != null) {
visibleIndexTemplateSettings.set(template.template().settings());
}
});
return Map.of();
}));
TransportDeprecationInfoAction.PrecomputedData precomputedData = new TransportDeprecationInfoAction.PrecomputedData();
precomputedData.setOnceTransformConfigs(List.of());
precomputedData.setOncePluginIssues(Map.of());
precomputedData.setOnceNodeSettingsIssues(List.of());
DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY);
TransportDeprecationInfoAction.checkAndCreateResponse(
state,
resolver,
request,
List.of("some.deprecated.property", "some.other.*.deprecated.property"),
clusterDeprecationChecker,
resourceCheckers,
precomputedData
);
settingsBuilder = settings(IndexVersion.current());
settingsBuilder.put("some.undeprecated.property", "someValue3");
settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5"));
Settings expectedSettings = settingsBuilder.build();
Settings resultClusterSettings = visibleClusterSettings.get();
Assert.assertNotNull(resultClusterSettings);
Assert.assertEquals(expectedSettings, visibleClusterSettings.get());
Settings resultIndexSettings = visibleIndexSettings.get();
Assert.assertNotNull(resultIndexSettings);
Assert.assertEquals("someValue3", resultIndexSettings.get("some.undeprecated.property"));
Assert.assertEquals(resultIndexSettings.getAsList("some.undeprecated.list.property"), List.of("someValue4", "someValue5"));
Assert.assertFalse(resultIndexSettings.hasValue("some.deprecated.property"));
Assert.assertFalse(resultIndexSettings.hasValue("some.other.bad.deprecated.property"));
assertThat(backingIndicesCount.get(), IsEqual.equalTo(1));
Assert.assertNotNull(visibleComponentTemplateSettings.get());
Assert.assertEquals(expectedSettings, visibleComponentTemplateSettings.get());
Assert.assertNotNull(visibleIndexTemplateSettings.get());
Assert.assertEquals(expectedSettings, visibleIndexTemplateSettings.get());
}
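A minimal sketch of the filtering this test verifies, assuming the skip list is applied with simple '*' wildcard matching before the cluster state reaches the checkers (helper name is hypothetical; Regex is org.elasticsearch.common.regex.Regex):

private static Settings removeSkippedSettings(Settings settings, List<String> skipThese) {
    String[] patterns = skipThese.toArray(new String[0]);
    // Keep only keys that match none of the skip patterns.
    return settings.filter(key -> Regex.simpleMatch(patterns, key) == false);
}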
public void testCtorFailure() {
Map<String, List<DeprecationIssue>> indexNames = Stream.generate(() -> randomAlphaOfLength(10))
.limit(10)
.collect(Collectors.toMap(Function.identity(), (_k) -> List.of()));
Map<String, List<DeprecationIssue>> dataStreamNames = Stream.generate(() -> randomAlphaOfLength(10))
.limit(10)
.collect(Collectors.toMap(Function.identity(), (_k) -> List.of()));
Set<String> shouldCauseFailure = new HashSet<>(RESERVED_NAMES);
for (int i = 0; i < randomIntBetween(1, 100); i++) {
Map<String, List<DeprecationIssue>> pluginSettingsIssues = randomSubsetOf(3, shouldCauseFailure).stream()
.collect(Collectors.toMap(Function.identity(), (_k) -> List.of()));
expectThrows(
ElasticsearchStatusException.class,
() -> new DeprecationInfoAction.Response(
List.of(),
List.of(),
Map.of("data_streams", dataStreamNames, "index_settings", indexNames),
pluginSettingsIssues
)
);
}
}
public void testPluginSettingIssues() {
DeprecationChecker.Components components = new DeprecationChecker.Components(null, Settings.EMPTY, null);
PlainActionFuture<Map<String, List<DeprecationIssue>>> future = new PlainActionFuture<>();
@@ -65,6 +338,28 @@ public class TransportDeprecationInfoActionTests extends ESTestCase {
assertThat(exception.getCause().getMessage(), containsString("boom"));
}
private static ResourceDeprecationChecker createResourceChecker(
String name,
BiFunction<ClusterState, DeprecationInfoAction.Request, Map<String, List<DeprecationIssue>>> check
) {
return new ResourceDeprecationChecker() {
@Override
public Map<String, List<DeprecationIssue>> check(
ClusterState clusterState,
DeprecationInfoAction.Request request,
TransportDeprecationInfoAction.PrecomputedData precomputedData
) {
return check.apply(clusterState, request);
}
@Override
public String getName() {
return name;
}
};
}
private static class NamedChecker implements DeprecationChecker {
private final String name;

View file

@@ -61,7 +61,7 @@ public class TransportNodeDeprecationCheckActionTests extends ESTestCase {
settingsBuilder.put("some.undeprecated.property", "someValue3");
settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5"));
settingsBuilder.putList(
DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(),
TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(),
List.of("some.deprecated.property", "some.other.*.deprecated.property", "some.bad.dynamic.property")
);
Settings nodeSettings = settingsBuilder.build();
@@ -73,7 +73,10 @@
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
ClusterService clusterService = Mockito.mock(ClusterService.class);
when(clusterService.state()).thenReturn(clusterState);
ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, Set.of(DeprecationChecks.SKIP_DEPRECATIONS_SETTING));
ClusterSettings clusterSettings = new ClusterSettings(
nodeSettings,
Set.of(TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING)
);
when((clusterService.getClusterSettings())).thenReturn(clusterSettings);
DiscoveryNode node = Mockito.mock(DiscoveryNode.class);
when(node.getId()).thenReturn("mock-node");
@@ -98,7 +101,7 @@
NodesDeprecationCheckAction.NodeRequest nodeRequest = null;
AtomicReference<Settings> visibleNodeSettings = new AtomicReference<>();
AtomicReference<Settings> visibleClusterStateMetadataSettings = new AtomicReference<>();
DeprecationChecks.NodeDeprecationCheck<
NodeDeprecationChecks.NodeDeprecationCheck<
Settings,
PluginsAndModules,
ClusterState,
@@ -109,7 +112,7 @@
return null;
};
java.util.List<
DeprecationChecks.NodeDeprecationCheck<
NodeDeprecationChecks.NodeDeprecationCheck<
Settings,
PluginsAndModules,
ClusterState,
@@ -120,7 +123,7 @@
settingsBuilder.put("some.undeprecated.property", "someValue3");
settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5"));
settingsBuilder.putList(
DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(),
TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(),
List.of("some.deprecated.property", "some.other.*.deprecated.property", "some.bad.dynamic.property")
);
Settings expectedSettings = settingsBuilder.build();
@@ -131,7 +134,7 @@
// Testing that the setting is dynamically updatable:
Settings newSettings = Settings.builder()
.putList(DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.undeprecated.property"))
.putList(TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.undeprecated.property"))
.build();
clusterSettings.applySettings(newSettings);
transportNodeDeprecationCheckAction.nodeOperation(nodeRequest, nodeSettingsChecks);
@@ -141,7 +144,7 @@
settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5"));
// This is the node setting (since this is the node deprecation check), not the cluster setting:
settingsBuilder.putList(
DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(),
TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(),
List.of("some.deprecated.property", "some.other.*.deprecated.property", "some.bad.dynamic.property")
);
expectedSettings = settingsBuilder.build();
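The dynamic-update assertion above depends on how SKIP_DEPRECATIONS_SETTING is registered, which this diff does not show. A sketch of the shape such a setting would need (key and default are assumptions):

public static final Setting<List<String>> SKIP_DEPRECATIONS_SETTING = Setting.listSetting(
    "deprecation.skip_deprecated_settings",  // assumed key
    List.of(),
    Function.identity(),
    Setting.Property.NodeScope,
    Setting.Property.Dynamic  // required for clusterSettings.applySettings(...) to take effect
);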

View file

@@ -10,18 +10,18 @@ package org.elasticsearch.xpack.downsample;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType;
public final class AggregateMetricFieldValueFetcher extends FieldValueFetcher {
private final AggregateDoubleMetricFieldType aggMetricFieldType;
private final AggregateMetricDoubleFieldType aggMetricFieldType;
private final AbstractDownsampleFieldProducer fieldProducer;
AggregateMetricFieldValueFetcher(
MappedFieldType fieldType,
AggregateDoubleMetricFieldType aggMetricFieldType,
AggregateMetricDoubleFieldType aggMetricFieldType,
IndexFieldData<?> fieldData
) {
super(fieldType.name(), fieldType, fieldData);
@@ -34,7 +34,7 @@ public final class AggregateMetricFieldValueFetcher extends FieldValueFetcher {
}
private AbstractDownsampleFieldProducer createFieldProducer() {
AggregateDoubleMetricFieldMapper.Metric metric = null;
AggregateMetricDoubleFieldMapper.Metric metric = null;
for (var e : aggMetricFieldType.getMetricFields().entrySet()) {
NumberFieldMapper.NumberFieldType metricSubField = e.getValue();
if (metricSubField.name().equals(name())) {
@@ -52,7 +52,7 @@ public final class AggregateMetricFieldValueFetcher extends FieldValueFetcher {
case min -> new MetricFieldProducer.Min();
case sum -> new MetricFieldProducer.Sum();
// To compute value_count summary, we must sum all field values
case value_count -> new MetricFieldProducer.Sum(AggregateDoubleMetricFieldMapper.Metric.value_count.name());
case value_count -> new MetricFieldProducer.Sum(AggregateMetricDoubleFieldMapper.Metric.value_count.name());
};
return new MetricFieldProducer.GaugeMetricFieldProducer(aggMetricFieldType.name(), metricOperation);
} else {

View file

@@ -15,7 +15,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper;
import java.util.ArrayList;
import java.util.Collections;
@@ -82,7 +82,7 @@ class FieldValueFetcher {
MappedFieldType fieldType = context.getFieldType(field);
assert fieldType != null : "Unknown field type for field: [" + field + "]";
if (fieldType instanceof AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType aggMetricFieldType) {
if (fieldType instanceof AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType aggMetricFieldType) {
// If the field is an aggregate_metric_double field, we should load all its subfields
// This is a downsample-of-downsample case
for (NumberFieldMapper.NumberFieldType metricSubField : aggMetricFieldType.getMetricFields().values()) {

View file

@@ -12,7 +12,7 @@ import org.elasticsearch.index.fielddata.FormattedDocValues;
import org.elasticsearch.index.fielddata.HistogramValue;
import org.elasticsearch.index.mapper.flattened.FlattenedFieldSyntheticWriterHelper;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric;
import java.io.IOException;
import java.util.ArrayList;

View file

@@ -77,7 +77,7 @@ import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.downsample.DownsampleShardPersistentTaskState;
import org.elasticsearch.xpack.core.downsample.DownsampleShardTask;
@@ -756,9 +756,9 @@ public class TransportDownsampleAction extends AcknowledgedTransportMasterNodeAc
final String[] supportedAggsArray = metricType.supportedAggs();
// We choose max as the default metric
final String defaultMetric = List.of(supportedAggsArray).contains("max") ? "max" : supportedAggsArray[0];
builder.field("type", AggregateDoubleMetricFieldMapper.CONTENT_TYPE)
.array(AggregateDoubleMetricFieldMapper.Names.METRICS, supportedAggsArray)
.field(AggregateDoubleMetricFieldMapper.Names.DEFAULT_METRIC, defaultMetric)
builder.field("type", AggregateMetricDoubleFieldMapper.CONTENT_TYPE)
.array(AggregateMetricDoubleFieldMapper.Names.METRICS, supportedAggsArray)
.field(AggregateMetricDoubleFieldMapper.Names.DEFAULT_METRIC, defaultMetric)
.field(TIME_SERIES_METRIC_PARAM, metricType);
}
builder.endObject();
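For orientation, the branch above emits a field mapping of roughly this shape for a gauge metric supporting min/max/sum/value_count (field name and metric list here are examples, not taken from this diff):

// "my_gauge": {
//   "type": "aggregate_metric_double",
//   "metrics": [ "min", "max", "sum", "value_count" ],
//   "default_metric": "max",
//   "time_series_metric": "gauge"
// }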

View file

@@ -33,6 +33,7 @@ import static org.elasticsearch.compute.gen.Types.AGGREGATOR_FUNCTION_SUPPLIER;
import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT;
import static org.elasticsearch.compute.gen.Types.LIST_INTEGER;
import static org.elasticsearch.compute.gen.Types.STRING;
import static org.elasticsearch.compute.gen.Types.WARNINGS;
/**
* Implements "AggregationFunctionSupplier" from a class annotated with both
@@ -139,8 +140,9 @@ public class AggregatorFunctionSupplierImplementer {
if (hasWarnings) {
builder.addStatement(
"var warnings = Warnings.createWarnings(driverContext.warningsMode(), "
+ "warningsLineNumber, warningsColumnNumber, warningsSourceText)"
"var warnings = $T.createWarnings(driverContext.warningsMode(), "
+ "warningsLineNumber, warningsColumnNumber, warningsSourceText)",
WARNINGS
);
}
@@ -164,8 +166,8 @@
if (hasWarnings) {
builder.addStatement(
"var warnings = Warnings.createWarnings(driverContext.warningsMode(), "
+ "warningsLineNumber, warningsColumnNumber, warningsSourceText)"
"var warnings = $T.createWarnings(driverContext.warningsMode(), "
+ "warningsLineNumber, warningsColumnNumber, warningsSourceText)",
WARNINGS
);
}
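The move from a literal "Warnings" to $T is what makes the generated sources compile without a hand-managed import: in JavaPoet, $T substitutes the given type and records it for the generated file's import list. Assuming Types.WARNINGS points at the engine's Warnings class, the emitted statement is textually unchanged:

// builder.addStatement("var warnings = $T.createWarnings(...)", WARNINGS)
// emits: var warnings = Warnings.createWarnings(...);  (plus the matching import)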

View file

@@ -8,7 +8,9 @@
package org.elasticsearch.xpack.esql.action;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.compute.operator.exchange.ExchangeService;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.FailingFieldPlugin;
@@ -27,9 +29,23 @@ import static org.hamcrest.Matchers.equalTo;
*/
@ESIntegTestCase.ClusterScope(minNumDataNodes = 2)
public class EsqlNodeFailureIT extends AbstractEsqlIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return CollectionUtils.appendToCopy(super.nodePlugins(), FailingFieldPlugin.class);
var plugins = new ArrayList<>(super.nodePlugins());
plugins.add(FailingFieldPlugin.class);
plugins.add(InternalExchangePlugin.class);
return plugins;
}
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
Settings settings = Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
.put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(3000, 4000)))
.build();
logger.info("settings {}", settings);
return settings;
}
/**
@@ -49,7 +65,7 @@ public class EsqlNodeFailureIT extends AbstractEsqlIntegTestCase {
mapping.endObject();
client().admin().indices().prepareCreate("fail").setSettings(indexSettings(1, 0)).setMapping(mapping.endObject()).get();
int docCount = 100;
int docCount = 50;
List<IndexRequestBuilder> docs = new ArrayList<>(docCount);
for (int d = 0; d < docCount; d++) {
docs.add(client().prepareIndex("ok").setSource("foo", d));

View file

@@ -254,7 +254,7 @@ public class Match extends FullTextFunction implements OptionalArgument, PostAna
valueHint = { "none", "all" },
description = "Number of beginning characters left unchanged for fuzzy matching."
) },
description = "Match additional options as <<esql-function-named-params,function named parameters>>."
description = "(Optional) Match additional options as <<esql-function-named-params,function named parameters>>."
+ " See <<query-dsl-match-query,match query>> for more information.",
optional = true
) Expression options

View file

@@ -48,6 +48,7 @@ import java.lang.invoke.MethodType;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -99,16 +100,11 @@ final class AggregateMapper {
/** Map of AggDef types to intermediate named expressions. */
private static final Map<AggDef, List<IntermediateStateDesc>> MAPPER = AGG_FUNCTIONS.stream()
.flatMap(AggregateMapper::typeAndNames)
.flatMap(AggregateMapper::groupingAndNonGrouping)
.flatMap(AggregateMapper::aggDefs)
.collect(Collectors.toUnmodifiableMap(aggDef -> aggDef, AggregateMapper::lookupIntermediateState));
/** Cache of aggregates to intermediate expressions. */
private final HashMap<Expression, List<NamedExpression>> cache;
AggregateMapper() {
cache = new HashMap<>();
}
private final HashMap<Expression, List<NamedExpression>> cache = new HashMap<>();
public List<NamedExpression> mapNonGrouping(List<? extends NamedExpression> aggregates) {
return doMapping(aggregates, false);
@@ -167,7 +163,7 @@
return l;
}
private static Stream<Tuple<Class<?>, Tuple<String, String>>> typeAndNames(Class<?> clazz) {
private static Stream<AggDef> aggDefs(Class<?> clazz) {
List<String> types;
List<String> extraConfigs = List.of("");
if (NumericAggregate.class.isAssignableFrom(clazz)) {
@@ -197,32 +193,26 @@
assert false : "unknown aggregate type " + clazz;
throw new IllegalArgumentException("unknown aggregate type " + clazz);
}
return combine(clazz, types, extraConfigs);
}
return combinations(types, extraConfigs).flatMap(typeAndExtraConfig -> {
var type = typeAndExtraConfig.v1();
var extra = typeAndExtraConfig.v2();
private static Stream<Tuple<Class<?>, Tuple<String, String>>> combine(Class<?> clazz, List<String> types, List<String> extraConfigs) {
return combinations(types, extraConfigs).map(combo -> new Tuple<>(clazz, combo));
if (clazz.isAssignableFrom(Rate.class)) {
// rate doesn't support non-grouping aggregations
return Stream.of(new AggDef(clazz, type, extra, true));
} else if (Objects.equals(type, "AggregateMetricDouble")) {
// TODO: support grouping aggregations for aggregate metric double
return Stream.of(new AggDef(clazz, type, extra, false));
} else {
return Stream.of(new AggDef(clazz, type, extra, true), new AggDef(clazz, type, extra, false));
}
});
}
private static Stream<Tuple<String, String>> combinations(List<String> types, List<String> extraConfigs) {
return types.stream().flatMap(type -> extraConfigs.stream().map(config -> new Tuple<>(type, config)));
}
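A worked example of the two helpers above, with illustrative inputs:

// combinations(List.of("Int", "Long"), List.of(""))
//   -> ("Int", ""), ("Long", "")
// aggDefs(...) then expands each pair into a grouping and a non-grouping AggDef,
// except that Rate stays grouping-only and AggregateMetricDouble stays
// non-grouping-only, per the branches above.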
private static Stream<AggDef> groupingAndNonGrouping(Tuple<Class<?>, Tuple<String, String>> tuple) {
if (tuple.v1().isAssignableFrom(Rate.class)) {
// rate doesn't support non-grouping aggregations
return Stream.of(new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), true));
} else if (tuple.v2().v1().equals("AggregateMetricDouble")) {
// TODO: support grouping aggregations for aggregate metric double
return Stream.of(new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), false));
} else {
return Stream.of(
new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), true),
new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), false)
);
}
}
/** Retrieves the intermediate state description for a given class, type, and grouping. */
private static List<IntermediateStateDesc> lookupIntermediateState(AggDef aggDef) {
try {
@@ -264,23 +254,13 @@
/** Determines the engines agg class name, for the given class, type, and grouping. */
private static String determineAggName(Class<?> clazz, String type, String extra, boolean grouping) {
StringBuilder sb = new StringBuilder();
sb.append(determinePackageName(clazz)).append(".");
sb.append(clazz.getSimpleName());
sb.append(type);
sb.append(extra);
sb.append(grouping ? "Grouping" : "");
sb.append("AggregatorFunction");
return sb.toString();
}
/** Determines the engine agg package name, for the given class. */
private static String determinePackageName(Class<?> clazz) {
if (clazz.getSimpleName().startsWith("Spatial")) {
// All spatial aggs are in the spatial sub-package
return "org.elasticsearch.compute.aggregation.spatial";
}
return "org.elasticsearch.compute.aggregation";
return "org.elasticsearch.compute.aggregation."
+ (clazz.getSimpleName().startsWith("Spatial") ? "spatial." : "")
+ clazz.getSimpleName()
+ type
+ extra
+ (grouping ? "Grouping" : "")
+ "AggregatorFunction";
}
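An example of the class name this concatenation produces, using a case visible above:

// determineAggName(Max.class, "Ip", "", true)
//   -> "org.elasticsearch.compute.aggregation.MaxIpGroupingAggregatorFunction"
// Classes whose simple name starts with "Spatial" resolve into the
// org.elasticsearch.compute.aggregation.spatial subpackage instead.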
/** Maps intermediate state description to named expressions. */
@@ -317,19 +297,16 @@
if (aggClass == ToPartial.class || aggClass == FromPartial.class) {
return "";
}
if ((aggClass == Max.class || aggClass == Min.class) && type.equals(DataType.IP)) {
return "Ip";
}
if (aggClass == Top.class && type.equals(DataType.IP)) {
if ((aggClass == Max.class || aggClass == Min.class || aggClass == Top.class) && type.equals(DataType.IP)) {
return "Ip";
}
return switch (type) {
case DataType.BOOLEAN -> "Boolean";
case DataType.INTEGER, DataType.COUNTER_INTEGER -> "Int";
case DataType.LONG, DataType.DATETIME, DataType.COUNTER_LONG, DataType.DATE_NANOS -> "Long";
case DataType.DOUBLE, DataType.COUNTER_DOUBLE -> "Double";
case DataType.KEYWORD, DataType.IP, DataType.VERSION, DataType.TEXT, DataType.SEMANTIC_TEXT -> "BytesRef";
case BOOLEAN -> "Boolean";
case INTEGER, COUNTER_INTEGER -> "Int";
case LONG, DATETIME, COUNTER_LONG, DATE_NANOS -> "Long";
case DOUBLE, COUNTER_DOUBLE -> "Double";
case KEYWORD, IP, VERSION, TEXT, SEMANTIC_TEXT -> "BytesRef";
case GEO_POINT -> "GeoPoint";
case CARTESIAN_POINT -> "CartesianPoint";
case GEO_SHAPE -> "GeoShape";
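Illustration: the consolidated determineAggName above builds the generated aggregator class name by plain concatenation, routing spatial aggs to the spatial sub-package. A minimal, compilable sketch of that naming scheme; the class AggNameSketch and the local Max/SpatialExtent stand-in types are illustrative, not part of the codebase:

    public class AggNameSketch {
        // Mirrors the concatenation logic in the new determineAggName.
        static String determineAggName(Class<?> clazz, String type, String extra, boolean grouping) {
            return "org.elasticsearch.compute.aggregation."
                + (clazz.getSimpleName().startsWith("Spatial") ? "spatial." : "")
                + clazz.getSimpleName()
                + type
                + extra
                + (grouping ? "Grouping" : "")
                + "AggregatorFunction";
        }

        public static void main(String[] args) {
            class Max {}           // stand-in for the Max aggregation class
            class SpatialExtent {} // stand-in for a spatial aggregation class
            // Prints org.elasticsearch.compute.aggregation.MaxIpGroupingAggregatorFunction
            System.out.println(determineAggName(Max.class, "Ip", "", true));
            // Prints org.elasticsearch.compute.aggregation.spatial.SpatialExtentGeoPointAggregatorFunction
            System.out.println(determineAggName(SpatialExtent.class, "GeoPoint", "", false));
        }
    }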

View file

@@ -1253,7 +1253,7 @@ public abstract class AbstractFunctionTestCase extends ESTestCase {
builder.startObject();
builder.field("name", arg.name());
if (arg.mapArg()) {
builder.field("type", "function named parameters");
builder.field("type", "function_named_parameters");
builder.field(
"mapParams",
arg.mapParams()

View file

@@ -300,7 +300,7 @@ public abstract class BaseTransportInferenceAction<Request extends BaseInference
}
private void inferOnService(Model model, Request request, InferenceService service, ActionListener<InferenceServiceResults> listener) {
if (request.isStreaming() == false || service.canStream(request.getTaskType())) {
if (request.isStreaming() == false || service.canStream(model.getTaskType())) {
doInference(model, request, service, listener);
} else {
listener.onFailure(unsupportedStreamingTaskException(request, service));

View file

@@ -44,16 +44,8 @@ public class AnthropicResponseHandler extends BaseResponseHandler {
static final String SERVER_BUSY = "Received an Anthropic server is temporarily overloaded status code";
private final boolean canHandleStreamingResponses;
public AnthropicResponseHandler(String requestType, ResponseParser parseFunction, boolean canHandleStreamingResponses) {
super(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse);
this.canHandleStreamingResponses = canHandleStreamingResponses;
}
@Override
public boolean canHandleStreamingResponses() {
return canHandleStreamingResponses;
super(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse, canHandleStreamingResponses);
}
@Override

View file

@@ -34,16 +34,9 @@ import java.util.concurrent.Flow;
public class CohereResponseHandler extends BaseResponseHandler {
static final String TEXTS_ARRAY_TOO_LARGE_MESSAGE_MATCHER = "invalid request: total number of texts must be at most";
static final String TEXTS_ARRAY_ERROR_MESSAGE = "Received a texts array too large response";
private final boolean canHandleStreamingResponse;
public CohereResponseHandler(String requestType, ResponseParser parseFunction, boolean canHandleStreamingResponse) {
super(requestType, parseFunction, CohereErrorResponseEntity::fromResponse);
this.canHandleStreamingResponse = canHandleStreamingResponse;
}
@Override
public boolean canHandleStreamingResponses() {
return canHandleStreamingResponse;
super(requestType, parseFunction, CohereErrorResponseEntity::fromResponse, canHandleStreamingResponse);
}
@Override

View file

@@ -21,6 +21,10 @@ public class ElasticInferenceServiceResponseHandler extends BaseResponseHandler
super(requestType, parseFunction, ElasticInferenceServiceErrorResponseEntity::fromResponse);
}
public ElasticInferenceServiceResponseHandler(String requestType, ResponseParser parseFunction, boolean canHandleStreamingResponses) {
super(requestType, parseFunction, ElasticInferenceServiceErrorResponseEntity::fromResponse, canHandleStreamingResponses);
}
@Override
protected void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException {
if (result.isSuccessfulResponse()) {

View file

@@ -20,12 +20,7 @@ import java.util.concurrent.Flow;
public class ElasticInferenceServiceUnifiedChatCompletionResponseHandler extends ElasticInferenceServiceResponseHandler {
public ElasticInferenceServiceUnifiedChatCompletionResponseHandler(String requestType, ResponseParser parseFunction) {
super(requestType, parseFunction);
}
@Override
public boolean canHandleStreamingResponses() {
return true;
super(requestType, parseFunction, true);
}
@Override

View file

@@ -28,7 +28,6 @@ import static org.elasticsearch.core.Strings.format;
public class GoogleAiStudioResponseHandler extends BaseResponseHandler {
static final String GOOGLE_AI_STUDIO_UNAVAILABLE = "The Google AI Studio service may be temporarily overloaded or down";
private final boolean canHandleStreamingResponses;
private final CheckedFunction<XContentParser, String, IOException> content;
public GoogleAiStudioResponseHandler(String requestType, ResponseParser parseFunction) {
@@ -44,8 +43,7 @@ public class GoogleAiStudioResponseHandler extends BaseResponseHandler {
boolean canHandleStreamingResponses,
CheckedFunction<XContentParser, String, IOException> content
) {
super(requestType, parseFunction, GoogleAiStudioErrorResponseEntity::fromResponse);
this.canHandleStreamingResponses = canHandleStreamingResponses;
super(requestType, parseFunction, GoogleAiStudioErrorResponseEntity::fromResponse, canHandleStreamingResponses);
this.content = content;
}
@@ -88,11 +86,6 @@ public class GoogleAiStudioResponseHandler extends BaseResponseHandler {
return format("Resource not found at [%s]", request.getURI());
}
@Override
public boolean canHandleStreamingResponses() {
return canHandleStreamingResponses;
}
@Override
public InferenceServiceResults parseResult(Request request, Flow.Publisher<HttpResult> flow) {
var serverSentEventProcessor = new ServerSentEventProcessor(new ServerSentEventParser());

View file

@@ -38,11 +38,27 @@ public abstract class BaseResponseHandler implements ResponseHandler {
protected final String requestType;
private final ResponseParser parseFunction;
private final Function<HttpResult, ErrorResponse> errorParseFunction;
private final boolean canHandleStreamingResponses;
public BaseResponseHandler(String requestType, ResponseParser parseFunction, Function<HttpResult, ErrorResponse> errorParseFunction) {
this(requestType, parseFunction, errorParseFunction, false);
}
public BaseResponseHandler(
String requestType,
ResponseParser parseFunction,
Function<HttpResult, ErrorResponse> errorParseFunction,
boolean canHandleStreamingResponses
) {
this.requestType = Objects.requireNonNull(requestType);
this.parseFunction = Objects.requireNonNull(parseFunction);
this.errorParseFunction = Objects.requireNonNull(errorParseFunction);
this.canHandleStreamingResponses = canHandleStreamingResponses;
}
@Override
public boolean canHandleStreamingResponses() {
return canHandleStreamingResponses;
}
@Override
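Illustration: after this refactor the streaming capability is plumbed through the BaseResponseHandler constructor, so provider handlers (Anthropic, Cohere, OpenAI above) no longer override canHandleStreamingResponses(). A minimal sketch of the resulting shape; ResponseHandlerSketch, ExampleProviderResponseHandler, and the nested stubs are local illustrations, not the real classes:

    import java.util.Objects;
    import java.util.function.Function;

    public class ResponseHandlerSketch {
        interface ResponseParser {}
        record HttpResult() {}
        record ErrorResponse() {}

        static class BaseResponseHandler {
            private final boolean canHandleStreamingResponses;

            BaseResponseHandler(String requestType, ResponseParser parseFunction,
                                Function<HttpResult, ErrorResponse> errorParseFunction,
                                boolean canHandleStreamingResponses) {
                Objects.requireNonNull(requestType);
                Objects.requireNonNull(parseFunction);
                Objects.requireNonNull(errorParseFunction);
                this.canHandleStreamingResponses = canHandleStreamingResponses;
            }

            public boolean canHandleStreamingResponses() {
                return canHandleStreamingResponses;
            }
        }

        // A provider handler now just forwards the flag; no override needed.
        static class ExampleProviderResponseHandler extends BaseResponseHandler {
            ExampleProviderResponseHandler(String requestType, ResponseParser parseFunction, boolean canStream) {
                super(requestType, parseFunction, result -> new ErrorResponse(), canStream);
            }
        }

        public static void main(String[] args) {
            var handler = new ExampleProviderResponseHandler("completion", new ResponseParser() {}, true);
            System.out.println(handler.canHandleStreamingResponses()); // true
        }
    }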

View file

@@ -52,11 +52,8 @@ public interface ResponseHandler {
/**
* Returns {@code true} if the response handler can handle streaming results, or {@code false} if it can only parse the entire payload.
* Defaults to {@code false}.
*/
default boolean canHandleStreamingResponses() {
return false;
}
boolean canHandleStreamingResponses();
/**
* A method for parsing the streamed response from the server. Implementations must invoke the

View file

@@ -41,11 +41,8 @@ public class OpenAiResponseHandler extends BaseResponseHandler {
static final String OPENAI_SERVER_BUSY = "Received a server busy error status code";
private final boolean canHandleStreamingResponses;
public OpenAiResponseHandler(String requestType, ResponseParser parseFunction, boolean canHandleStreamingResponses) {
super(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse);
this.canHandleStreamingResponses = canHandleStreamingResponses;
super(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse, canHandleStreamingResponses);
}
/**
@@ -121,11 +118,6 @@ public class OpenAiResponseHandler extends BaseResponseHandler {
return RATE_LIMIT + ". " + usageMessage;
}
@Override
public boolean canHandleStreamingResponses() {
return canHandleStreamingResponses;
}
@Override
public InferenceServiceResults parseResult(Request request, Flow.Publisher<HttpResult> flow) {
var serverSentEventProcessor = new ServerSentEventProcessor(new ServerSentEventParser());

View file

@@ -15,6 +15,12 @@ import org.elasticsearch.xpack.inference.external.request.Request;
import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
public abstract class AmazonBedrockResponseHandler implements ResponseHandler {
@Override
public boolean canHandleStreamingResponses() {
return false;
}
@Override
public final void validateResponse(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result)
throws RetryException {

View file

@@ -37,7 +37,7 @@ import java.util.Objects;
import java.util.Set;
public abstract class SenderService implements InferenceService {
protected static final Set<TaskType> COMPLETION_ONLY = EnumSet.of(TaskType.COMPLETION, TaskType.ANY);
protected static final Set<TaskType> COMPLETION_ONLY = EnumSet.of(TaskType.COMPLETION);
private final Sender sender;
private final ServiceComponents serviceComponents;

View file

@@ -248,10 +248,6 @@ public class ElasticInferenceService extends SenderService {
var authorizedStreamingTaskTypes = EnumSet.of(TaskType.CHAT_COMPLETION);
authorizedStreamingTaskTypes.retainAll(authRef.get().taskTypesAndModels.getAuthorizedTaskTypes());
if (authorizedStreamingTaskTypes.isEmpty() == false) {
authorizedStreamingTaskTypes.add(TaskType.ANY);
}
return authorizedStreamingTaskTypes;
}
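Illustration: the authorized streaming task types are now a plain set intersection, with no TaskType.ANY widening afterwards. A small JDK-only sketch of the EnumSet.retainAll semantics, using a stand-in enum rather than the real TaskType:

    import java.util.EnumSet;

    public class RetainAllSketch {
        // Stand-in enum; the real code uses org.elasticsearch.inference.TaskType.
        enum Task { COMPLETION, CHAT_COMPLETION, EMBEDDING }

        public static void main(String[] args) {
            EnumSet<Task> candidates = EnumSet.of(Task.CHAT_COMPLETION);
            EnumSet<Task> authorized = EnumSet.of(Task.COMPLETION, Task.EMBEDDING);
            candidates.retainAll(authorized);          // in-place set intersection
            System.out.println(candidates.isEmpty());  // true: chat completion was not granted
        }
    }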

View file

@@ -376,7 +376,7 @@ public class OpenAiService extends SenderService {
@Override
public Set<TaskType> supportedStreamingTasks() {
return EnumSet.of(TaskType.COMPLETION, TaskType.CHAT_COMPLETION, TaskType.ANY);
return EnumSet.of(TaskType.COMPLETION, TaskType.CHAT_COMPLETION);
}
/**
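Illustration: with TaskType.ANY gone from supportedStreamingTasks(), a streaming request must match one of the explicit task types. Assuming the capability check is a simple membership test over that set (a plausible reading of the inferOnService change above, not a quote of the actual SenderService code):

    import java.util.EnumSet;
    import java.util.Set;

    public class CanStreamSketch {
        // Stand-in enum; the real code uses org.elasticsearch.inference.TaskType.
        enum Task { COMPLETION, CHAT_COMPLETION, RERANK }

        // Assumed shape of the check; the real implementation may differ.
        static Set<Task> supportedStreamingTasks() {
            return EnumSet.of(Task.COMPLETION, Task.CHAT_COMPLETION);
        }

        static boolean canStream(Task task) {
            return supportedStreamingTasks().contains(task);
        }

        public static void main(String[] args) {
            System.out.println(canStream(Task.CHAT_COMPLETION)); // true
            System.out.println(canStream(Task.RERANK));          // false: no ANY wildcard
        }
    }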

View file

@@ -33,6 +33,11 @@ public class InferenceServiceNodeLocalRateLimitCalculatorTests extends ESIntegTe
public void setUp() throws Exception {
super.setUp();
assumeTrue(
"If inference_cluster_aware_rate_limiting_feature_flag_enabled=false we'll fallback to "
+ "NoopNodeLocalRateLimitCalculator, which shouldn't be tested by this class.",
InferenceAPIClusterAwareRateLimitingFeature.INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG.isEnabled()
);
}
public void testInitialClusterGrouping_Correct() throws Exception {

View file

@@ -58,4 +58,8 @@ public class AlwaysRetryingResponseHandler implements ResponseHandler {
}
}
@Override
public boolean canHandleStreamingResponses() {
return false;
}
}

View file

@@ -654,6 +654,11 @@ public class RetryingHttpSenderTests extends ESTestCase {
public String getRequestType() {
return "foo";
}
@Override
public boolean canHandleStreamingResponses() {
return false;
}
};
}
}

Some files were not shown because too many files have changed in this diff.