Merge remote-tracking branch 'es/master' into enrich

This commit is contained in:
Martijn van Groningen 2019-09-27 13:51:17 +02:00
commit f676d9730d
No known key found for this signature in database
GPG key ID: AB236F4FCF2AF12A
936 changed files with 19266 additions and 13906 deletions

View file

@@ -10,6 +10,8 @@ initscript {
}
}
boolean USE_ARTIFACTORY=false
['VAULT_ADDR', 'VAULT_ROLE_ID', 'VAULT_SECRET_ID'].each {
if (System.env."$it" == null) {
throw new GradleException("$it must be set!")
@@ -37,39 +39,44 @@ final Vault vault = new Vault(
)
.withRetries(5, 1000)
final Map<String,String> artifactoryCredentials = vault.logical()
.read("secret/elasticsearch-ci/artifactory.elstc.co")
.getData();
logger.info("Using elastic artifactory repos")
Closure configCache = {
return {
name "artifactory-gradle-release"
url "https://artifactory.elstc.co/artifactory/gradle-release"
credentials {
username artifactoryCredentials.get("username")
password artifactoryCredentials.get("token")
if (USE_ARTIFACTORY) {
final Map<String,String> artifactoryCredentials = vault.logical()
.read("secret/elasticsearch-ci/artifactory.elstc.co")
.getData();
logger.info("Using elastic artifactory repos")
Closure configCache = {
return {
name "artifactory-gradle-release"
url "https://artifactory.elstc.co/artifactory/gradle-release"
credentials {
username artifactoryCredentials.get("username")
password artifactoryCredentials.get("token")
}
}
}
}
settingsEvaluated { settings ->
settings.pluginManagement {
repositories {
maven configCache()
}
}
}
projectsLoaded {
allprojects {
buildscript {
settingsEvaluated { settings ->
settings.pluginManagement {
repositories {
maven configCache()
}
}
repositories {
maven configCache()
}
projectsLoaded {
allprojects {
buildscript {
repositories {
maven configCache()
}
}
repositories {
maven configCache()
}
}
}
}
projectsLoaded {
rootProject {
project.pluginManager.withPlugin('com.gradle.build-scan') {
buildScan.server = 'https://gradle-enterprise.elastic.co'
@@ -77,6 +84,7 @@ projectsLoaded {
}
}
final String buildCacheUrl = System.getProperty('org.elasticsearch.build.cache.url')
final boolean buildCachePush = Boolean.valueOf(System.getProperty('org.elasticsearch.build.cache.push', 'false'))

362
.eclipseformat.xml Normal file
View file

@@ -0,0 +1,362 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<profiles version="17">
<profile kind="CodeFormatterProfile" name="Elasticsearch" version="17">
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_for_statment" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_logical_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_invocation" value="separate_lines_if_wrapped"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration" value="separate_lines_if_wrapped"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_default" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.align_with_spaces" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_before_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_switch_case_expressions" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="49"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_method_body" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_if_while_statement" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.count_line_length_from_starting_position" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_multiplicative_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameterized_type_references" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_logical_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_annotation_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_multiplicative_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_descriptions_grouped" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="140"/>
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.keep_method_body_on_one_line" value="one_line_if_empty"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_loop_body_block_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_abstract_method" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.keep_enum_constant_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.align_variable_declarations_on_columns" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_type_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_catch_clause" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_additive_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_relational_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiplicative_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.keep_anonymous_type_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_switch_case_expressions" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_shift_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_lambda_body" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_bitwise_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_parameters" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_loops" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_for_body_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_relational_operator" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_annotation" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_additive_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_string_concatenation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_module_statements" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_after_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_names_descriptions" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.keep_if_then_body_block_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.align_assignment_statements_on_columns" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_default" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression_chain" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_additive_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_conditional_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_shift_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.align_fields_grouping_blank_lines" value="2147483647"/>
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_bitwise_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_try_clause" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_code_block_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_bitwise_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="49"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_assignment_operator" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_delcaration" value="separate_lines_if_wrapped"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.keep_lambda_body_block_on_one_line" value="one_line_always"/>
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_arguments" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_logical_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_bitwise_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_relational_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_tag_description" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_string_concatenation" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_last_class_body_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_while_body_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_logical_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_shift_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_statement_group_in_switch" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_lambda_declaration" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_shift_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_do_while_body_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_enum_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_multiplicative_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_for_loop_header" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_additive_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_getter_setter_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_string_concatenation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_relational_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_string_concatenation" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="140"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
</profile>
</profiles>

View file

@ -155,19 +155,68 @@ For Eclipse, go to `Preferences->Java->Installed JREs` and add `-ea` to
### Java Language Formatting Guidelines
Java files in the Elasticsearch codebase are formatted with the Eclipse JDT
formatter, using the [Spotless
Gradle](https://github.com/diffplug/spotless/tree/master/plugin-gradle)
plugin. This plugin is configured on a project-by-project basis, via
`build.gradle` in the root of the repository. So long as at least one
project is configured, the formatting check can be run explicitly with:
./gradlew spotlessJavaCheck
The code can be formatted with:
./gradlew spotlessApply
These tasks can also be run for specific subprojects, e.g.
./gradlew server:spotlessJavaCheck
Please follow these formatting guidelines:
* Java indent is 4 spaces
* Line width is 140 characters
* Lines of code surrounded by `// tag` and `// end` comments are included in the
documentation and should only be 76 characters wide not counting
leading indentation
* The rest is left to Java coding standards
* Disable “auto-format on save” to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do.
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. This can be done automatically by your IDE:
* Eclipse: `Preferences->Java->Code Style->Organize Imports`. There are two boxes labeled "`Number of (static )? imports needed for .*`". Set their values to 99999 or some other absurdly high value.
* IntelliJ: `Preferences/Settings->Editor->Code Style->Java->Imports`. There are two configuration options: `Class count to use import with '*'` and `Names count to use static import with '*'`. Set their values to 99999 or some other absurdly high value.
* Don't worry too much about import order. Try not to change it but don't worry about fighting your IDE to stop it from doing so.
* Lines of code surrounded by `// tag` and `// end` comments are included
in the documentation and should only be 76 characters wide not counting
leading indentation
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause
the build to fail. This can be done automatically by your IDE:
* Eclipse: `Preferences->Java->Code Style->Organize Imports`. There are
two boxes labeled "`Number of (static )? imports needed for .*`". Set
their values to 99999 or some other absurdly high value.
* IntelliJ: `Preferences/Settings->Editor->Code Style->Java->Imports`.
There are two configuration options: `Class count to use import with
'*'` and `Names count to use static import with '*'`. Set their values
to 99999 or some other absurdly high value.
#### Editor / IDE Support
Eclipse IDEs can import the file [elasticsearch.eclipseformat.xml]
directly.
IntelliJ IDEs can
[import](https://blog.jetbrains.com/idea/2014/01/intellij-idea-13-importing-code-formatter-settings-from-eclipse/)
the same settings file, and / or use the [Eclipse Code
Formatter](https://plugins.jetbrains.com/plugin/6546-eclipse-code-formatter)
plugin.
You can also tell Spotless to [format a specific
file](https://github.com/diffplug/spotless/tree/master/plugin-gradle#can-i-apply-spotless-to-specific-files)
from the command line.
#### Formatting failures
Sometimes Spotless will report a "misbehaving rule which can't make up its
mind" and will recommend enabling the `paddedCell()` setting. If you
enable this setting and run the format check again,
Spotless will write files to
`$PROJECT/build/spotless-diagnose-java/` to aid diagnosis. It writes
different copies of the formatted files, so that you can see how they
differ and infer what is the problem.
The `paddedCell()` option is disabled for normal operation in order to
detect any misbehaviour. You can enable the option from the command line
by running Gradle with `-Dspotless.paddedcell`.
### License Headers

View file

@ -35,6 +35,7 @@ plugins {
id 'com.gradle.build-scan' version '2.4'
id 'lifecycle-base'
id 'elasticsearch.global-build-info'
id "com.diffplug.gradle.spotless" version "3.24.2" apply false
}
apply plugin: 'nebula.info-scm'
@ -98,6 +99,34 @@ subprojects {
plugins.withType(BuildPlugin).whenPluginAdded {
project.licenseFile = project.rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
project.noticeFile = project.rootProject.file('NOTICE.txt')
// Projects that should be formatted and checked with Spotless are
// listed here, by project path. Once the number of formatted projects
// is greater than the number of unformatted projects, this can be
// switched to an exclude list, and eventually removed completely.
def projectPathsToFormat = [
// ':build-tools'
]
if (projectPathsToFormat.contains(project.path)) {
project.apply plugin: "com.diffplug.gradle.spotless"
spotless {
java {
removeUnusedImports()
eclipse().configFile rootProject.file('.eclipseformat.xml')
trimTrailingWhitespace()
// See CONTRIBUTING.md for details of when to enable this.
if (System.getProperty('spotless.paddedcell') != null) {
paddedCell()
}
}
}
precommit.dependsOn 'spotlessJavaCheck'
}
}
}

View file

@ -112,7 +112,7 @@ dependencies {
compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3'
compile 'com.netflix.nebula:nebula-publishing-plugin:4.4.4'
compile 'com.netflix.nebula:gradle-info-plugin:3.0.3'
compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r'
compile 'org.eclipse.jgit:org.eclipse.jgit:5.5.0.201909110433-r'
compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
compile 'org.apache.rat:apache-rat:0.11'
compile "org.elasticsearch:jna:4.5.1"

View file

@ -206,8 +206,11 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
response(snippet)
return
}
if (snippet.test || snippet.console ||
snippet.language == 'console') {
if ((snippet.language == 'js') && (snippet.console)) {
throw new InvalidUserDataException(
"$snippet: Use `[source,console]` instead of `// CONSOLE`.")
}
if (snippet.test || snippet.language == 'console') {
test(snippet)
previousTest = snippet
return

View file

@ -50,6 +50,7 @@ class PluginBuildPlugin implements Plugin<Project> {
@Override
void apply(Project project) {
project.pluginManager.apply(BuildPlugin)
project.pluginManager.apply(TestClustersPlugin)
PluginPropertiesExtension extension = project.extensions.create(PLUGIN_EXTENSION_NAME, PluginPropertiesExtension, project)
configureDependencies(project)

View file

@ -26,35 +26,18 @@ import org.elasticsearch.gradle.tool.Boilerplate
import org.elasticsearch.gradle.tool.ClasspathUtils
import org.gradle.api.DefaultTask
import org.gradle.api.Task
import org.gradle.api.execution.TaskExecutionAdapter
import org.gradle.api.file.FileCopyDetails
import org.gradle.api.logging.Logger
import org.gradle.api.logging.Logging
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.TaskState
import org.gradle.api.tasks.options.Option
import org.gradle.api.tasks.testing.Test
import org.gradle.plugins.ide.idea.IdeaPlugin
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.util.stream.Stream
/**
* A wrapper task around setting up a cluster and running rest tests.
*/
class RestIntegTestTask extends DefaultTask {
private static final Logger LOGGER = Logging.getLogger(RestIntegTestTask)
protected ClusterConfiguration clusterConfig
protected Test runner
/** Info about nodes in the integ test cluster. Note this is *not* available until runtime. */
List<NodeInfo> nodes
/** Flag indicating whether the rest tests in the rest spec should be run. */
@Input
Boolean includePackaged = false
@ -62,18 +45,13 @@ class RestIntegTestTask extends DefaultTask {
RestIntegTestTask() {
runner = project.tasks.create("${name}Runner", RestTestRunnerTask.class)
super.dependsOn(runner)
boolean usesTestclusters = project.plugins.hasPlugin(TestClustersPlugin.class)
if (usesTestclusters == false) {
clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project)
runner.outputs.doNotCacheIf("Caching is disabled when using ClusterFormationTasks", { true })
} else {
project.testClusters {
project.testClusters {
"$name" {
javaHome = project.file(project.ext.runtimeJavaHome)
}
}
runner.useCluster project.testClusters."$name"
}
runner.useCluster project.testClusters."$name"
runner.include('**/*IT.class')
runner.systemProperty('tests.rest.load_packaged', 'false')
@ -82,40 +60,11 @@ class RestIntegTestTask extends DefaultTask {
if (System.getProperty("tests.cluster") != null || System.getProperty("tests.clustername") != null) {
throw new IllegalArgumentException("tests.rest.cluster, tests.cluster, and tests.clustername must all be null or non-null")
}
if (usesTestclusters == true) {
ElasticsearchCluster cluster = project.testClusters."${name}"
runner.nonInputProperties.systemProperty('tests.rest.cluster', "${-> cluster.allHttpSocketURI.join(",")}")
runner.nonInputProperties.systemProperty('tests.cluster', "${-> cluster.transportPortURI}")
runner.nonInputProperties.systemProperty('tests.clustername', "${-> cluster.getName()}")
} else {
// we pass all nodes to the rest cluster to allow the clients to round-robin between them
// this is more realistic than just talking to a single node
runner.nonInputProperties.systemProperty('tests.rest.cluster', "${-> nodes.collect { it.httpUri() }.join(",")}")
runner.nonInputProperties.systemProperty('tests.config.dir', "${-> nodes[0].pathConf}")
// TODO: our "client" qa tests currently use the rest-test plugin. instead they should have their own plugin
// that sets up the test cluster and passes this transport uri instead of http uri. Until then, we pass
// both as separate sysprops
runner.nonInputProperties.systemProperty('tests.cluster', "${-> nodes[0].transportUri()}")
runner.nonInputProperties.systemProperty('tests.clustername', "${-> nodes[0].clusterName}")
// dump errors and warnings from cluster log on failure
TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() {
@Override
void afterExecute(Task task, TaskState state) {
if (task == runner && state.failure != null) {
for (NodeInfo nodeInfo : nodes) {
printLogExcerpt(nodeInfo)
}
}
}
}
runner.doFirst {
project.gradle.addListener(logDumpListener)
}
runner.doLast {
project.gradle.removeListener(logDumpListener)
}
}
ElasticsearchCluster cluster = project.testClusters."${name}"
runner.nonInputProperties.systemProperty('tests.rest.cluster', "${-> cluster.allHttpSocketURI.join(",")}")
runner.nonInputProperties.systemProperty('tests.cluster', "${-> cluster.transportPortURI}")
runner.nonInputProperties.systemProperty('tests.clustername', "${-> cluster.getName()}")
} else {
if (System.getProperty("tests.cluster") == null || System.getProperty("tests.clustername") == null) {
throw new IllegalArgumentException("tests.rest.cluster, tests.cluster, and tests.clustername must all be null or non-null")
@ -137,13 +86,6 @@ class RestIntegTestTask extends DefaultTask {
runner.enabled = false
return // no need to add cluster formation tasks if the task won't run!
}
if (usesTestclusters == false) {
// only create the cluster if needed as otherwise an external cluster to use was specified
if (System.getProperty("tests.rest.cluster") == null) {
nodes = ClusterFormationTasks.setup(project, "${name}Cluster", runner, clusterConfig)
}
super.dependsOn(runner.finalizedBy)
}
}
}
@ -152,17 +94,6 @@ class RestIntegTestTask extends DefaultTask {
includePackaged = include
}
@Option(
option = "debug-jvm",
description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
)
public void setDebug(boolean enabled) {
clusterConfig.debug = enabled;
}
public List<NodeInfo> getNodes() {
return nodes
}
@Override
public Task dependsOn(Object... dependencies) {
@ -189,44 +120,6 @@ class RestIntegTestTask extends DefaultTask {
project.tasks.getByName("${name}Runner").configure(configure)
}
/** Print out an excerpt of the log from the given node. */
protected static void printLogExcerpt(NodeInfo nodeInfo) {
File logFile = new File(nodeInfo.homeDir, "logs/${nodeInfo.clusterName}.log")
LOGGER.lifecycle("\nCluster ${nodeInfo.clusterName} - node ${nodeInfo.nodeNum} log excerpt:")
LOGGER.lifecycle("(full log at ${logFile})")
LOGGER.lifecycle('-----------------------------------------')
Stream<String> stream = Files.lines(logFile.toPath(), StandardCharsets.UTF_8)
try {
boolean inStartup = true
boolean inExcerpt = false
int linesSkipped = 0
for (String line : stream) {
if (line.startsWith("[")) {
inExcerpt = false // clear with the next log message
}
if (line =~ /(\[WARN *\])|(\[ERROR *\])/) {
inExcerpt = true // show warnings and errors
}
if (inStartup || inExcerpt) {
if (linesSkipped != 0) {
LOGGER.lifecycle("... SKIPPED ${linesSkipped} LINES ...")
}
LOGGER.lifecycle(line)
linesSkipped = 0
} else {
++linesSkipped
}
if (line =~ /recovered \[\d+\] indices into cluster_state/) {
inStartup = false
}
}
} finally {
stream.close()
}
LOGGER.lifecycle('=========================================')
}
Copy createCopyRestSpecTask() {
Boilerplate.maybeCreate(project.configurations, 'restSpec') {
project.dependencies.add(

View file

@ -19,6 +19,7 @@
package org.elasticsearch.gradle.test
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.testclusters.TestClustersPlugin
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Plugin
import org.gradle.api.Project
@ -43,6 +44,7 @@ public class RestTestPlugin implements Plugin<Project> {
+ 'elasticsearch.standalone-rest-test')
}
project.pluginManager.apply(TestClustersPlugin)
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
integTest.description = 'Runs rest tests against an elasticsearch cluster.'
integTest.group = JavaBasePlugin.VERIFICATION_GROUP

View file

@ -20,13 +20,12 @@
package org.elasticsearch.gradle.test
import groovy.transform.CompileStatic
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin
import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.elasticsearch.gradle.testclusters.TestClustersPlugin
import org.gradle.api.InvalidUserDataException
import org.gradle.api.JavaVersion
import org.gradle.api.Plugin
@ -42,7 +41,6 @@ import org.gradle.api.tasks.compile.JavaCompile
import org.gradle.api.tasks.testing.Test
import org.gradle.plugins.ide.eclipse.model.EclipseModel
import org.gradle.plugins.ide.idea.model.IdeaModel
/**
* Configures the build to compile tests against Elasticsearch's test framework
* and run REST tests. Use BuildPlugin if you want to build main code as well
@ -60,6 +58,7 @@ class StandaloneRestTestPlugin implements Plugin<Project> {
}
project.rootProject.pluginManager.apply(GlobalBuildInfoPlugin)
project.pluginManager.apply(JavaBasePlugin)
project.pluginManager.apply(TestClustersPlugin)
project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask)
BuildPlugin.configureRepositories(project)

View file

@ -56,7 +56,8 @@ class TestWithDependenciesPlugin implements Plugin<Project> {
private static addPluginResources(Project project, Project pluginProject) {
String outputDir = "${project.buildDir}/generated-resources/${pluginProject.name}"
String taskName = ClusterFormationTasks.pluginTaskName("copy", pluginProject.name, "Metadata")
String camelName = pluginProject.name.replaceAll(/-(\w)/) { _, c -> c.toUpperCase(Locale.ROOT) }
String taskName = "copy" + camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1) + "Metadata"
Copy copyPluginMetadata = project.tasks.create(taskName, Copy.class)
copyPluginMetadata.into(outputDir)
copyPluginMetadata.from(pluginProject.tasks.pluginProperties)

View file

@ -70,19 +70,13 @@ public class TestingConventionsTasks extends DefaultTask {
}
@Input
public Map<String, Set<File>> classFilesPerEnabledTask(FileTree testClassFiles) {
Map<String, Set<File>> collector = new HashMap<>();
// Gradle Test
collector.putAll(
getProject().getTasks().withType(Test.class).stream()
.filter(Task::getEnabled)
.collect(Collectors.toMap(
Task::getPath,
task -> task.getCandidateClassFiles().getFiles()
))
);
return Collections.unmodifiableMap(collector);
public Map<String, Set<File>> getClassFilesPerEnabledTask() {
return getProject().getTasks().withType(Test.class).stream()
.filter(Task::getEnabled)
.collect(Collectors.toMap(
Task::getPath,
task -> task.getCandidateClassFiles().getFiles()
));
}
@Input
@ -154,7 +148,7 @@ public class TestingConventionsTasks extends DefaultTask {
.collect(Collectors.toList())
).getAsFileTree();
final Map<String, Set<File>> classFilesPerTask = classFilesPerEnabledTask(allTestClassFiles);
final Map<String, Set<File>> classFilesPerTask = getClassFilesPerEnabledTask();
final Map<String, Set<Class<?>>> testClassesPerTask = classFilesPerTask.entrySet().stream()
.collect(

View file

@ -968,12 +968,6 @@ public class ElasticsearchNode implements TestClusterConfiguration {
// Don't wait for state, just start up quickly. This will also allow new and old nodes in the BWC case to become the master
defaultConfig.put("discovery.initial_state_timeout", "0s");
// TODO: Remove these once https://github.com/elastic/elasticsearch/issues/46091 is fixed
defaultConfig.put("logger.org.elasticsearch.action.support.master.TransportMasterNodeAction", "TRACE");
defaultConfig.put("logger.org.elasticsearch.cluster.metadata.MetaDataCreateIndexService", "TRACE");
defaultConfig.put("logger.org.elasticsearch.cluster.service", "DEBUG");
defaultConfig.put("logger.org.elasticsearch.cluster.coordination", "DEBUG");
defaultConfig.put("logger.org.elasticsearch.gateway.MetaStateService", "TRACE");
if (getVersion().getMajor() >= 8) {
defaultConfig.put("cluster.service.slow_task_logging_threshold", "5s");
defaultConfig.put("cluster.service.slow_master_task_logging_threshold", "5s");

View file

@ -18,20 +18,65 @@
*/
package org.elasticsearch.gradle.testfixtures;
import org.gradle.api.GradleException;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
public class TestFixtureExtension {
private final Project project;
final NamedDomainObjectContainer<Project> fixtures;
final Map<String, String> serviceToProjectUseMap = new HashMap<>();
public TestFixtureExtension(Project project) {
this.project = project;
this.fixtures = project.container(Project.class);
}
public void useFixture() {
useFixture(this.project.getPath());
}
public void useFixture(String path) {
addFixtureProject(path);
serviceToProjectUseMap.put(path, this.project.getPath());
}
public void useFixture(String path, String serviceName) {
addFixtureProject(path);
String key = getServiceNameKey(path, serviceName);
serviceToProjectUseMap.put(key, this.project.getPath());
Optional<String> otherProject = this.findOtherProjectUsingService(key);
if (otherProject.isPresent()) {
throw new GradleException(
"Projects " + otherProject.get() + " and " + this.project.getPath() + " both claim the "+ serviceName +
" service defined in the docker-compose.yml of " + path + "This is not supported because it breaks " +
"running in parallel. Configure dedicated services for each project and use those instead."
);
}
}
private String getServiceNameKey(String fixtureProjectPath, String serviceName) {
return fixtureProjectPath + "::" + serviceName;
}
private Optional<String> findOtherProjectUsingService(String serviceName) {
return this.project.getRootProject().getAllprojects().stream()
.filter(p -> p.equals(this.project) == false)
.filter(p -> p.getExtensions().findByType(TestFixtureExtension.class) != null)
.map(project -> project.getExtensions().getByType(TestFixtureExtension.class))
.flatMap(ext -> ext.serviceToProjectUseMap.entrySet().stream())
.filter(entry -> entry.getKey().equals(serviceName))
.map(Map.Entry::getValue)
.findAny();
}
private void addFixtureProject(String path) {
Project fixtureProject = this.project.findProject(path);
if (fixtureProject == null) {
throw new IllegalArgumentException("Could not find test fixture " + fixtureProject);
@ -42,6 +87,20 @@ public class TestFixtureExtension {
);
}
fixtures.add(fixtureProject);
// Check for exclusive access
Optional<String> otherProject = this.findOtherProjectUsingService(path);
if (otherProject.isPresent()) {
throw new GradleException("Projects " + otherProject.get() + " and " + this.project.getPath() + " both " +
"claim all services from " + path + ". This is not supported because it breaks running in parallel. " +
"Configure specific services in docker-compose.yml for each and add the service name to `useFixture`"
);
}
}
boolean isServiceRequired(String serviceName, String fixtureProject) {
if (serviceToProjectUseMap.containsKey(fixtureProject)) {
return true;
}
return serviceToProjectUseMap.containsKey(getServiceNameKey(fixtureProject, serviceName));
}
}

View file

@ -20,6 +20,7 @@ package org.elasticsearch.gradle.testfixtures;
import com.avast.gradle.dockercompose.ComposeExtension;
import com.avast.gradle.dockercompose.DockerComposePlugin;
import com.avast.gradle.dockercompose.ServiceInfo;
import com.avast.gradle.dockercompose.tasks.ComposeUp;
import org.elasticsearch.gradle.OS;
import org.elasticsearch.gradle.SystemPropertyCommandLineArgumentProvider;
@ -58,9 +59,6 @@ public class TestFixturesPlugin implements Plugin<Project> {
ext.set("testFixturesDir", testfixturesDir);
if (project.file(DOCKER_COMPOSE_YML).exists()) {
// the project that defined a test fixture can also use it
extension.fixtures.add(project);
Task buildFixture = project.getTasks().create("buildFixture");
Task pullFixture = project.getTasks().create("pullFixture");
Task preProcessFixture = project.getTasks().create("preProcessFixture");
@ -106,6 +104,7 @@ public class TestFixturesPlugin implements Plugin<Project> {
configureServiceInfoForTask(
postProcessFixture,
project,
false,
(name, port) -> postProcessFixture.getExtensions()
.getByType(ExtraPropertiesExtension.class).set(name, port)
);
@ -144,6 +143,7 @@ public class TestFixturesPlugin implements Plugin<Project> {
configureServiceInfoForTask(
task,
fixtureProject,
true,
(name, host) ->
task.getExtensions().getByType(SystemPropertyCommandLineArgumentProvider.class).systemProperty(name, host)
);
@ -165,14 +165,23 @@ public class TestFixturesPlugin implements Plugin<Project> {
);
}
private void configureServiceInfoForTask(Task task, Project fixtureProject, BiConsumer<String, Integer> consumer) {
private void configureServiceInfoForTask(
Task task, Project fixtureProject, boolean enableFilter, BiConsumer<String, Integer> consumer
) {
// Configure ports for the tests as system properties.
// We only know these at execution time so we need to do it in doFirst
TestFixtureExtension extension = task.getProject().getExtensions().getByType(TestFixtureExtension.class);
task.doFirst(new Action<Task>() {
@Override
public void execute(Task theTask) {
fixtureProject.getExtensions().getByType(ComposeExtension.class).getServicesInfos()
.forEach((service, infos) -> {
.entrySet().stream()
.filter(entry -> enableFilter == false ||
extension.isServiceRequired(entry.getKey(), fixtureProject.getPath())
)
.forEach(entry -> {
String service = entry.getKey();
ServiceInfo infos = entry.getValue();
infos.getTcpPorts()
.forEach((container, host) -> {
String name = "test.fixtures." + service + ".tcp." + container;

View file

@ -29,7 +29,7 @@ final class GraphRequestConverters {
private GraphRequestConverters() {}
static Request explore(GraphExploreRequest exploreRequest) throws IOException {
String endpoint = RequestConverters.endpoint(exploreRequest.indices(), exploreRequest.types(), "_graph/explore");
String endpoint = RequestConverters.endpoint(exploreRequest.indices(), "_graph/explore");
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
request.setEntity(RequestConverters.createEntity(exploreRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;

View file

@ -204,7 +204,7 @@ final class IndexLifecycleRequestConverters {
}
static Request executeSnapshotLifecyclePolicy(ExecuteSnapshotLifecyclePolicyRequest executeSnapshotLifecyclePolicyRequest) {
Request request = new Request(HttpPut.METHOD_NAME,
Request request = new Request(HttpPost.METHOD_NAME,
new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_slm/policy")
.addPathPartAsIs(executeSnapshotLifecyclePolicyRequest.getPolicyId())

View file

@ -544,8 +544,7 @@ final class IndicesRequestConverters {
static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException {
String[] indices = validateQueryRequest.indices() == null ? Strings.EMPTY_ARRAY : validateQueryRequest.indices();
String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types();
String endpoint = RequestConverters.endpoint(indices, types, "_validate/query");
String endpoint = RequestConverters.endpoint(indices, "_validate/query");
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(validateQueryRequest.indicesOptions());

View file

@ -504,9 +504,7 @@ final class RequestConverters {
}
static Request explain(ExplainRequest explainRequest) throws IOException {
String endpoint = explainRequest.type().equals(MapperService.SINGLE_MAPPING_NAME)
? endpoint(explainRequest.index(), "_explain", explainRequest.id())
: endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain");
String endpoint = endpoint(explainRequest.index(), "_explain", explainRequest.id());
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
Params params = new Params();
@ -547,6 +545,10 @@ final class RequestConverters {
return prepareReindexRequest(reindexRequest, false);
}
static Request submitDeleteByQuery(DeleteByQueryRequest deleteByQueryRequest) throws IOException {
return prepareDeleteByQueryRequest(deleteByQueryRequest, false);
}
private static Request prepareReindexRequest(ReindexRequest reindexRequest, boolean waitForCompletion) throws IOException {
String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
@ -566,6 +568,35 @@ final class RequestConverters {
return request;
}
private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteByQueryRequest,
boolean waitForCompletion) throws IOException {
String endpoint = endpoint(deleteByQueryRequest.indices(), "_delete_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params()
.withRouting(deleteByQueryRequest.getRouting())
.withRefresh(deleteByQueryRequest.isRefresh())
.withTimeout(deleteByQueryRequest.getTimeout())
.withWaitForActiveShards(deleteByQueryRequest.getWaitForActiveShards())
.withRequestsPerSecond(deleteByQueryRequest.getRequestsPerSecond())
.withIndicesOptions(deleteByQueryRequest.indicesOptions())
.withWaitForCompletion(waitForCompletion);
if (deleteByQueryRequest.isAbortOnVersionConflict() == false) {
params.putParam("conflicts", "proceed");
}
if (deleteByQueryRequest.getBatchSize() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE) {
params.putParam("scroll_size", Integer.toString(deleteByQueryRequest.getBatchSize()));
}
if (deleteByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) {
params.putParam("scroll", deleteByQueryRequest.getScrollTime());
}
if (deleteByQueryRequest.getMaxDocs() > 0) {
params.putParam("max_docs", Integer.toString(deleteByQueryRequest.getMaxDocs()));
}
request.addParameters(params.asMap());
request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request updateByQuery(UpdateByQueryRequest updateByQueryRequest) throws IOException {
String endpoint = endpoint(updateByQueryRequest.indices(), "_update_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
@ -595,30 +626,7 @@ final class RequestConverters {
}
static Request deleteByQuery(DeleteByQueryRequest deleteByQueryRequest) throws IOException {
String endpoint = endpoint(deleteByQueryRequest.indices(), "_delete_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params()
.withRouting(deleteByQueryRequest.getRouting())
.withRefresh(deleteByQueryRequest.isRefresh())
.withTimeout(deleteByQueryRequest.getTimeout())
.withWaitForActiveShards(deleteByQueryRequest.getWaitForActiveShards())
.withRequestsPerSecond(deleteByQueryRequest.getRequestsPerSecond())
.withIndicesOptions(deleteByQueryRequest.indicesOptions());
if (deleteByQueryRequest.isAbortOnVersionConflict() == false) {
params.putParam("conflicts", "proceed");
}
if (deleteByQueryRequest.getBatchSize() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE) {
params.putParam("scroll_size", Integer.toString(deleteByQueryRequest.getBatchSize()));
}
if (deleteByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) {
params.putParam("scroll", deleteByQueryRequest.getScrollTime());
}
if (deleteByQueryRequest.getMaxDocs() > 0) {
params.putParam("max_docs", Integer.toString(deleteByQueryRequest.getMaxDocs()));
}
request.addParameters(params.asMap());
request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
return prepareDeleteByQueryRequest(deleteByQueryRequest, true);
}
static Request rethrottleReindex(RethrottleRequest rethrottleRequest) {

View file

@ -256,7 +256,7 @@ public class RestHighLevelClient implements Closeable {
private final IndexLifecycleClient ilmClient = new IndexLifecycleClient(this);
private final RollupClient rollupClient = new RollupClient(this);
private final CcrClient ccrClient = new CcrClient(this);
private final DataFrameClient dataFrameClient = new DataFrameClient(this);
private final TransformClient transformClient = new TransformClient(this);
private final EnrichClient enrichClient = new EnrichClient(this);
/**
@ -478,8 +478,8 @@ public class RestHighLevelClient implements Closeable {
*
* @return the client wrapper for making Data Frame API calls
*/
public DataFrameClient dataFrame() {
return dataFrameClient;
public TransformClient transform() {
return transformClient;
}
public EnrichClient enrich() {
@ -595,6 +595,21 @@ public class RestHighLevelClient implements Closeable {
);
}
/**
* Submits a delete by query task
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html">
* Delete By Query API on elastic.co</a>
* @param deleteByQueryRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the submission response
*/
public final TaskSubmissionResponse submitDeleteByQueryTask(DeleteByQueryRequest deleteByQueryRequest,
RequestOptions options) throws IOException {
return performRequestAndParseEntity(
deleteByQueryRequest, RequestConverters::submitDeleteByQuery, options, TaskSubmissionResponse::fromXContent, emptySet()
);
}
/**
* Asynchronously executes a delete by query request.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html">

View file

@ -21,29 +21,29 @@ package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest;
import org.elasticsearch.client.transform.GetDataFrameTransformRequest;
import org.elasticsearch.client.transform.GetDataFrameTransformResponse;
import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest;
import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse;
import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest;
import org.elasticsearch.client.transform.PreviewDataFrameTransformResponse;
import org.elasticsearch.client.transform.PutDataFrameTransformRequest;
import org.elasticsearch.client.transform.StartDataFrameTransformRequest;
import org.elasticsearch.client.transform.StartDataFrameTransformResponse;
import org.elasticsearch.client.transform.StopDataFrameTransformRequest;
import org.elasticsearch.client.transform.StopDataFrameTransformResponse;
import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest;
import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse;
import org.elasticsearch.client.transform.DeleteTransformRequest;
import org.elasticsearch.client.transform.GetTransformRequest;
import org.elasticsearch.client.transform.GetTransformResponse;
import org.elasticsearch.client.transform.GetTransformStatsRequest;
import org.elasticsearch.client.transform.GetTransformStatsResponse;
import org.elasticsearch.client.transform.PreviewTransformRequest;
import org.elasticsearch.client.transform.PreviewTransformResponse;
import org.elasticsearch.client.transform.PutTransformRequest;
import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.client.transform.StartTransformResponse;
import org.elasticsearch.client.transform.StopTransformRequest;
import org.elasticsearch.client.transform.StopTransformResponse;
import org.elasticsearch.client.transform.UpdateTransformRequest;
import org.elasticsearch.client.transform.UpdateTransformResponse;
import java.io.IOException;
import java.util.Collections;
public final class DataFrameClient {
public final class TransformClient {
private final RestHighLevelClient restHighLevelClient;
DataFrameClient(RestHighLevelClient restHighLevelClient) {
TransformClient(RestHighLevelClient restHighLevelClient) {
this.restHighLevelClient = restHighLevelClient;
}
@ -54,15 +54,15 @@ public final class DataFrameClient {
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html">
* Create transform documentation</a>
*
* @param request The PutDataFrameTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfig}.
* @param request The PutTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfig}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return An AcknowledgedResponse object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public AcknowledgedResponse putDataFrameTransform(PutDataFrameTransformRequest request, RequestOptions options) throws IOException {
public AcknowledgedResponse putTransform(PutTransformRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::putDataFrameTransform,
TransformRequestConverters::putTransform,
options,
AcknowledgedResponse::fromXContent,
Collections.emptySet());
@ -74,16 +74,16 @@ public final class DataFrameClient {
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html">
* Create transform documentation</a>
* @param request The PutDataFrameTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfig}.
* @param request The PutTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfig}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable putDataFrameTransformAsync(PutDataFrameTransformRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
public Cancellable putTransformAsync(PutTransformRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::putDataFrameTransform,
TransformRequestConverters::putTransform,
options,
AcknowledgedResponse::fromXContent,
listener,
@ -97,18 +97,18 @@ public final class DataFrameClient {
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html">
* Create transform documentation</a>
*
* @param request The UpdateDataFrameTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate}.
* @param request The UpdateTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfigUpdate}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return An UpdateDataFrameTransformResponse object containing the updated configuration
* @return An UpdateTransformResponse object containing the updated configuration
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public UpdateDataFrameTransformResponse updateDataFrameTransform(UpdateDataFrameTransformRequest request,
RequestOptions options) throws IOException {
public UpdateTransformResponse updateTransform(UpdateTransformRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::updateDataFrameTransform,
TransformRequestConverters::updateTransform,
options,
UpdateDataFrameTransformResponse::fromXContent,
UpdateTransformResponse::fromXContent,
Collections.emptySet());
}
@ -118,19 +118,19 @@ public final class DataFrameClient {
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html">
* Create transform documentation</a>
* @param request The UpdateDataFrameTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate}.
* @param request The UpdateTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfigUpdate}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable updateDataFrameTransformAsync(UpdateDataFrameTransformRequest request,
RequestOptions options,
ActionListener<UpdateDataFrameTransformResponse> listener) {
public Cancellable updateTransformAsync(UpdateTransformRequest request,
RequestOptions options,
ActionListener<UpdateTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::updateDataFrameTransform,
TransformRequestConverters::updateTransform,
options,
UpdateDataFrameTransformResponse::fromXContent,
UpdateTransformResponse::fromXContent,
listener,
Collections.emptySet());
}
@ -142,17 +142,17 @@ public final class DataFrameClient {
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-stats.html">
* Get transform stats documentation</a>
*
* @param request Specifies the which transforms to get the stats for
* @param request Specifies which transforms to get the stats for
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return The transform stats
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public GetDataFrameTransformStatsResponse getDataFrameTransformStats(GetDataFrameTransformStatsRequest request, RequestOptions options)
public GetTransformStatsResponse getTransformStats(GetTransformStatsRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::getDataFrameTransformStats,
TransformRequestConverters::getTransformStats,
options,
GetDataFrameTransformStatsResponse::fromXContent,
GetTransformStatsResponse::fromXContent,
Collections.emptySet());
}
@ -162,17 +162,17 @@ public final class DataFrameClient {
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-stats.html">
* Get transform stats documentation</a>
* @param request Specifies the which transforms to get the stats for
* @param request Specifies which transforms to get the stats for
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable getDataFrameTransformStatsAsync(GetDataFrameTransformStatsRequest request, RequestOptions options,
ActionListener<GetDataFrameTransformStatsResponse> listener) {
public Cancellable getTransformStatsAsync(GetTransformStatsRequest request, RequestOptions options,
ActionListener<GetTransformStatsResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::getDataFrameTransformStats,
TransformRequestConverters::getTransformStats,
options,
GetDataFrameTransformStatsResponse::fromXContent,
GetTransformStatsResponse::fromXContent,
listener,
Collections.emptySet());
}
@ -189,10 +189,10 @@ public final class DataFrameClient {
* @return An AcknowledgedResponse object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public AcknowledgedResponse deleteDataFrameTransform(DeleteDataFrameTransformRequest request, RequestOptions options)
public AcknowledgedResponse deleteTransform(DeleteTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::deleteDataFrameTransform,
TransformRequestConverters::deleteTransform,
options,
AcknowledgedResponse::fromXContent,
Collections.emptySet());
@ -209,10 +209,10 @@ public final class DataFrameClient {
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable deleteDataFrameTransformAsync(DeleteDataFrameTransformRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
public Cancellable deleteTransformAsync(DeleteTransformRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::deleteDataFrameTransform,
TransformRequestConverters::deleteTransform,
options,
AcknowledgedResponse::fromXContent,
listener,
@ -231,12 +231,12 @@ public final class DataFrameClient {
* @return A response containing the results of the applied transform
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public PreviewDataFrameTransformResponse previewDataFrameTransform(PreviewDataFrameTransformRequest request, RequestOptions options)
public PreviewTransformResponse previewTransform(PreviewTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::previewDataFrameTransform,
TransformRequestConverters::previewTransform,
options,
PreviewDataFrameTransformResponse::fromXContent,
PreviewTransformResponse::fromXContent,
Collections.emptySet());
}
@ -250,12 +250,12 @@ public final class DataFrameClient {
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable previewDataFrameTransformAsync(PreviewDataFrameTransformRequest request, RequestOptions options,
ActionListener<PreviewDataFrameTransformResponse> listener) {
public Cancellable previewTransformAsync(PreviewTransformRequest request, RequestOptions options,
ActionListener<PreviewTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::previewDataFrameTransform,
TransformRequestConverters::previewTransform,
options,
PreviewDataFrameTransformResponse::fromXContent,
PreviewTransformResponse::fromXContent,
listener,
Collections.emptySet());
}
@ -272,12 +272,12 @@ public final class DataFrameClient {
* @return A response object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public StartDataFrameTransformResponse startDataFrameTransform(StartDataFrameTransformRequest request, RequestOptions options)
public StartTransformResponse startTransform(StartTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::startDataFrameTransform,
TransformRequestConverters::startTransform,
options,
StartDataFrameTransformResponse::fromXContent,
StartTransformResponse::fromXContent,
Collections.emptySet());
}
@ -292,12 +292,12 @@ public final class DataFrameClient {
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable startDataFrameTransformAsync(StartDataFrameTransformRequest request, RequestOptions options,
ActionListener<StartDataFrameTransformResponse> listener) {
public Cancellable startTransformAsync(StartTransformRequest request, RequestOptions options,
ActionListener<StartTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::startDataFrameTransform,
TransformRequestConverters::startTransform,
options,
StartDataFrameTransformResponse::fromXContent,
StartTransformResponse::fromXContent,
listener,
Collections.emptySet());
}
@ -314,12 +314,12 @@ public final class DataFrameClient {
* @return A response object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public StopDataFrameTransformResponse stopDataFrameTransform(StopDataFrameTransformRequest request, RequestOptions options)
public StopTransformResponse stopTransform(StopTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::stopDataFrameTransform,
TransformRequestConverters::stopTransform,
options,
StopDataFrameTransformResponse::fromXContent,
StopTransformResponse::fromXContent,
Collections.emptySet());
}
@ -334,12 +334,12 @@ public final class DataFrameClient {
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable stopDataFrameTransformAsync(StopDataFrameTransformRequest request, RequestOptions options,
ActionListener<StopDataFrameTransformResponse> listener) {
public Cancellable stopTransformAsync(StopTransformRequest request, RequestOptions options,
ActionListener<StopTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::stopDataFrameTransform,
TransformRequestConverters::stopTransform,
options,
StopDataFrameTransformResponse::fromXContent,
StopTransformResponse::fromXContent,
listener,
Collections.emptySet());
}
@ -353,15 +353,15 @@ public final class DataFrameClient {
*
* @param request The get transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return An GetDataFrameTransformResponse containing the requested transforms
* @return An GetTransformResponse containing the requested transforms
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public GetDataFrameTransformResponse getDataFrameTransform(GetDataFrameTransformRequest request, RequestOptions options)
public GetTransformResponse getTransform(GetTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::getDataFrameTransform,
TransformRequestConverters::getTransform,
options,
GetDataFrameTransformResponse::fromXContent,
GetTransformResponse::fromXContent,
Collections.emptySet());
}
@ -376,12 +376,12 @@ public final class DataFrameClient {
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable getDataFrameTransformAsync(GetDataFrameTransformRequest request, RequestOptions options,
ActionListener<GetDataFrameTransformResponse> listener) {
public Cancellable getTransformAsync(GetTransformRequest request, RequestOptions options,
ActionListener<GetTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::getDataFrameTransform,
TransformRequestConverters::getTransform,
options,
GetDataFrameTransformResponse::fromXContent,
GetTransformResponse::fromXContent,
listener,
Collections.emptySet());
}

View file

@ -24,29 +24,29 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest;
import org.elasticsearch.client.transform.GetDataFrameTransformRequest;
import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest;
import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest;
import org.elasticsearch.client.transform.PutDataFrameTransformRequest;
import org.elasticsearch.client.transform.StartDataFrameTransformRequest;
import org.elasticsearch.client.transform.StopDataFrameTransformRequest;
import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest;
import org.elasticsearch.client.transform.DeleteTransformRequest;
import org.elasticsearch.client.transform.GetTransformRequest;
import org.elasticsearch.client.transform.GetTransformStatsRequest;
import org.elasticsearch.client.transform.PreviewTransformRequest;
import org.elasticsearch.client.transform.PutTransformRequest;
import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.client.transform.StopTransformRequest;
import org.elasticsearch.client.transform.UpdateTransformRequest;
import org.elasticsearch.common.Strings;
import java.io.IOException;
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createEntity;
import static org.elasticsearch.client.transform.DeleteDataFrameTransformRequest.FORCE;
import static org.elasticsearch.client.transform.GetDataFrameTransformRequest.ALLOW_NO_MATCH;
import static org.elasticsearch.client.transform.PutDataFrameTransformRequest.DEFER_VALIDATION;
import static org.elasticsearch.client.transform.DeleteTransformRequest.FORCE;
import static org.elasticsearch.client.transform.GetTransformRequest.ALLOW_NO_MATCH;
import static org.elasticsearch.client.transform.PutTransformRequest.DEFER_VALIDATION;
final class DataFrameRequestConverters {
final class TransformRequestConverters {
private DataFrameRequestConverters() {}
private TransformRequestConverters() {}
static Request putDataFrameTransform(PutDataFrameTransformRequest putRequest) throws IOException {
static Request putTransform(PutTransformRequest putRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(putRequest.getConfig().getId())
@ -59,7 +59,7 @@ final class DataFrameRequestConverters {
return request;
}
static Request updateDataFrameTransform(UpdateDataFrameTransformRequest updateDataFrameTransformRequest) throws IOException {
static Request updateTransform(UpdateTransformRequest updateDataFrameTransformRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(updateDataFrameTransformRequest.getId())
@ -73,7 +73,7 @@ final class DataFrameRequestConverters {
return request;
}
static Request getDataFrameTransform(GetDataFrameTransformRequest getRequest) {
static Request getTransform(GetTransformRequest getRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(Strings.collectionToCommaDelimitedString(getRequest.getId()))
@ -91,7 +91,7 @@ final class DataFrameRequestConverters {
return request;
}
static Request deleteDataFrameTransform(DeleteDataFrameTransformRequest deleteRequest) {
static Request deleteTransform(DeleteTransformRequest deleteRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(deleteRequest.getId())
@ -103,7 +103,7 @@ final class DataFrameRequestConverters {
return request;
}
static Request startDataFrameTransform(StartDataFrameTransformRequest startRequest) {
static Request startTransform(StartTransformRequest startRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(startRequest.getId())
@ -118,7 +118,7 @@ final class DataFrameRequestConverters {
return request;
}
static Request stopDataFrameTransform(StopDataFrameTransformRequest stopRequest) {
static Request stopTransform(StopTransformRequest stopRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(stopRequest.getId())
@ -139,7 +139,7 @@ final class DataFrameRequestConverters {
return request;
}
static Request previewDataFrameTransform(PreviewDataFrameTransformRequest previewRequest) throws IOException {
static Request previewTransform(PreviewTransformRequest previewRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms", "_preview")
.build();
@ -148,7 +148,7 @@ final class DataFrameRequestConverters {
return request;
}
static Request getDataFrameTransformStats(GetDataFrameTransformStatsRequest statsRequest) {
static Request getTransformStats(GetTransformStatsRequest statsRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(statsRequest.getId())

View file

@ -19,10 +19,8 @@
package org.elasticsearch.client.core;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -39,7 +37,7 @@ import static org.elasticsearch.action.search.SearchRequest.DEFAULT_INDICES_OPTI
/**
* Encapsulates a request to _count API against one, several or all indices.
*/
public final class CountRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContentObject {
public final class CountRequest implements Validatable, ToXContentObject {
private String[] indices = Strings.EMPTY_ARRAY;
private String[] types = Strings.EMPTY_ARRAY;
@ -78,11 +76,6 @@ public final class CountRequest extends ActionRequest implements IndicesRequest.
this.query = Objects.requireNonNull(query, "query must not be null");;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
/**
* Sets the indices the count will be executed on.
*/

View file

@ -47,7 +47,6 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte
public static final String NO_VERTICES_ERROR_MESSAGE = "Graph explore hop must have at least one VertexRequest";
private String[] indices = Strings.EMPTY_ARRAY;
private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false);
private String[] types = Strings.EMPTY_ARRAY;
private String routing;
private TimeValue timeout;
@ -106,31 +105,6 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte
return this;
}
/**
* The document types to execute the explore against. Defaults to be executed against
* all types.
*
* @deprecated Types are in the process of being removed. Instead of using a type, prefer to
* filter on a field on the document.
*/
@Deprecated
public String[] types() {
return this.types;
}
/**
* The document types to execute the explore request against. Defaults to be executed against
* all types.
*
* @deprecated Types are in the process of being removed. Instead of using a type, prefer to
* filter on a field on the document.
*/
@Deprecated
public GraphExploreRequest types(String... types) {
this.types = types;
return this;
}
public String routing() {
return this.routing;
}
@ -174,7 +148,7 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte
@Override
public String toString() {
return "graph explore [" + Arrays.toString(indices) + "][" + Arrays.toString(types) + "]";
return "graph explore [" + Arrays.toString(indices) + "]";
}
/**

View file

@ -113,8 +113,6 @@ public class DetailAnalyzeResponse {
private final String name;
private final AnalyzeResponse.AnalyzeToken[] tokens;
private static final String TOKENS = "tokens";
@Override
public boolean equals(Object o) {
if (this == o) return true;

View file

@ -20,15 +20,13 @@ package org.elasticsearch.client.indices;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.client.TimedRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentFragment;
@ -50,13 +48,12 @@ import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
/**
* A request to create an index template.
*/
public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateRequest> implements IndicesRequest, ToXContentFragment {
public class PutIndexTemplateRequest extends TimedRequest implements ToXContentFragment {
private String name;
@ -77,19 +74,11 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
private Integer version;
/**
* Constructs a new put index template request with the provided name.
* Constructs a new put index template request with the provided name and patterns.
*/
public PutIndexTemplateRequest(String name) {
public PutIndexTemplateRequest(String name, List<String> indexPatterns) {
this.name(name);
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (indexPatterns == null || indexPatterns.size() == 0) {
validationException = addValidationError("index patterns are missing", validationException);
}
return validationException;
this.patterns(indexPatterns);
}
/**
@ -111,6 +100,9 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
}
public PutIndexTemplateRequest patterns(List<String> indexPatterns) {
if (indexPatterns == null || indexPatterns.size() == 0) {
throw new IllegalArgumentException("index patterns are missing");
}
this.indexPatterns = indexPatterns;
return this;
}
@ -412,14 +404,21 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
return this;
}
@Override
public String[] indices() {
return indexPatterns.toArray(new String[indexPatterns.size()]);
/**
* @deprecated Use {@link #setMasterTimeout(TimeValue)} instead
*/
@Deprecated
public final PutIndexTemplateRequest masterNodeTimeout(TimeValue timeout) {
setMasterTimeout(timeout);
return this;
}
@Override
public IndicesOptions indicesOptions() {
return IndicesOptions.strictExpand();
/**
* @deprecated Use {@link #setMasterTimeout(TimeValue)} instead
*/
@Deprecated
public final PutIndexTemplateRequest masterNodeTimeout(String timeout) {
return masterNodeTimeout(TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".masterNodeTimeout"));
}
@Override

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
@ -38,7 +37,7 @@ import java.util.Objects;
/**
* Request to close Machine Learning Jobs
*/
public class CloseJobRequest extends ActionRequest implements ToXContentObject {
public class CloseJobRequest implements ToXContentObject, Validatable {
public static final ParseField JOB_ID = new ParseField("job_id");
public static final ParseField TIMEOUT = new ParseField("timeout");
@ -144,11 +143,6 @@ public class CloseJobRequest extends ActionRequest implements ToXContentObject {
this.allowNoJobs = allowNoJobs;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(jobIds, timeout, force, allowNoJobs);

View file

@ -18,15 +18,14 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import java.util.Objects;
/**
* Request class for removing an event from an existing calendar
*/
public class DeleteCalendarEventRequest extends ActionRequest {
public class DeleteCalendarEventRequest implements Validatable {
private final String eventId;
private final String calendarId;
@ -51,11 +50,6 @@ public class DeleteCalendarEventRequest extends ActionRequest {
return calendarId;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(eventId, calendarId);

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import java.security.InvalidParameterException;
import java.util.Arrays;
@ -29,7 +28,7 @@ import java.util.Objects;
/**
* Request class for removing Machine Learning Jobs from an existing calendar
*/
public class DeleteCalendarJobRequest extends ActionRequest {
public class DeleteCalendarJobRequest implements Validatable {
private final List<String> jobIds;
private final String calendarId;
@ -61,11 +60,6 @@ public class DeleteCalendarJobRequest extends ActionRequest {
return calendarId;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(jobIds, calendarId);

View file

@ -19,15 +19,14 @@
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import java.util.Objects;
/**
* Request to delete a Machine Learning Calendar
*/
public class DeleteCalendarRequest extends ActionRequest {
public class DeleteCalendarRequest implements Validatable {
private final String calendarId;
@ -43,11 +42,6 @@ public class DeleteCalendarRequest extends ActionRequest {
return calendarId;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(calendarId);

View file

@ -18,15 +18,14 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import java.util.Objects;
/**
* Request to delete a Machine Learning Datafeed via its ID
*/
public class DeleteDatafeedRequest extends ActionRequest {
public class DeleteDatafeedRequest implements Validatable {
private String datafeedId;
private Boolean force;
@ -53,11 +52,6 @@ public class DeleteDatafeedRequest extends ActionRequest {
this.force = force;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(datafeedId, force);

View file

@ -18,13 +18,12 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
/**
* Request to delete expired model snapshots and forecasts
*/
public class DeleteExpiredDataRequest extends ActionRequest {
public class DeleteExpiredDataRequest implements Validatable {
/**
* Create a new request to delete expired data
@ -32,8 +31,4 @@ public class DeleteExpiredDataRequest extends ActionRequest {
public DeleteExpiredDataRequest() {
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
@ -37,7 +36,7 @@ import java.util.Objects;
/**
* POJO for a delete forecast request
*/
public class DeleteForecastRequest extends ActionRequest implements ToXContentObject {
public class DeleteForecastRequest implements Validatable, ToXContentObject {
public static final ParseField FORECAST_ID = new ParseField("forecast_id");
public static final ParseField ALLOW_NO_FORECASTS = new ParseField("allow_no_forecasts");
@ -159,11 +158,6 @@ public class DeleteForecastRequest extends ActionRequest implements ToXContentOb
return Objects.hash(jobId, forecastIds, allowNoForecasts, timeout);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,15 +18,14 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import java.util.Objects;
/**
* Request to delete a Machine Learning Job via its ID
*/
public class DeleteJobRequest extends ActionRequest {
public class DeleteJobRequest implements Validatable {
private String jobId;
private Boolean force;
@ -76,11 +75,6 @@ public class DeleteJobRequest extends ActionRequest {
this.waitForCompletion = waitForCompletion;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(jobId, force);

View file

@ -18,18 +18,16 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import java.util.Objects;
/**
* Request to delete a Machine Learning Model Snapshot Job via its Job and Snapshot IDs
*/
public class DeleteModelSnapshotRequest extends ActionRequest {
public class DeleteModelSnapshotRequest implements Validatable {
private final String jobId;
private final String snapshotId;
@ -47,11 +45,6 @@ public class DeleteModelSnapshotRequest extends ActionRequest {
return snapshotId;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(jobId, snapshotId);

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -32,7 +31,7 @@ import java.util.Objects;
/**
* Request object to flush a given Machine Learning job.
*/
public class FlushJobRequest extends ActionRequest implements ToXContentObject {
public class FlushJobRequest implements Validatable, ToXContentObject {
public static final ParseField CALC_INTERIM = new ParseField("calc_interim");
public static final ParseField START = new ParseField("start");
@ -188,8 +187,4 @@ public class FlushJobRequest extends ActionRequest implements ToXContentObject {
return builder;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
@ -34,7 +33,7 @@ import java.util.Objects;
/**
* Pojo for forecasting an existing and open Machine Learning Job
*/
public class ForecastJobRequest extends ActionRequest implements ToXContentObject {
public class ForecastJobRequest implements Validatable, ToXContentObject {
public static final ParseField DURATION = new ParseField("duration");
public static final ParseField EXPIRES_IN = new ParseField("expires_in");
@ -133,8 +132,4 @@ public class ForecastJobRequest extends ActionRequest implements ToXContentObjec
return builder;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.results.Result;
@ -34,7 +33,7 @@ import java.util.Objects;
/**
* A request to retrieve buckets of a given job
*/
public class GetBucketsRequest extends ActionRequest implements ToXContentObject {
public class GetBucketsRequest implements Validatable, ToXContentObject {
public static final ParseField EXPAND = new ParseField("expand");
public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim");
@ -199,11 +198,6 @@ public class GetBucketsRequest extends ActionRequest implements ToXContentObject
this.descending = descending;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -19,8 +19,7 @@
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.job.config.Job;
@ -35,7 +34,7 @@ import java.util.Objects;
/**
* Get the Scheduled Events for a Calendar
*/
public class GetCalendarEventsRequest extends ActionRequest implements ToXContentObject {
public class GetCalendarEventsRequest implements Validatable, ToXContentObject {
public static final ParseField START = new ParseField("start");
public static final ParseField END = new ParseField("end");
@ -121,11 +120,6 @@ public class GetCalendarEventsRequest extends ActionRequest implements ToXConten
this.jobId = jobId;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -19,8 +19,7 @@
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.common.xcontent.ObjectParser;
@ -30,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
public class GetCalendarsRequest extends ActionRequest implements ToXContentObject {
public class GetCalendarsRequest implements Validatable, ToXContentObject {
public static final ObjectParser<GetCalendarsRequest, Void> PARSER =
new ObjectParser<>("get_calendars_request", GetCalendarsRequest::new);
@ -66,11 +65,6 @@ public class GetCalendarsRequest extends ActionRequest implements ToXContentObje
this.pageParams = pageParams;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
@ -33,7 +32,7 @@ import java.util.Objects;
/**
* A request to retrieve categories of a given job
*/
public class GetCategoriesRequest extends ActionRequest implements ToXContentObject {
public class GetCategoriesRequest implements Validatable, ToXContentObject {
public static final ParseField CATEGORY_ID = new ParseField("category_id");
@ -88,11 +87,6 @@ public class GetCategoriesRequest extends ActionRequest implements ToXContentObj
this.pageParams = pageParams;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -38,7 +37,7 @@ import java.util.Objects;
* {@code _all} explicitly gets all the datafeeds in the cluster
* An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds in the cluster
*/
public class GetDatafeedRequest extends ActionRequest implements ToXContentObject {
public class GetDatafeedRequest implements Validatable, ToXContentObject {
public static final ParseField DATAFEED_IDS = new ParseField("datafeed_ids");
public static final ParseField ALLOW_NO_DATAFEEDS = new ParseField("allow_no_datafeeds");
@ -101,11 +100,6 @@ public class GetDatafeedRequest extends ActionRequest implements ToXContentObjec
return allowNoDatafeeds;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(datafeedIds, allowNoDatafeeds);

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
@ -41,7 +40,7 @@ import java.util.Objects;
* {@code _all} explicitly gets all the datafeeds' statistics in the cluster
* An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds' statistics in the cluster
*/
public class GetDatafeedStatsRequest extends ActionRequest implements ToXContentObject {
public class GetDatafeedStatsRequest implements Validatable, ToXContentObject {
public static final ParseField ALLOW_NO_DATAFEEDS = new ParseField("allow_no_datafeeds");
@ -128,11 +127,6 @@ public class GetDatafeedStatsRequest extends ActionRequest implements ToXContent
Objects.equals(allowNoDatafeeds, that.allowNoDatafeeds);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.ml.job.config.MlFilter;
import org.elasticsearch.common.xcontent.ObjectParser;
@ -32,7 +31,7 @@ import java.util.Objects;
/**
* A request to retrieve {@link MlFilter}s
*/
public class GetFiltersRequest extends ActionRequest implements ToXContentObject {
public class GetFiltersRequest implements Validatable, ToXContentObject {
public static final ObjectParser<GetFiltersRequest, Void> PARSER =
new ObjectParser<>("get_filters_request", GetFiltersRequest::new);
@ -83,11 +82,6 @@ public class GetFiltersRequest extends ActionRequest implements ToXContentObject
this.size = size;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
@ -33,7 +32,7 @@ import java.util.Objects;
/**
* A request to retrieve influencers of a given job
*/
public class GetInfluencersRequest extends ActionRequest implements ToXContentObject {
public class GetInfluencersRequest implements Validatable, ToXContentObject {
public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim");
public static final ParseField START = new ParseField("start");
@ -167,11 +166,6 @@ public class GetInfluencersRequest extends ActionRequest implements ToXContentOb
this.descending = descending;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -39,7 +38,7 @@ import java.util.Objects;
* {@code _all} explicitly gets all the jobs in the cluster
* An empty request (no {@code jobId}s) implicitly gets all the jobs in the cluster
*/
public class GetJobRequest extends ActionRequest implements ToXContentObject {
public class GetJobRequest implements Validatable, ToXContentObject {
public static final ParseField JOB_IDS = new ParseField("job_ids");
public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");
@ -101,11 +100,6 @@ public class GetJobRequest extends ActionRequest implements ToXContentObject {
return allowNoJobs;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(jobIds, allowNoJobs);

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
@ -41,7 +40,7 @@ import java.util.Objects;
* {@code _all} explicitly gets all the jobs' statistics in the cluster
* An empty request (no {@code jobId}s) implicitly gets all the jobs' statistics in the cluster
*/
public class GetJobStatsRequest extends ActionRequest implements ToXContentObject {
public class GetJobStatsRequest implements Validatable, ToXContentObject {
public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");
@ -128,11 +127,6 @@ public class GetJobStatsRequest extends ActionRequest implements ToXContentObjec
Objects.equals(allowNoJobs, that.allowNoJobs);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
@ -33,7 +32,7 @@ import java.util.Objects;
/**
* A request to retrieve information about model snapshots for a given job
*/
public class GetModelSnapshotsRequest extends ActionRequest implements ToXContentObject {
public class GetModelSnapshotsRequest implements Validatable, ToXContentObject {
public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id");
@ -153,11 +152,6 @@ public class GetModelSnapshotsRequest extends ActionRequest implements ToXConten
this.desc = desc;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
@ -37,7 +36,7 @@ import java.util.Objects;
/**
* A request to retrieve overall buckets of set of jobs
*/
public class GetOverallBucketsRequest extends ActionRequest implements ToXContentObject {
public class GetOverallBucketsRequest implements Validatable, ToXContentObject {
public static final ParseField TOP_N = new ParseField("top_n");
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
@ -203,11 +202,6 @@ public class GetOverallBucketsRequest extends ActionRequest implements ToXConten
return allowNoJobs;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
@ -35,7 +34,7 @@ import java.util.Objects;
/**
* Request to open a Machine Learning Job
*/
public class OpenJobRequest extends ActionRequest implements ToXContentObject {
public class OpenJobRequest implements Validatable, ToXContentObject {
public static final ParseField TIMEOUT = new ParseField("timeout");
public static final ConstructingObjectParser<OpenJobRequest, Void> PARSER = new ConstructingObjectParser<>(
@ -88,11 +87,6 @@ public class OpenJobRequest extends ActionRequest implements ToXContentObject {
this.timeout = timeout;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.common.ParseField;
@ -35,7 +34,7 @@ import java.util.Objects;
/**
* Request to add a ScheduledEvent to a Machine Learning calendar
*/
public class PostCalendarEventRequest extends ActionRequest implements ToXContentObject {
public class PostCalendarEventRequest implements Validatable, ToXContentObject {
private final String calendarId;
private final List<ScheduledEvent> scheduledEvents;
@ -78,11 +77,6 @@ public class PostCalendarEventRequest extends ActionRequest implements ToXConten
return scheduledEvents;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesArray;
@ -40,7 +39,7 @@ import java.util.Objects;
/**
* Request to post data to a Machine Learning job
*/
public class PostDataRequest extends ActionRequest implements ToXContentObject {
public class PostDataRequest implements Validatable, ToXContentObject {
public static final ParseField RESET_START = new ParseField("reset_start");
public static final ParseField RESET_END = new ParseField("reset_end");
@ -159,11 +158,6 @@ public class PostDataRequest extends ActionRequest implements ToXContentObject {
Objects.equals(xContentType, other.xContentType);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -33,7 +32,7 @@ import java.util.Objects;
/**
* Request to preview a MachineLearning Datafeed
*/
public class PreviewDatafeedRequest extends ActionRequest implements ToXContentObject {
public class PreviewDatafeedRequest implements Validatable, ToXContentObject {
public static final ConstructingObjectParser<PreviewDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>(
"open_datafeed_request", true, a -> new PreviewDatafeedRequest((String) a[0]));
@ -61,11 +60,6 @@ public class PreviewDatafeedRequest extends ActionRequest implements ToXContentO
return datafeedId;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import java.security.InvalidParameterException;
import java.util.Arrays;
@ -29,7 +28,7 @@ import java.util.Objects;
/**
* Request class for adding Machine Learning Jobs to an existing calendar
*/
public class PutCalendarJobRequest extends ActionRequest {
public class PutCalendarJobRequest implements Validatable {
private final List<String> jobIds;
private final String calendarId;
@ -61,11 +60,6 @@ public class PutCalendarJobRequest extends ActionRequest {
return calendarId;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(jobIds, calendarId);

View file

@ -19,8 +19,7 @@
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -31,7 +30,7 @@ import java.util.Objects;
/**
* Request to create a new Machine Learning calendar
*/
public class PutCalendarRequest extends ActionRequest implements ToXContentObject {
public class PutCalendarRequest implements Validatable, ToXContentObject {
private final Calendar calendar;
@ -43,11 +42,6 @@ public class PutCalendarRequest extends ActionRequest implements ToXContentObjec
return calendar;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
calendar.toXContent(builder, params);

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -31,7 +30,7 @@ import java.util.Objects;
/**
* Request to create a new Machine Learning Datafeed given a {@link DatafeedConfig} configuration
*/
public class PutDatafeedRequest extends ActionRequest implements ToXContentObject {
public class PutDatafeedRequest implements Validatable, ToXContentObject {
private final DatafeedConfig datafeed;
@ -77,8 +76,4 @@ public class PutDatafeedRequest extends ActionRequest implements ToXContentObjec
return Strings.toString(this);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.MlFilter;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -31,7 +30,7 @@ import java.util.Objects;
/**
* Request to create a new Machine Learning MlFilter given a {@link MlFilter} configuration
*/
public class PutFilterRequest extends ActionRequest implements ToXContentObject {
public class PutFilterRequest implements Validatable, ToXContentObject {
private final MlFilter filter;
@ -77,8 +76,4 @@ public class PutFilterRequest extends ActionRequest implements ToXContentObject
return Strings.toString(this);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -31,7 +30,7 @@ import java.util.Objects;
/**
* Request to create a new Machine Learning Job given a {@link Job} configuration
*/
public class PutJobRequest extends ActionRequest implements ToXContentObject {
public class PutJobRequest implements Validatable, ToXContentObject {
private final Job job;
@ -77,8 +76,4 @@ public class PutJobRequest extends ActionRequest implements ToXContentObject {
return Strings.toString(this);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.common.ParseField;
@ -33,7 +32,7 @@ import java.util.Objects;
/**
* A request to revert to a specific model snapshot for a given job
*/
public class RevertModelSnapshotRequest extends ActionRequest implements ToXContentObject {
public class RevertModelSnapshotRequest implements Validatable, ToXContentObject {
public static final ParseField DELETE_INTERVENING = new ParseField("delete_intervening_results");
@ -82,11 +81,6 @@ public class RevertModelSnapshotRequest extends ActionRequest implements ToXCont
this.deleteInterveningResults = deleteInterveningResults;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
@ -28,7 +27,7 @@ import java.util.Objects;
/**
* Sets ML into upgrade_mode
*/
public class SetUpgradeModeRequest extends ActionRequest {
public class SetUpgradeModeRequest implements Validatable {
public static final ParseField ENABLED = new ParseField("enabled");
@ -67,11 +66,6 @@ public class SetUpgradeModeRequest extends ActionRequest {
this.timeout = timeout;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(enabled, timeout);

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
@ -33,7 +32,7 @@ import java.util.Objects;
/**
* Request to start a Datafeed
*/
public class StartDatafeedRequest extends ActionRequest implements ToXContentObject {
public class StartDatafeedRequest implements Validatable, ToXContentObject {
public static final ParseField START = new ParseField("start");
public static final ParseField END = new ParseField("end");
@ -114,11 +113,6 @@ public class StartDatafeedRequest extends ActionRequest implements ToXContentObj
this.timeout = timeout;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(datafeedId, start, end, timeout);

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
@ -39,7 +38,7 @@ import java.util.Objects;
/**
* Request to stop Machine Learning Datafeeds
*/
public class StopDatafeedRequest extends ActionRequest implements ToXContentObject {
public class StopDatafeedRequest implements Validatable, ToXContentObject {
public static final ParseField TIMEOUT = new ParseField("timeout");
public static final ParseField FORCE = new ParseField("force");
@ -144,11 +143,6 @@ public class StopDatafeedRequest extends ActionRequest implements ToXContentObje
this.allowNoDatafeeds = allowNoDatafeeds;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public int hashCode() {
return Objects.hash(datafeedIds, timeout, force, allowNoDatafeeds);

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.datafeed.DatafeedUpdate;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -32,7 +31,7 @@ import java.util.Objects;
* Requests an update to a {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} with the passed {@link DatafeedUpdate}
* settings
*/
public class UpdateDatafeedRequest extends ActionRequest implements ToXContentObject {
public class UpdateDatafeedRequest implements Validatable, ToXContentObject {
private final DatafeedUpdate update;
@ -73,8 +72,4 @@ public class UpdateDatafeedRequest extends ActionRequest implements ToXContentOb
return Strings.toString(this);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.MlFilter;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
@ -36,7 +35,7 @@ import java.util.TreeSet;
/**
* Updates an existing {@link MlFilter} configuration
*/
public class UpdateFilterRequest extends ActionRequest implements ToXContentObject {
public class UpdateFilterRequest implements Validatable, ToXContentObject {
public static final ParseField ADD_ITEMS = new ParseField("add_items");
public static final ParseField REMOVE_ITEMS = new ParseField("remove_items");
@ -149,8 +148,4 @@ public class UpdateFilterRequest extends ActionRequest implements ToXContentObje
return Strings.toString(this);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.JobUpdate;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -32,7 +31,7 @@ import java.util.Objects;
* Updates a {@link org.elasticsearch.client.ml.job.config.Job} with the passed {@link JobUpdate}
* settings
*/
public class UpdateJobRequest extends ActionRequest implements ToXContentObject {
public class UpdateJobRequest implements Validatable, ToXContentObject {
private final JobUpdate update;
@ -73,8 +72,4 @@ public class UpdateJobRequest extends ActionRequest implements ToXContentObject
return Strings.toString(this);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}

View file

@ -18,8 +18,7 @@
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -32,7 +31,7 @@ import java.util.Objects;
/**
* A request to update information about an existing model snapshot for a given job
*/
public class UpdateModelSnapshotRequest extends ActionRequest implements ToXContentObject {
public class UpdateModelSnapshotRequest implements Validatable, ToXContentObject {
public static final ConstructingObjectParser<UpdateModelSnapshotRequest, Void> PARSER = new ConstructingObjectParser<>(
@ -93,11 +92,6 @@ public class UpdateModelSnapshotRequest extends ActionRequest implements ToXCont
this.retain = retain;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View file

@ -0,0 +1,54 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference;
import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel;
import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree;
import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding;
import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding;
import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor;
import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.plugins.spi.NamedXContentProvider;
import java.util.ArrayList;
import java.util.List;
public class MlInferenceNamedXContentProvider implements NamedXContentProvider {

    /**
     * Registers the named-XContent parsers for the ML inference model
     * pre-processors and trained-model implementations, keyed by each
     * implementation's {@code NAME} constant.
     */
    @Override
    public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
        List<NamedXContentRegistry.Entry> entries = new ArrayList<>();

        // Pre-processing implementations
        entries.add(new NamedXContentRegistry.Entry(PreProcessor.class,
            new ParseField(OneHotEncoding.NAME),
            OneHotEncoding::fromXContent));
        entries.add(new NamedXContentRegistry.Entry(PreProcessor.class,
            new ParseField(TargetMeanEncoding.NAME),
            TargetMeanEncoding::fromXContent));
        entries.add(new NamedXContentRegistry.Entry(PreProcessor.class,
            new ParseField(FrequencyEncoding.NAME),
            FrequencyEncoding::fromXContent));

        // Trained-model implementations
        entries.add(new NamedXContentRegistry.Entry(TrainedModel.class,
            new ParseField(Tree.NAME),
            Tree::fromXContent));

        return entries;
    }
}

View file

@ -0,0 +1,161 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.preprocessing;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
/**
* PreProcessor for frequency encoding a set of categorical values for a given field.
*/
public class FrequencyEncoding implements PreProcessor {

    public static final String NAME = "frequency_encoding";
    public static final ParseField FIELD = new ParseField("field");
    public static final ParseField FEATURE_NAME = new ParseField("feature_name");
    public static final ParseField FREQUENCY_MAP = new ParseField("frequency_map");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<FrequencyEncoding, Void> PARSER = new ConstructingObjectParser<>(
        NAME,
        true,
        a -> new FrequencyEncoding((String)a[0], (String)a[1], (Map<String, Double>)a[2]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME);
        PARSER.declareObject(ConstructingObjectParser.constructorArg(),
            (p, c) -> p.map(HashMap::new, XContentParser::doubleValue),
            FREQUENCY_MAP);
    }

    public static FrequencyEncoding fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    private final String field;
    private final String featureName;
    private final Map<String, Double> frequencyMap;

    /**
     * @param field        field name to frequency encode; must not be null
     * @param featureName  name of the resulting encoded feature; must not be null
     * @param frequencyMap mapping of field value to its frequency; must not be null
     */
    public FrequencyEncoding(String field, String featureName, Map<String, Double> frequencyMap) {
        this.field = Objects.requireNonNull(field);
        this.featureName = Objects.requireNonNull(featureName);
        // Defensive copy before wrapping: otherwise later mutation of the
        // caller's map would leak through the "unmodifiable" view.
        this.frequencyMap = Collections.unmodifiableMap(new HashMap<>(Objects.requireNonNull(frequencyMap)));
    }

    /**
     * @return Field name on which to frequency encode
     */
    public String getField() {
        return field;
    }

    /**
     * @return Map of Value: frequency for the frequency encoding
     */
    public Map<String, Double> getFrequencyMap() {
        return frequencyMap;
    }

    /**
     * @return The encoded feature name
     */
    public String getFeatureName() {
        return featureName;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(FIELD.getPreferredName(), field);
        builder.field(FEATURE_NAME.getPreferredName(), featureName);
        builder.field(FREQUENCY_MAP.getPreferredName(), frequencyMap);
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        FrequencyEncoding that = (FrequencyEncoding) o;
        return Objects.equals(field, that.field)
            && Objects.equals(featureName, that.featureName)
            && Objects.equals(frequencyMap, that.frequencyMap);
    }

    @Override
    public int hashCode() {
        return Objects.hash(field, featureName, frequencyMap);
    }

    /**
     * Creates a new {@link Builder} for the given field.
     * Made static: the method uses no instance state, and as an instance
     * method a Builder could only be obtained from an already-built encoding.
     */
    public static Builder builder(String field) {
        return new Builder(field);
    }

    public static class Builder {

        private String field;
        private String featureName;
        private Map<String, Double> frequencyMap = new HashMap<>();

        public Builder(String field) {
            this.field = field;
        }

        public Builder setField(String field) {
            this.field = field;
            return this;
        }

        public Builder setFeatureName(String featureName) {
            this.featureName = featureName;
            return this;
        }

        public Builder setFrequencyMap(Map<String, Double> frequencyMap) {
            this.frequencyMap = new HashMap<>(frequencyMap);
            return this;
        }

        public Builder addFrequency(String valueName, double frequency) {
            this.frequencyMap.put(valueName, frequency);
            return this;
        }

        public FrequencyEncoding build() {
            return new FrequencyEncoding(field, featureName, frequencyMap);
        }
    }
}

View file

@ -0,0 +1,138 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.preprocessing;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
/**
* PreProcessor for one hot encoding a set of categorical values for a given field.
*/
public class OneHotEncoding implements PreProcessor {

    public static final String NAME = "one_hot_encoding";
    public static final ParseField FIELD = new ParseField("field");
    public static final ParseField HOT_MAP = new ParseField("hot_map");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<OneHotEncoding, Void> PARSER = new ConstructingObjectParser<>(
        NAME,
        true,
        a -> new OneHotEncoding((String)a[0], (Map<String, String>)a[1]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD);
        PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HOT_MAP);
    }

    public static OneHotEncoding fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    private final String field;
    private final Map<String, String> hotMap;

    /**
     * @param field  field name to one hot encode; must not be null
     * @param hotMap mapping of field value to one-hot column name; must not be null
     */
    public OneHotEncoding(String field, Map<String, String> hotMap) {
        this.field = Objects.requireNonNull(field);
        // Defensive copy before wrapping: otherwise later mutation of the
        // caller's map would leak through the "unmodifiable" view.
        this.hotMap = Collections.unmodifiableMap(new HashMap<>(Objects.requireNonNull(hotMap)));
    }

    /**
     * @return Field name on which to one hot encode
     */
    public String getField() {
        return field;
    }

    /**
     * @return Map of Value: ColumnName for the one hot encoding
     */
    public Map<String, String> getHotMap() {
        return hotMap;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(FIELD.getPreferredName(), field);
        builder.field(HOT_MAP.getPreferredName(), hotMap);
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        OneHotEncoding that = (OneHotEncoding) o;
        return Objects.equals(field, that.field)
            && Objects.equals(hotMap, that.hotMap);
    }

    @Override
    public int hashCode() {
        return Objects.hash(field, hotMap);
    }

    /**
     * Creates a new {@link Builder} for the given field.
     * Made static: the method uses no instance state, and as an instance
     * method a Builder could only be obtained from an already-built encoding.
     */
    public static Builder builder(String field) {
        return new Builder(field);
    }

    public static class Builder {

        private String field;
        private Map<String, String> hotMap = new HashMap<>();

        public Builder(String field) {
            this.field = field;
        }

        public Builder setField(String field) {
            this.field = field;
            return this;
        }

        public Builder setHotMap(Map<String, String> hotMap) {
            this.hotMap = new HashMap<>(hotMap);
            return this;
        }

        public Builder addOneHot(String valueName, String oneHotFeatureName) {
            this.hotMap.put(valueName, oneHotFeatureName);
            return this;
        }

        public OneHotEncoding build() {
            return new OneHotEncoding(field, hotMap);
        }
    }
}

View file

@ -0,0 +1,33 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.preprocessing;
import org.elasticsearch.common.xcontent.ToXContentObject;
/**
* Describes a pre-processor for a defined machine learning model
*/
public interface PreProcessor extends ToXContentObject {

    /**
     * @return The name of the pre-processor. This is also the key used to
     *         register its parser as named XContent (see the provider's
     *         {@code ParseField(NAME)} entries) — presumably the two must match;
     *         confirm when adding a new implementation.
     */
    String getName();
}

View file

@ -0,0 +1,183 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.preprocessing;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
/**
* PreProcessor for target mean encoding a set of categorical values for a given field.
*/
public class TargetMeanEncoding implements PreProcessor {

    public static final String NAME = "target_mean_encoding";
    public static final ParseField FIELD = new ParseField("field");
    public static final ParseField FEATURE_NAME = new ParseField("feature_name");
    public static final ParseField TARGET_MEANS = new ParseField("target_means");
    public static final ParseField DEFAULT_VALUE = new ParseField("default_value");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<TargetMeanEncoding, Void> PARSER = new ConstructingObjectParser<>(
        NAME,
        true,
        a -> new TargetMeanEncoding((String)a[0], (String)a[1], (Map<String, Double>)a[2], (Double)a[3]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME);
        PARSER.declareObject(ConstructingObjectParser.constructorArg(),
            (p, c) -> p.map(HashMap::new, XContentParser::doubleValue),
            TARGET_MEANS);
        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), DEFAULT_VALUE);
    }

    public static TargetMeanEncoding fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    private final String field;
    private final String featureName;
    private final Map<String, Double> meanMap;
    private final double defaultValue;

    /**
     * @param field        field name to target mean encode; must not be null
     * @param featureName  name of the resulting encoded feature; must not be null
     * @param meanMap      mapping of field value to target mean; must not be null
     * @param defaultValue value used for previously unobserved values; must not be null
     */
    public TargetMeanEncoding(String field, String featureName, Map<String, Double> meanMap, Double defaultValue) {
        this.field = Objects.requireNonNull(field);
        this.featureName = Objects.requireNonNull(featureName);
        // Defensive copy before wrapping: otherwise later mutation of the
        // caller's map would leak through the "unmodifiable" view.
        this.meanMap = Collections.unmodifiableMap(new HashMap<>(Objects.requireNonNull(meanMap)));
        this.defaultValue = Objects.requireNonNull(defaultValue);
    }

    /**
     * @return Field name on which to target mean encode
     */
    public String getField() {
        return field;
    }

    /**
     * @return Map of Value: targetMean for the target mean encoding
     */
    public Map<String, Double> getMeanMap() {
        return meanMap;
    }

    /**
     * @return The default value to set when a previously unobserved value is seen
     */
    public double getDefaultValue() {
        return defaultValue;
    }

    /**
     * @return The feature name for the encoded value
     */
    public String getFeatureName() {
        return featureName;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(FIELD.getPreferredName(), field);
        builder.field(FEATURE_NAME.getPreferredName(), featureName);
        builder.field(TARGET_MEANS.getPreferredName(), meanMap);
        builder.field(DEFAULT_VALUE.getPreferredName(), defaultValue);
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        TargetMeanEncoding that = (TargetMeanEncoding) o;
        return Objects.equals(field, that.field)
            && Objects.equals(featureName, that.featureName)
            && Objects.equals(meanMap, that.meanMap)
            && Objects.equals(defaultValue, that.defaultValue);
    }

    @Override
    public int hashCode() {
        return Objects.hash(field, featureName, meanMap, defaultValue);
    }

    /**
     * Creates a new {@link Builder} for the given field.
     * Made static: the method uses no instance state, and as an instance
     * method a Builder could only be obtained from an already-built encoding.
     */
    public static Builder builder(String field) {
        return new Builder(field);
    }

    public static class Builder {

        private String field;
        private String featureName;
        private Map<String, Double> meanMap = new HashMap<>();
        private double defaultValue;

        public Builder(String field) {
            this.field = field;
        }

        public String getField() {
            return field;
        }

        public Builder setField(String field) {
            this.field = field;
            return this;
        }

        public Builder setFeatureName(String featureName) {
            this.featureName = featureName;
            return this;
        }

        public Builder setMeanMap(Map<String, Double> meanMap) {
            // Copy for consistency with FrequencyEncoding.Builder.setFrequencyMap,
            // and so addMeanMapEntry never mutates the caller's map.
            this.meanMap = new HashMap<>(meanMap);
            return this;
        }

        public Builder addMeanMapEntry(String valueName, double meanEncoding) {
            this.meanMap.put(valueName, meanEncoding);
            return this;
        }

        public Builder setDefaultValue(double defaultValue) {
            this.defaultValue = defaultValue;
            return this;
        }

        public TargetMeanEncoding build() {
            return new TargetMeanEncoding(field, featureName, meanMap, defaultValue);
        }
    }
}

View file

@ -16,26 +16,21 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.trainedmodel;
package org.elasticsearch.search;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.internal.SearchContext;
import java.util.List;
import java.io.IOException;
public interface TrainedModel extends ToXContentObject {
public class SearchContextException extends SearchException {
public SearchContextException(SearchContext context, String msg) {
super(context.shardTarget(), msg);
}
public SearchContextException(SearchContext context, String msg, Throwable t) {
super(context.shardTarget(), msg, t);
}
public SearchContextException(StreamInput in) throws IOException {
super(in);
}
/**
* @return List of featureNames expected by the model. In the order that they are expected
*/
List<String> getFeatureNames();
/**
* @return The name of the model
*/
String getName();
}

View file

@ -0,0 +1,192 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.trainedmodel.tree;
import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * Client-side representation of a decision-tree trained model: a flat list of
 * {@link TreeNode}s addressed by index, plus the feature names the node split
 * indices refer to.
 */
public class Tree implements TrainedModel {

    public static final String NAME = "tree";

    public static final ParseField FEATURE_NAMES = new ParseField("feature_names");
    public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure");

    // Lenient parser (unknown fields ignored) accumulating into a Builder.
    private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(NAME, true, Builder::new);

    static {
        PARSER.declareStringArray(Builder::setFeatureNames, FEATURE_NAMES);
        PARSER.declareObjectArray(Builder::setNodes, (p, c) -> TreeNode.fromXContent(p), TREE_STRUCTURE);
    }

    /** Parses a {@code Tree} from the current position of {@code parser}. */
    public static Tree fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null).build();
    }

    private final List<String> featureNames;
    private final List<TreeNode> nodes;

    Tree(List<String> featureNames, List<TreeNode> nodes) {
        // Both lists are required and exposed as unmodifiable views.
        this.featureNames = Collections.unmodifiableList(Objects.requireNonNull(featureNames));
        this.nodes = Collections.unmodifiableList(Objects.requireNonNull(nodes));
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public List<String> getFeatureNames() {
        return featureNames;
    }

    /** @return the flat, index-addressed node list (unmodifiable) */
    public List<TreeNode> getNodes() {
        return nodes;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(FEATURE_NAMES.getPreferredName(), featureNames);
        builder.field(TREE_STRUCTURE.getPreferredName(), nodes);
        builder.endObject();
        return builder;
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Tree other = (Tree) o;
        return Objects.equals(featureNames, other.featureNames)
            && Objects.equals(nodes, other.nodes);
    }

    @Override
    public int hashCode() {
        return Objects.hash(featureNames, nodes);
    }

    public static Builder builder() {
        return new Builder();
    }

    /**
     * Builder that grows the node list on demand: junction nodes reserve slots
     * for their children, which are later filled in as leaves or further
     * junctions.
     */
    public static class Builder {
        private List<String> featureNames;
        private ArrayList<TreeNode.Builder> nodes;
        // Count of node slots handed out so far; also the next free child index.
        private int numNodes;

        public Builder() {
            nodes = new ArrayList<>();
            nodes.add(null);   // reserve slot 0 for the root
            addLeaf(0, 0.0);   // the root starts life as a leaf
            numNodes = 1;
        }

        public Builder setFeatureNames(List<String> featureNames) {
            this.featureNames = featureNames;
            return this;
        }

        public Builder addNode(TreeNode.Builder node) {
            nodes.add(node);
            return this;
        }

        public Builder setNodes(List<TreeNode.Builder> nodes) {
            // Defensive copy into a mutable ArrayList so padding still works.
            this.nodes = new ArrayList<>(nodes);
            return this;
        }

        public Builder setNodes(TreeNode.Builder... nodes) {
            return setNodes(Arrays.asList(nodes));
        }

        /**
         * Add a decision node. Space for the child nodes is allocated
         * @param nodeIndex Where to place the node. This is either 0 (root) or an existing child node index
         * @param featureIndex The feature index the decision is made on
         * @param isDefaultLeft Default left branch if the feature is missing
         * @param decisionThreshold The decision threshold
         * @return The created node
         */
        public TreeNode.Builder addJunction(int nodeIndex, int featureIndex, boolean isDefaultLeft, double decisionThreshold) {
            int left = numNodes++;
            int right = numNodes++;
            nodes.ensureCapacity(nodeIndex + 1);
            padWithNulls(nodeIndex + 1);
            TreeNode.Builder junction = TreeNode.builder(nodeIndex)
                .setDefaultLeft(isDefaultLeft)
                .setLeftChild(left)
                .setRightChild(right)
                .setSplitFeature(featureIndex)
                .setThreshold(decisionThreshold);
            nodes.set(nodeIndex, junction);
            // Reserve slots for both children so they can be filled in later.
            padWithNulls(right + 1);
            return junction;
        }

        /**
         * Sets the node at {@code nodeIndex} to a leaf node.
         * @param nodeIndex The index as allocated by a call to {@link #addJunction(int, int, boolean, double)}
         * @param value The prediction value
         * @return this
         */
        public Builder addLeaf(int nodeIndex, double value) {
            padWithNulls(nodeIndex + 1);
            nodes.set(nodeIndex, TreeNode.builder(nodeIndex).setLeafValue(value));
            return this;
        }

        // Appends null placeholders until the node list reaches the given size.
        private void padWithNulls(int size) {
            while (nodes.size() < size) {
                nodes.add(null);
            }
        }

        public Tree build() {
            List<TreeNode> builtNodes = nodes.stream()
                .map(TreeNode.Builder::build)
                .collect(Collectors.toList());
            return new Tree(featureNames, builtNodes);
        }
    }
}

View file

@ -0,0 +1,280 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.trainedmodel.tree;
import org.elasticsearch.client.ml.job.config.Operator;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
 * A single node of a decision {@code Tree}. A node is either a split
 * (operator/threshold/feature plus left and right child indices) or a leaf
 * (a {@code leaf_value}); all optional fields are nullable and only those
 * that are set are serialized.
 */
public class TreeNode implements ToXContentObject {

    public static final String NAME = "tree_node";

    public static final ParseField DECISION_TYPE = new ParseField("decision_type");
    public static final ParseField THRESHOLD = new ParseField("threshold");
    public static final ParseField LEFT_CHILD = new ParseField("left_child");
    public static final ParseField RIGHT_CHILD = new ParseField("right_child");
    public static final ParseField DEFAULT_LEFT = new ParseField("default_left");
    public static final ParseField SPLIT_FEATURE = new ParseField("split_feature");
    public static final ParseField NODE_INDEX = new ParseField("node_index");
    public static final ParseField SPLIT_GAIN = new ParseField("split_gain");
    public static final ParseField LEAF_VALUE = new ParseField("leaf_value");

    // Lenient parser (unknown fields ignored) accumulating into a Builder.
    private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(
        NAME,
        true,
        Builder::new);
    static {
        PARSER.declareDouble(Builder::setThreshold, THRESHOLD);
        PARSER.declareField(Builder::setOperator,
            p -> Operator.fromString(p.text()),
            DECISION_TYPE,
            ObjectParser.ValueType.STRING);
        PARSER.declareInt(Builder::setLeftChild, LEFT_CHILD);
        PARSER.declareInt(Builder::setRightChild, RIGHT_CHILD);
        PARSER.declareBoolean(Builder::setDefaultLeft, DEFAULT_LEFT);
        PARSER.declareInt(Builder::setSplitFeature, SPLIT_FEATURE);
        PARSER.declareInt(Builder::setNodeIndex, NODE_INDEX);
        PARSER.declareDouble(Builder::setSplitGain, SPLIT_GAIN);
        PARSER.declareDouble(Builder::setLeafValue, LEAF_VALUE);
    }

    /** Parses a node into a {@link Builder}; callers invoke {@code build()} themselves. */
    public static Builder fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    private final Operator operator;
    private final Double threshold;
    private final Integer splitFeature;
    private final int nodeIndex;
    private final Double splitGain;
    private final Double leafValue;
    private final Boolean defaultLeft;
    private final Integer leftChild;
    private final Integer rightChild;

    TreeNode(Operator operator,
             Double threshold,
             Integer splitFeature,
             int nodeIndex,
             Double splitGain,
             Double leafValue,
             Boolean defaultLeft,
             Integer leftChild,
             Integer rightChild) {
        this.operator = operator;
        this.threshold = threshold;
        this.splitFeature = splitFeature;
        this.nodeIndex = nodeIndex;
        this.splitGain = splitGain;
        this.leafValue = leafValue;
        this.defaultLeft = defaultLeft;
        this.leftChild = leftChild;
        this.rightChild = rightChild;
    }

    public Operator getOperator() {
        return operator;
    }

    public Double getThreshold() {
        return threshold;
    }

    public Integer getSplitFeature() {
        return splitFeature;
    }

    public Integer getNodeIndex() {
        return nodeIndex;
    }

    public Double getSplitGain() {
        return splitGain;
    }

    public Double getLeafValue() {
        return leafValue;
    }

    public Boolean isDefaultLeft() {
        return defaultLeft;
    }

    public Integer getLeftChild() {
        return leftChild;
    }

    public Integer getRightChild() {
        return rightChild;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        // Only non-null fields are emitted; nodeIndex boxes to a non-null
        // Integer so it is always present.
        addOptionalField(builder, DECISION_TYPE, operator);
        addOptionalField(builder, THRESHOLD, threshold);
        addOptionalField(builder, SPLIT_FEATURE, splitFeature);
        addOptionalField(builder, SPLIT_GAIN, splitGain);
        addOptionalField(builder, NODE_INDEX, nodeIndex);
        addOptionalField(builder, LEAF_VALUE, leafValue);
        addOptionalField(builder, DEFAULT_LEFT, defaultLeft);
        addOptionalField(builder, LEFT_CHILD, leftChild);
        addOptionalField(builder, RIGHT_CHILD, rightChild);
        builder.endObject();
        return builder;
    }

    // Writes the field only when the value is non-null.
    private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException {
        if (value != null) {
            builder.field(field.getPreferredName(), value);
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        TreeNode that = (TreeNode) o;
        return Objects.equals(operator, that.operator)
            && Objects.equals(threshold, that.threshold)
            && Objects.equals(splitFeature, that.splitFeature)
            && Objects.equals(nodeIndex, that.nodeIndex)
            && Objects.equals(splitGain, that.splitGain)
            && Objects.equals(leafValue, that.leafValue)
            && Objects.equals(defaultLeft, that.defaultLeft)
            && Objects.equals(leftChild, that.leftChild)
            && Objects.equals(rightChild, that.rightChild);
    }

    @Override
    public int hashCode() {
        // Same nine fields as equals() (ordering differs, which is harmless).
        return Objects.hash(operator,
            threshold,
            splitFeature,
            splitGain,
            nodeIndex,
            leafValue,
            defaultLeft,
            leftChild,
            rightChild);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    public static Builder builder(int nodeIndex) {
        return new Builder(nodeIndex);
    }

    /** Mutable builder for {@link TreeNode}; all fields except the index are optional. */
    public static class Builder {
        private Operator operator;
        private Double threshold;
        private Integer splitFeature;
        private int nodeIndex;
        private Double splitGain;
        private Double leafValue;
        private Boolean defaultLeft;
        private Integer leftChild;
        private Integer rightChild;

        public Builder(int nodeIndex) {
            // BUG FIX: was `nodeIndex = nodeIndex;` — a parameter
            // self-assignment that left the field at 0 for every node.
            this.nodeIndex = nodeIndex;
        }

        private Builder() {
        }

        public Builder setOperator(Operator operator) {
            this.operator = operator;
            return this;
        }

        public Builder setThreshold(Double threshold) {
            this.threshold = threshold;
            return this;
        }

        public Builder setSplitFeature(Integer splitFeature) {
            this.splitFeature = splitFeature;
            return this;
        }

        public Builder setNodeIndex(int nodeIndex) {
            this.nodeIndex = nodeIndex;
            return this;
        }

        public Builder setSplitGain(Double splitGain) {
            this.splitGain = splitGain;
            return this;
        }

        public Builder setLeafValue(Double leafValue) {
            this.leafValue = leafValue;
            return this;
        }

        public Builder setDefaultLeft(Boolean defaultLeft) {
            this.defaultLeft = defaultLeft;
            return this;
        }

        public Builder setLeftChild(Integer leftChild) {
            this.leftChild = leftChild;
            return this;
        }

        public Integer getLeftChild() {
            return leftChild;
        }

        public Builder setRightChild(Integer rightChild) {
            this.rightChild = rightChild;
            return this;
        }

        public Integer getRightChild() {
            return rightChild;
        }

        public TreeNode build() {
            return new TreeNode(operator,
                threshold,
                splitFeature,
                nodeIndex,
                splitGain,
                leafValue,
                defaultLeft,
                leftChild,
                rightChild);
        }
    }
}

View file

@ -74,7 +74,7 @@ public class SnapshotLifecycleStats implements ToXContentObject {
PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_FAILED);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIMED_OUT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIME_MILLIS);
PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(), (p, c, n) -> SnapshotPolicyStats.parse(p, n), POLICY_STATS);
PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), SnapshotPolicyStats.PARSER, POLICY_STATS);
}
// Package visible for testing
@ -178,22 +178,25 @@ public class SnapshotLifecycleStats implements ToXContentObject {
private final long snapshotsDeleted;
private final long snapshotDeleteFailures;
public static final ParseField POLICY_ID = new ParseField("policy");
static final ParseField SNAPSHOTS_TAKEN = new ParseField("snapshots_taken");
static final ParseField SNAPSHOTS_FAILED = new ParseField("snapshots_failed");
static final ParseField SNAPSHOTS_DELETED = new ParseField("snapshots_deleted");
static final ParseField SNAPSHOT_DELETION_FAILURES = new ParseField("snapshot_deletion_failures");
private static final ConstructingObjectParser<SnapshotPolicyStats, String> PARSER =
private static final ConstructingObjectParser<SnapshotPolicyStats, Void> PARSER =
new ConstructingObjectParser<>("snapshot_policy_stats", true,
(a, id) -> {
long taken = (long) a[0];
long failed = (long) a[1];
long deleted = (long) a[2];
long deleteFailed = (long) a[3];
a -> {
String id = (String) a[0];
long taken = (long) a[1];
long failed = (long) a[2];
long deleted = (long) a[3];
long deleteFailed = (long) a[4];
return new SnapshotPolicyStats(id, taken, failed, deleted, deleteFailed);
});
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), POLICY_ID);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_TAKEN);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_FAILED);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_DELETED);
@ -209,7 +212,11 @@ public class SnapshotLifecycleStats implements ToXContentObject {
}
public static SnapshotPolicyStats parse(XContentParser parser, String policyId) {
return PARSER.apply(parser, policyId);
return PARSER.apply(parser, null);
}
public String getPolicyId() {
return policyId;
}
public long getSnapshotsTaken() {

View file

@ -27,16 +27,16 @@ import java.util.Optional;
/**
* Request to delete a data frame transform
* Request to delete a transform
*/
public class DeleteDataFrameTransformRequest implements Validatable {
public class DeleteTransformRequest implements Validatable {
public static final String FORCE = "force";
private final String id;
private Boolean force;
public DeleteDataFrameTransformRequest(String id) {
public DeleteTransformRequest(String id) {
this.id = id;
}
@ -56,7 +56,7 @@ public class DeleteDataFrameTransformRequest implements Validatable {
public Optional<ValidationException> validate() {
if (id == null) {
ValidationException validationException = new ValidationException();
validationException.addValidationError("data frame transform id must not be null");
validationException.addValidationError("transform id must not be null");
return Optional.of(validationException);
} else {
return Optional.empty();
@ -77,7 +77,7 @@ public class DeleteDataFrameTransformRequest implements Validatable {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
DeleteDataFrameTransformRequest other = (DeleteDataFrameTransformRequest) obj;
DeleteTransformRequest other = (DeleteTransformRequest) obj;
return Objects.equals(id, other.id) && Objects.equals(force, other.force);
}
}

View file

@ -28,22 +28,22 @@ import java.util.List;
import java.util.Objects;
import java.util.Optional;
public class GetDataFrameTransformRequest implements Validatable {
public class GetTransformRequest implements Validatable {
public static final String ALLOW_NO_MATCH = "allow_no_match";
/**
* Helper method to create a request that will get ALL Data Frame Transforms
* @return new {@link GetDataFrameTransformRequest} object for the id "_all"
* Helper method to create a request that will get ALL Transforms
* @return new {@link GetTransformRequest} object for the id "_all"
*/
public static GetDataFrameTransformRequest getAllDataFrameTransformsRequest() {
return new GetDataFrameTransformRequest("_all");
public static GetTransformRequest getAllTransformRequest() {
return new GetTransformRequest("_all");
}
private final List<String> ids;
private PageParams pageParams;
private Boolean allowNoMatch;
public GetDataFrameTransformRequest(String... ids) {
public GetTransformRequest(String... ids) {
this.ids = Arrays.asList(ids);
}
@ -71,7 +71,7 @@ public class GetDataFrameTransformRequest implements Validatable {
public Optional<ValidationException> validate() {
if (ids == null || ids.isEmpty()) {
ValidationException validationException = new ValidationException();
validationException.addValidationError("data frame transform id must not be null");
validationException.addValidationError("transform id must not be null");
return Optional.of(validationException);
} else {
return Optional.empty();
@ -92,7 +92,7 @@ public class GetDataFrameTransformRequest implements Validatable {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
GetDataFrameTransformRequest other = (GetDataFrameTransformRequest) obj;
GetTransformRequest other = (GetTransformRequest) obj;
return Objects.equals(ids, other.ids)
&& Objects.equals(pageParams, other.pageParams)
&& Objects.equals(allowNoMatch, other.allowNoMatch);

View file

@ -19,7 +19,7 @@
package org.elasticsearch.client.transform;
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -31,7 +31,7 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class GetDataFrameTransformResponse {
public class GetTransformResponse {
public static final ParseField TRANSFORMS = new ParseField("transforms");
public static final ParseField INVALID_TRANSFORMS = new ParseField("invalid_transforms");
@ -42,30 +42,30 @@ public class GetDataFrameTransformResponse {
new ConstructingObjectParser<>("invalid_transforms", true, args -> new InvalidTransforms((List<String>) args[0]));
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<GetDataFrameTransformResponse, Void> PARSER = new ConstructingObjectParser<>(
"get_data_frame_transform", true, args -> new GetDataFrameTransformResponse(
(List<DataFrameTransformConfig>) args[0], (int) args[1], (InvalidTransforms) args[2]));
static final ConstructingObjectParser<GetTransformResponse, Void> PARSER = new ConstructingObjectParser<>(
"get_transform", true, args -> new GetTransformResponse(
(List<TransformConfig>) args[0], (int) args[1], (InvalidTransforms) args[2]));
static {
// Discard the count field which is the size of the transforms array
INVALID_TRANSFORMS_PARSER.declareInt((a, b) -> {}, COUNT);
INVALID_TRANSFORMS_PARSER.declareStringArray(constructorArg(), TRANSFORMS);
PARSER.declareObjectArray(constructorArg(), DataFrameTransformConfig.PARSER::apply, TRANSFORMS);
PARSER.declareObjectArray(constructorArg(), TransformConfig.PARSER::apply, TRANSFORMS);
PARSER.declareInt(constructorArg(), COUNT);
PARSER.declareObject(optionalConstructorArg(), INVALID_TRANSFORMS_PARSER::apply, INVALID_TRANSFORMS);
}
public static GetDataFrameTransformResponse fromXContent(final XContentParser parser) {
return GetDataFrameTransformResponse.PARSER.apply(parser, null);
public static GetTransformResponse fromXContent(final XContentParser parser) {
return GetTransformResponse.PARSER.apply(parser, null);
}
private List<DataFrameTransformConfig> transformConfigurations;
private List<TransformConfig> transformConfigurations;
private int count;
private InvalidTransforms invalidTransforms;
public GetDataFrameTransformResponse(List<DataFrameTransformConfig> transformConfigurations,
int count,
@Nullable InvalidTransforms invalidTransforms) {
public GetTransformResponse(List<TransformConfig> transformConfigurations,
int count,
@Nullable InvalidTransforms invalidTransforms) {
this.transformConfigurations = transformConfigurations;
this.count = count;
this.invalidTransforms = invalidTransforms;
@ -80,7 +80,7 @@ public class GetDataFrameTransformResponse {
return count;
}
public List<DataFrameTransformConfig> getTransformConfigurations() {
public List<TransformConfig> getTransformConfigurations() {
return transformConfigurations;
}
@ -99,7 +99,7 @@ public class GetDataFrameTransformResponse {
return false;
}
final GetDataFrameTransformResponse that = (GetDataFrameTransformResponse) other;
final GetTransformResponse that = (GetTransformResponse) other;
return Objects.equals(this.transformConfigurations, that.transformConfigurations)
&& Objects.equals(this.count, that.count)
&& Objects.equals(this.invalidTransforms, that.invalidTransforms);

View file

@ -26,12 +26,12 @@ import org.elasticsearch.client.core.PageParams;
import java.util.Objects;
import java.util.Optional;
public class GetDataFrameTransformStatsRequest implements Validatable {
public class GetTransformStatsRequest implements Validatable {
private final String id;
private PageParams pageParams;
private Boolean allowNoMatch;
public GetDataFrameTransformStatsRequest(String id) {
public GetTransformStatsRequest(String id) {
this.id = id;
}
@ -59,7 +59,7 @@ public class GetDataFrameTransformStatsRequest implements Validatable {
public Optional<ValidationException> validate() {
if (id == null) {
ValidationException validationException = new ValidationException();
validationException.addValidationError("data frame transform id must not be null");
validationException.addValidationError("transform id must not be null");
return Optional.of(validationException);
} else {
return Optional.empty();
@ -80,7 +80,7 @@ public class GetDataFrameTransformStatsRequest implements Validatable {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
GetDataFrameTransformStatsRequest other = (GetDataFrameTransformStatsRequest) obj;
GetTransformStatsRequest other = (GetTransformStatsRequest) obj;
return Objects.equals(id, other.id)
&& Objects.equals(pageParams, other.pageParams)
&& Objects.equals(allowNoMatch, other.allowNoMatch);

View file

@ -21,7 +21,7 @@ package org.elasticsearch.client.transform;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.client.transform.transforms.DataFrameTransformStats;
import org.elasticsearch.client.transform.transforms.TransformStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@ -34,19 +34,19 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class GetDataFrameTransformStatsResponse {
public class GetTransformStatsResponse {
public static final ParseField TRANSFORMS = new ParseField("transforms");
public static final ParseField COUNT = new ParseField("count");
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<GetDataFrameTransformStatsResponse, Void> PARSER = new ConstructingObjectParser<>(
"get_data_frame_transform_stats_response", true,
args -> new GetDataFrameTransformStatsResponse((List<DataFrameTransformStats>) args[0],
static final ConstructingObjectParser<GetTransformStatsResponse, Void> PARSER = new ConstructingObjectParser<>(
"get_transform_stats_response", true,
args -> new GetTransformStatsResponse((List<TransformStats>) args[0],
(List<TaskOperationFailure>) args[1], (List<ElasticsearchException>) args[2]));
static {
PARSER.declareObjectArray(constructorArg(), DataFrameTransformStats.PARSER::apply, TRANSFORMS);
PARSER.declareObjectArray(constructorArg(), TransformStats.PARSER::apply, TRANSFORMS);
// Discard the count field which is the size of the transforms array
PARSER.declareInt((a, b) -> {}, COUNT);
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> TaskOperationFailure.fromXContent(p),
@ -55,15 +55,15 @@ public class GetDataFrameTransformStatsResponse {
AcknowledgedTasksResponse.NODE_FAILURES);
}
public static GetDataFrameTransformStatsResponse fromXContent(final XContentParser parser) {
return GetDataFrameTransformStatsResponse.PARSER.apply(parser, null);
public static GetTransformStatsResponse fromXContent(final XContentParser parser) {
return GetTransformStatsResponse.PARSER.apply(parser, null);
}
private final List<DataFrameTransformStats> transformsStats;
private final List<TransformStats> transformsStats;
private final List<TaskOperationFailure> taskFailures;
private final List<ElasticsearchException> nodeFailures;
public GetDataFrameTransformStatsResponse(List<DataFrameTransformStats> transformsStats,
public GetTransformStatsResponse(List<TransformStats> transformsStats,
@Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) {
this.transformsStats = transformsStats;
@ -71,7 +71,7 @@ public class GetDataFrameTransformStatsResponse {
this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(nodeFailures);
}
public List<DataFrameTransformStats> getTransformsStats() {
public List<TransformStats> getTransformsStats() {
return transformsStats;
}
@ -98,7 +98,7 @@ public class GetDataFrameTransformStatsResponse {
return false;
}
final GetDataFrameTransformStatsResponse that = (GetDataFrameTransformStatsResponse) other;
final GetTransformStatsResponse that = (GetTransformStatsResponse) other;
return Objects.equals(this.transformsStats, that.transformsStats)
&& Objects.equals(this.nodeFailures, that.nodeFailures)
&& Objects.equals(this.taskFailures, that.taskFailures);

View file

@ -21,7 +21,7 @@ package org.elasticsearch.client.transform;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -30,15 +30,15 @@ import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
public class PreviewDataFrameTransformRequest implements ToXContentObject, Validatable {
public class PreviewTransformRequest implements ToXContentObject, Validatable {
private final DataFrameTransformConfig config;
private final TransformConfig config;
public PreviewDataFrameTransformRequest(DataFrameTransformConfig config) {
public PreviewTransformRequest(TransformConfig config) {
this.config = config;
}
public DataFrameTransformConfig getConfig() {
public TransformConfig getConfig() {
return config;
}
@ -51,11 +51,11 @@ public class PreviewDataFrameTransformRequest implements ToXContentObject, Valid
public Optional<ValidationException> validate() {
ValidationException validationException = new ValidationException();
if (config == null) {
validationException.addValidationError("preview requires a non-null data frame config");
validationException.addValidationError("preview requires a non-null transform config");
return Optional.of(validationException);
} else {
if (config.getSource() == null) {
validationException.addValidationError("data frame transform source cannot be null");
validationException.addValidationError("transform source cannot be null");
}
}
@ -79,7 +79,7 @@ public class PreviewDataFrameTransformRequest implements ToXContentObject, Valid
if (getClass() != obj.getClass()) {
return false;
}
PreviewDataFrameTransformRequest other = (PreviewDataFrameTransformRequest) obj;
PreviewTransformRequest other = (PreviewTransformRequest) obj;
return Objects.equals(config, other.config);
}
}

View file

@ -26,23 +26,23 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
public class PreviewDataFrameTransformResponse {
public class PreviewTransformResponse {
private static final String PREVIEW = "preview";
private static final String MAPPINGS = "mappings";
@SuppressWarnings("unchecked")
public static PreviewDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
public static PreviewTransformResponse fromXContent(final XContentParser parser) throws IOException {
Map<String, Object> previewMap = parser.mapOrdered();
Object previewDocs = previewMap.get(PREVIEW);
Object mappings = previewMap.get(MAPPINGS);
return new PreviewDataFrameTransformResponse((List<Map<String, Object>>) previewDocs, (Map<String, Object>) mappings);
return new PreviewTransformResponse((List<Map<String, Object>>) previewDocs, (Map<String, Object>) mappings);
}
private List<Map<String, Object>> docs;
private Map<String, Object> mappings;
public PreviewDataFrameTransformResponse(List<Map<String, Object>> docs, Map<String, Object> mappings) {
public PreviewTransformResponse(List<Map<String, Object>> docs, Map<String, Object> mappings) {
this.docs = docs;
this.mappings = mappings;
}
@ -65,7 +65,7 @@ public class PreviewDataFrameTransformResponse {
return false;
}
PreviewDataFrameTransformResponse other = (PreviewDataFrameTransformResponse) obj;
PreviewTransformResponse other = (PreviewTransformResponse) obj;
return Objects.equals(other.docs, docs) && Objects.equals(other.mappings, mappings);
}

View file

@ -21,7 +21,7 @@ package org.elasticsearch.client.transform;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -29,17 +29,17 @@ import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
public class PutDataFrameTransformRequest implements ToXContentObject, Validatable {
public class PutTransformRequest implements ToXContentObject, Validatable {
public static final String DEFER_VALIDATION = "defer_validation";
private final DataFrameTransformConfig config;
private final TransformConfig config;
private Boolean deferValidation;
public PutDataFrameTransformRequest(DataFrameTransformConfig config) {
public PutTransformRequest(TransformConfig config) {
this.config = config;
}
public DataFrameTransformConfig getConfig() {
public TransformConfig getConfig() {
return config;
}
@ -60,17 +60,17 @@ public class PutDataFrameTransformRequest implements ToXContentObject, Validatab
public Optional<ValidationException> validate() {
ValidationException validationException = new ValidationException();
if (config == null) {
validationException.addValidationError("put requires a non-null data frame config");
validationException.addValidationError("put requires a non-null transform config");
return Optional.of(validationException);
} else {
if (config.getId() == null) {
validationException.addValidationError("data frame transform id cannot be null");
validationException.addValidationError("transform id cannot be null");
}
if (config.getSource() == null) {
validationException.addValidationError("data frame transform source cannot be null");
validationException.addValidationError("transform source cannot be null");
}
if (config.getDestination() == null) {
validationException.addValidationError("data frame transform destination cannot be null");
validationException.addValidationError("transform destination cannot be null");
}
}
@ -99,7 +99,7 @@ public class PutDataFrameTransformRequest implements ToXContentObject, Validatab
if (getClass() != obj.getClass()) {
return false;
}
PutDataFrameTransformRequest other = (PutDataFrameTransformRequest) obj;
PutTransformRequest other = (PutTransformRequest) obj;
return Objects.equals(config, other.config);
}
}

View file

@ -26,16 +26,16 @@ import org.elasticsearch.common.unit.TimeValue;
import java.util.Objects;
import java.util.Optional;
public class StartDataFrameTransformRequest implements Validatable {
public class StartTransformRequest implements Validatable {
private final String id;
private TimeValue timeout;
public StartDataFrameTransformRequest(String id) {
public StartTransformRequest(String id) {
this.id = id;
}
public StartDataFrameTransformRequest(String id, TimeValue timeout) {
public StartTransformRequest(String id, TimeValue timeout) {
this.id = id;
this.timeout = timeout;
}
@ -56,7 +56,7 @@ public class StartDataFrameTransformRequest implements Validatable {
public Optional<ValidationException> validate() {
if (id == null) {
ValidationException validationException = new ValidationException();
validationException.addValidationError("data frame transform id must not be null");
validationException.addValidationError("transform id must not be null");
return Optional.of(validationException);
} else {
return Optional.empty();
@ -77,7 +77,7 @@ public class StartDataFrameTransformRequest implements Validatable {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
StartDataFrameTransformRequest other = (StartDataFrameTransformRequest) obj;
StartTransformRequest other = (StartTransformRequest) obj;
return Objects.equals(this.id, other.id)
&& Objects.equals(this.timeout, other.timeout);
}

View file

@ -28,20 +28,20 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
public class StartDataFrameTransformResponse extends AcknowledgedTasksResponse {
public class StartTransformResponse extends AcknowledgedTasksResponse {
private static final String ACKNOWLEDGED = "acknowledged";
private static final ConstructingObjectParser<StartDataFrameTransformResponse, Void> PARSER =
AcknowledgedTasksResponse.generateParser("start_data_frame_transform_response", StartDataFrameTransformResponse::new,
private static final ConstructingObjectParser<StartTransformResponse, Void> PARSER =
AcknowledgedTasksResponse.generateParser("start_transform_response", StartTransformResponse::new,
ACKNOWLEDGED);
public static StartDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
public static StartTransformResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
public StartDataFrameTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) {
public StartTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) {
super(acknowledged, taskFailures, nodeFailures);
}

View file

@ -26,20 +26,20 @@ import org.elasticsearch.common.unit.TimeValue;
import java.util.Objects;
import java.util.Optional;
public class StopDataFrameTransformRequest implements Validatable {
public class StopTransformRequest implements Validatable {
private final String id;
private Boolean waitForCompletion;
private TimeValue timeout;
private Boolean allowNoMatch;
public StopDataFrameTransformRequest(String id) {
public StopTransformRequest(String id) {
this.id = id;
waitForCompletion = null;
timeout = null;
}
public StopDataFrameTransformRequest(String id, Boolean waitForCompletion, TimeValue timeout) {
public StopTransformRequest(String id, Boolean waitForCompletion, TimeValue timeout) {
this.id = id;
this.waitForCompletion = waitForCompletion;
this.timeout = timeout;
@ -77,7 +77,7 @@ public class StopDataFrameTransformRequest implements Validatable {
public Optional<ValidationException> validate() {
if (id == null) {
ValidationException validationException = new ValidationException();
validationException.addValidationError("data frame transform id must not be null");
validationException.addValidationError("transform id must not be null");
return Optional.of(validationException);
} else {
return Optional.empty();
@ -98,7 +98,7 @@ public class StopDataFrameTransformRequest implements Validatable {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
StopDataFrameTransformRequest other = (StopDataFrameTransformRequest) obj;
StopTransformRequest other = (StopTransformRequest) obj;
return Objects.equals(this.id, other.id)
&& Objects.equals(this.waitForCompletion, other.waitForCompletion)
&& Objects.equals(this.timeout, other.timeout)

View file

@ -28,19 +28,19 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
public class StopDataFrameTransformResponse extends AcknowledgedTasksResponse {
public class StopTransformResponse extends AcknowledgedTasksResponse {
private static final String ACKNOWLEDGED = "acknowledged";
private static final ConstructingObjectParser<StopDataFrameTransformResponse, Void> PARSER = AcknowledgedTasksResponse
.generateParser("stop_data_frame_transform_response", StopDataFrameTransformResponse::new, ACKNOWLEDGED);
private static final ConstructingObjectParser<StopTransformResponse, Void> PARSER = AcknowledgedTasksResponse
.generateParser("stop_transform_response", StopTransformResponse::new, ACKNOWLEDGED);
public static StopDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
public static StopTransformResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
public StopDataFrameTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) {
public StopTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) {
super(acknowledged, taskFailures, nodeFailures);
}

View file

@ -28,7 +28,7 @@ import org.elasticsearch.plugins.spi.NamedXContentProvider;
import java.util.Arrays;
import java.util.List;
public class DataFrameNamedXContentProvider implements NamedXContentProvider {
public class TransformNamedXContentProvider implements NamedXContentProvider {
@Override
public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {

View file

@ -21,7 +21,7 @@ package org.elasticsearch.client.transform;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate;
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -29,18 +29,18 @@ import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
public class UpdateDataFrameTransformRequest implements ToXContentObject, Validatable {
public class UpdateTransformRequest implements ToXContentObject, Validatable {
private final DataFrameTransformConfigUpdate update;
private final TransformConfigUpdate update;
private final String id;
private Boolean deferValidation;
public UpdateDataFrameTransformRequest(DataFrameTransformConfigUpdate update, String id) {
public UpdateTransformRequest(TransformConfigUpdate update, String id) {
this.update = update;
this.id = id;
}
public DataFrameTransformConfigUpdate getUpdate() {
public TransformConfigUpdate getUpdate() {
return update;
}
@ -65,10 +65,10 @@ public class UpdateDataFrameTransformRequest implements ToXContentObject, Valida
public Optional<ValidationException> validate() {
ValidationException validationException = new ValidationException();
if (update == null) {
validationException.addValidationError("put requires a non-null data frame config update object");
validationException.addValidationError("put requires a non-null transform config update object");
}
if (id == null) {
validationException.addValidationError("data frame transform id cannot be null");
validationException.addValidationError("transform id cannot be null");
}
if (validationException.validationErrors().isEmpty()) {
return Optional.empty();
@ -95,7 +95,7 @@ public class UpdateDataFrameTransformRequest implements ToXContentObject, Valida
if (getClass() != obj.getClass()) {
return false;
}
UpdateDataFrameTransformRequest other = (UpdateDataFrameTransformRequest) obj;
UpdateTransformRequest other = (UpdateTransformRequest) obj;
return Objects.equals(update, other.update)
&& Objects.equals(id, other.id)
&& Objects.equals(deferValidation, other.deferValidation);

View file

@ -19,24 +19,24 @@
package org.elasticsearch.client.transform;
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.common.xcontent.XContentParser;
import java.util.Objects;
public class UpdateDataFrameTransformResponse {
public class UpdateTransformResponse {
public static UpdateDataFrameTransformResponse fromXContent(final XContentParser parser) {
return new UpdateDataFrameTransformResponse(DataFrameTransformConfig.PARSER.apply(parser, null));
public static UpdateTransformResponse fromXContent(final XContentParser parser) {
return new UpdateTransformResponse(TransformConfig.PARSER.apply(parser, null));
}
private DataFrameTransformConfig transformConfiguration;
private TransformConfig transformConfiguration;
public UpdateDataFrameTransformResponse(DataFrameTransformConfig transformConfiguration) {
public UpdateTransformResponse(TransformConfig transformConfiguration) {
this.transformConfiguration = transformConfiguration;
}
public DataFrameTransformConfig getTransformConfiguration() {
public TransformConfig getTransformConfiguration() {
return transformConfiguration;
}
@ -55,7 +55,7 @@ public class UpdateDataFrameTransformResponse {
return false;
}
final UpdateDataFrameTransformResponse that = (UpdateDataFrameTransformResponse) other;
final UpdateTransformResponse that = (UpdateTransformResponse) other;
return Objects.equals(this.transformConfiguration, that.transformConfiguration);
}
}

View file

@ -31,14 +31,14 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constru
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Configuration containing the destination index for the {@link DataFrameTransformConfig}
* Configuration containing the destination index for the {@link TransformConfig}
*/
public class DestConfig implements ToXContentObject {
public static final ParseField INDEX = new ParseField("index");
public static final ParseField PIPELINE = new ParseField("pipeline");
public static final ConstructingObjectParser<DestConfig, Void> PARSER = new ConstructingObjectParser<>("data_frame_config_dest",
public static final ConstructingObjectParser<DestConfig, Void> PARSER = new ConstructingObjectParser<>("transform_config_dest",
true,
args -> new DestConfig((String)args[0], (String)args[1]));

View file

@ -29,7 +29,7 @@ import java.io.IOException;
import java.util.Objects;
/**
* Object for encapsulating the desired Query for a DataFrameTransform
* Object for encapsulating the desired Query for a Transform
*/
public class QueryConfig implements ToXContentObject {

View file

@ -35,14 +35,14 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
/**
* Class encapsulating all options for a {@link DataFrameTransformConfig} gathering data
* Class encapsulating all options for a {@link TransformConfig} gathering data
*/
public class SourceConfig implements ToXContentObject {
public static final ParseField QUERY = new ParseField("query");
public static final ParseField INDEX = new ParseField("index");
public static final ConstructingObjectParser<SourceConfig, Void> PARSER = new ConstructingObjectParser<>("data_frame_config_source",
public static final ConstructingObjectParser<SourceConfig, Void> PARSER = new ConstructingObjectParser<>("transform_config_source",
true,
args -> {
@SuppressWarnings("unchecked")

View file

@ -28,7 +28,7 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DataFrameTransformCheckpointStats {
public class TransformCheckpointStats {
public static final ParseField CHECKPOINT = new ParseField("checkpoint");
public static final ParseField POSITION = new ParseField("position");
@ -36,40 +36,40 @@ public class DataFrameTransformCheckpointStats {
public static final ParseField TIMESTAMP_MILLIS = new ParseField("timestamp_millis");
public static final ParseField TIME_UPPER_BOUND_MILLIS = new ParseField("time_upper_bound_millis");
public static final DataFrameTransformCheckpointStats EMPTY = new DataFrameTransformCheckpointStats(0L, null, null, 0L, 0L);
public static final TransformCheckpointStats EMPTY = new TransformCheckpointStats(0L, null, null, 0L, 0L);
private final long checkpoint;
private final DataFrameIndexerPosition position;
private final DataFrameTransformProgress checkpointProgress;
private final TransformIndexerPosition position;
private final TransformProgress checkpointProgress;
private final long timestampMillis;
private final long timeUpperBoundMillis;
public static final ConstructingObjectParser<DataFrameTransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
"data_frame_transform_checkpoint_stats", true, args -> {
public static final ConstructingObjectParser<TransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
"transform_checkpoint_stats", true, args -> {
long checkpoint = args[0] == null ? 0L : (Long) args[0];
DataFrameIndexerPosition position = (DataFrameIndexerPosition) args[1];
DataFrameTransformProgress checkpointProgress = (DataFrameTransformProgress) args[2];
TransformIndexerPosition position = (TransformIndexerPosition) args[1];
TransformProgress checkpointProgress = (TransformProgress) args[2];
long timestamp = args[3] == null ? 0L : (Long) args[3];
long timeUpperBound = args[4] == null ? 0L : (Long) args[4];
return new DataFrameTransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound);
return new TransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound);
});
static {
LENIENT_PARSER.declareLong(optionalConstructorArg(), CHECKPOINT);
LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameIndexerPosition.PARSER, POSITION);
LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameTransformProgress.PARSER, CHECKPOINT_PROGRESS);
LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformIndexerPosition.PARSER, POSITION);
LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformProgress.PARSER, CHECKPOINT_PROGRESS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), TIMESTAMP_MILLIS);
LENIENT_PARSER.declareLong(optionalConstructorArg(), TIME_UPPER_BOUND_MILLIS);
}
public static DataFrameTransformCheckpointStats fromXContent(XContentParser parser) throws IOException {
public static TransformCheckpointStats fromXContent(XContentParser parser) throws IOException {
return LENIENT_PARSER.parse(parser, null);
}
public DataFrameTransformCheckpointStats(final long checkpoint, final DataFrameIndexerPosition position,
final DataFrameTransformProgress checkpointProgress, final long timestampMillis,
final long timeUpperBoundMillis) {
public TransformCheckpointStats(final long checkpoint, final TransformIndexerPosition position,
final TransformProgress checkpointProgress, final long timestampMillis,
final long timeUpperBoundMillis) {
this.checkpoint = checkpoint;
this.position = position;
this.checkpointProgress = checkpointProgress;
@ -81,11 +81,11 @@ public class DataFrameTransformCheckpointStats {
return checkpoint;
}
public DataFrameIndexerPosition getPosition() {
public TransformIndexerPosition getPosition() {
return position;
}
public DataFrameTransformProgress getCheckpointProgress() {
public TransformProgress getCheckpointProgress() {
return checkpointProgress;
}
@ -112,7 +112,7 @@ public class DataFrameTransformCheckpointStats {
return false;
}
DataFrameTransformCheckpointStats that = (DataFrameTransformCheckpointStats) other;
TransformCheckpointStats that = (TransformCheckpointStats) other;
return this.checkpoint == that.checkpoint
&& Objects.equals(this.position, that.position)

View file

@ -29,37 +29,37 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.time.Instant;
import java.util.Objects;
public class DataFrameTransformCheckpointingInfo {
public class TransformCheckpointingInfo {
public static final ParseField LAST_CHECKPOINT = new ParseField("last", "current");
public static final ParseField NEXT_CHECKPOINT = new ParseField("next", "in_progress");
public static final ParseField OPERATIONS_BEHIND = new ParseField("operations_behind");
public static final ParseField CHANGES_LAST_DETECTED_AT = new ParseField("changes_last_detected_at");
private final DataFrameTransformCheckpointStats last;
private final DataFrameTransformCheckpointStats next;
private final TransformCheckpointStats last;
private final TransformCheckpointStats next;
private final long operationsBehind;
private final Instant changesLastDetectedAt;
private static final ConstructingObjectParser<DataFrameTransformCheckpointingInfo, Void> LENIENT_PARSER =
private static final ConstructingObjectParser<TransformCheckpointingInfo, Void> LENIENT_PARSER =
new ConstructingObjectParser<>(
"data_frame_transform_checkpointing_info",
"transform_checkpointing_info",
true,
a -> {
long behind = a[2] == null ? 0L : (Long) a[2];
Instant changesLastDetectedAt = (Instant)a[3];
return new DataFrameTransformCheckpointingInfo(
a[0] == null ? DataFrameTransformCheckpointStats.EMPTY : (DataFrameTransformCheckpointStats) a[0],
a[1] == null ? DataFrameTransformCheckpointStats.EMPTY : (DataFrameTransformCheckpointStats) a[1],
return new TransformCheckpointingInfo(
a[0] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[0],
a[1] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[1],
behind,
changesLastDetectedAt);
});
static {
LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), LAST_CHECKPOINT);
(p, c) -> TransformCheckpointStats.fromXContent(p), LAST_CHECKPOINT);
LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), NEXT_CHECKPOINT);
(p, c) -> TransformCheckpointStats.fromXContent(p), NEXT_CHECKPOINT);
LENIENT_PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), OPERATIONS_BEHIND);
LENIENT_PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
p -> TimeUtil.parseTimeFieldToInstant(p, CHANGES_LAST_DETECTED_AT.getPreferredName()),
@ -67,21 +67,21 @@ public class DataFrameTransformCheckpointingInfo {
ObjectParser.ValueType.VALUE);
}
public DataFrameTransformCheckpointingInfo(DataFrameTransformCheckpointStats last,
DataFrameTransformCheckpointStats next,
long operationsBehind,
Instant changesLastDetectedAt) {
public TransformCheckpointingInfo(TransformCheckpointStats last,
TransformCheckpointStats next,
long operationsBehind,
Instant changesLastDetectedAt) {
this.last = Objects.requireNonNull(last);
this.next = Objects.requireNonNull(next);
this.operationsBehind = operationsBehind;
this.changesLastDetectedAt = changesLastDetectedAt;
}
public DataFrameTransformCheckpointStats getLast() {
public TransformCheckpointStats getLast() {
return last;
}
public DataFrameTransformCheckpointStats getNext() {
public TransformCheckpointStats getNext() {
return next;
}
@ -94,7 +94,7 @@ public class DataFrameTransformCheckpointingInfo {
return changesLastDetectedAt;
}
public static DataFrameTransformCheckpointingInfo fromXContent(XContentParser p) {
public static TransformCheckpointingInfo fromXContent(XContentParser p) {
return LENIENT_PARSER.apply(p, null);
}
@ -113,7 +113,7 @@ public class DataFrameTransformCheckpointingInfo {
return false;
}
DataFrameTransformCheckpointingInfo that = (DataFrameTransformCheckpointingInfo) other;
TransformCheckpointingInfo that = (TransformCheckpointingInfo) other;
return Objects.equals(this.last, that.last) &&
Objects.equals(this.next, that.next) &&

View file

@ -40,7 +40,7 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DataFrameTransformConfig implements ToXContentObject {
public class TransformConfig implements ToXContentObject {
public static final ParseField ID = new ParseField("id");
public static final ParseField SOURCE = new ParseField("source");
@ -63,8 +63,8 @@ public class DataFrameTransformConfig implements ToXContentObject {
private final Version transformVersion;
private final Instant createTime;
public static final ConstructingObjectParser<DataFrameTransformConfig, Void> PARSER =
new ConstructingObjectParser<>("data_frame_transform", true,
public static final ConstructingObjectParser<TransformConfig, Void> PARSER =
new ConstructingObjectParser<>("transform", true,
(args) -> {
String id = (String) args[0];
SourceConfig source = (SourceConfig) args[1];
@ -75,7 +75,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
String description = (String)args[6];
Instant createTime = (Instant)args[7];
String transformVersion = (String)args[8];
return new DataFrameTransformConfig(id,
return new TransformConfig(id,
source,
dest,
frequency,
@ -109,34 +109,34 @@ public class DataFrameTransformConfig implements ToXContentObject {
}
public static DataFrameTransformConfig fromXContent(final XContentParser parser) {
public static TransformConfig fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
/**
* Helper method for previewing a data frame transform configuration
* Helper method for previewing a transform configuration
*
* The DataFrameTransformConfig returned from this method should only be used for previewing the resulting data.
* The TransformConfig returned from this method should only be used for previewing the resulting data.
*
* A new, valid, DataFrameTransformConfig with an appropriate destination and ID will have to be constructed to create
* A new, valid, TransformConfig with an appropriate destination and ID will have to be constructed to create
* the transform.
* @param source Source configuration for gathering the data
* @param pivotConfig Pivot config to preview
* @return A DataFrameTransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
* @return A TransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
*/
public static DataFrameTransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
return new DataFrameTransformConfig(null, source, null, null, null, pivotConfig, null, null, null);
public static TransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
return new TransformConfig(null, source, null, null, null, pivotConfig, null, null, null);
}
DataFrameTransformConfig(final String id,
final SourceConfig source,
final DestConfig dest,
final TimeValue frequency,
final SyncConfig syncConfig,
final PivotConfig pivotConfig,
final String description,
final Instant createTime,
final String version) {
TransformConfig(final String id,
final SourceConfig source,
final DestConfig dest,
final TimeValue frequency,
final SyncConfig syncConfig,
final PivotConfig pivotConfig,
final String description,
final Instant createTime,
final String version) {
this.id = id;
this.source = source;
this.dest = dest;
@ -231,7 +231,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
return false;
}
final DataFrameTransformConfig that = (DataFrameTransformConfig) other;
final TransformConfig that = (TransformConfig) other;
return Objects.equals(this.id, that.id)
&& Objects.equals(this.source, that.source)
@ -303,8 +303,8 @@ public class DataFrameTransformConfig implements ToXContentObject {
return this;
}
public DataFrameTransformConfig build() {
return new DataFrameTransformConfig(id, source, dest, frequency, syncConfig, pivotConfig, description, null, null);
public TransformConfig build() {
return new TransformConfig(id, source, dest, frequency, syncConfig, pivotConfig, description, null, null);
}
}
}

View file

@ -34,30 +34,30 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* This class holds the mutable configuration items for a data frame transform
* This class holds the mutable configuration items for a transform
*/
public class DataFrameTransformConfigUpdate implements ToXContentObject {
public class TransformConfigUpdate implements ToXContentObject {
public static final String NAME = "data_frame_transform_config_update";
private static final ConstructingObjectParser<DataFrameTransformConfigUpdate, String> PARSER = new ConstructingObjectParser<>(NAME,
public static final String NAME = "transform_config_update";
private static final ConstructingObjectParser<TransformConfigUpdate, String> PARSER = new ConstructingObjectParser<>(NAME,
false,
(args) -> {
SourceConfig source = (SourceConfig) args[0];
DestConfig dest = (DestConfig) args[1];
TimeValue frequency = args[2] == null ?
null :
TimeValue.parseTimeValue((String) args[2], DataFrameTransformConfig.FREQUENCY.getPreferredName());
TimeValue.parseTimeValue((String) args[2], TransformConfig.FREQUENCY.getPreferredName());
SyncConfig syncConfig = (SyncConfig) args[3];
String description = (String) args[4];
return new DataFrameTransformConfigUpdate(source, dest, frequency, syncConfig, description);
return new TransformConfigUpdate(source, dest, frequency, syncConfig, description);
});
static {
PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), DataFrameTransformConfig.SOURCE);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), DataFrameTransformConfig.DEST);
PARSER.declareString(optionalConstructorArg(), DataFrameTransformConfig.FREQUENCY);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), DataFrameTransformConfig.SYNC);
PARSER.declareString(optionalConstructorArg(), DataFrameTransformConfig.DESCRIPTION);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), TransformConfig.SOURCE);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), TransformConfig.DEST);
PARSER.declareString(optionalConstructorArg(), TransformConfig.FREQUENCY);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), TransformConfig.SYNC);
PARSER.declareString(optionalConstructorArg(), TransformConfig.DESCRIPTION);
}
private static SyncConfig parseSyncConfig(XContentParser parser) throws IOException {
@ -74,11 +74,11 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
private final SyncConfig syncConfig;
private final String description;
public DataFrameTransformConfigUpdate(final SourceConfig source,
final DestConfig dest,
final TimeValue frequency,
final SyncConfig syncConfig,
final String description){
public TransformConfigUpdate(final SourceConfig source,
final DestConfig dest,
final TimeValue frequency,
final SyncConfig syncConfig,
final String description) {
this.source = source;
this.dest = dest;
this.frequency = frequency;
@ -111,21 +111,21 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
if (source != null) {
builder.field(DataFrameTransformConfig.SOURCE.getPreferredName(), source);
builder.field(TransformConfig.SOURCE.getPreferredName(), source);
}
if (dest != null) {
builder.field(DataFrameTransformConfig.DEST.getPreferredName(), dest);
builder.field(TransformConfig.DEST.getPreferredName(), dest);
}
if (frequency != null) {
builder.field(DataFrameTransformConfig.FREQUENCY.getPreferredName(), frequency.getStringRep());
builder.field(TransformConfig.FREQUENCY.getPreferredName(), frequency.getStringRep());
}
if (syncConfig != null) {
builder.startObject(DataFrameTransformConfig.SYNC.getPreferredName());
builder.startObject(TransformConfig.SYNC.getPreferredName());
builder.field(syncConfig.getName(), syncConfig);
builder.endObject();
}
if (description != null) {
builder.field(DataFrameTransformConfig.DESCRIPTION.getPreferredName(), description);
builder.field(TransformConfig.DESCRIPTION.getPreferredName(), description);
}
builder.endObject();
return builder;
@ -141,7 +141,7 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
return false;
}
final DataFrameTransformConfigUpdate that = (DataFrameTransformConfigUpdate) other;
final TransformConfigUpdate that = (TransformConfigUpdate) other;
return Objects.equals(this.source, that.source)
&& Objects.equals(this.dest, that.dest)
@ -164,7 +164,7 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
return new Builder();
}
public static DataFrameTransformConfigUpdate fromXContent(final XContentParser parser) {
public static TransformConfigUpdate fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
@ -201,8 +201,8 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
return this;
}
public DataFrameTransformConfigUpdate build() {
return new DataFrameTransformConfigUpdate(source, dest, frequency, syncConfig, description);
public TransformConfigUpdate build() {
return new TransformConfigUpdate(source, dest, frequency, syncConfig, description);
}
}
}

View file

@ -37,7 +37,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
* indexer_position: the position of the indexer querying the source
* bucket_position: the position used for identifying changes
*/
public class DataFrameIndexerPosition {
public class TransformIndexerPosition {
public static final ParseField INDEXER_POSITION = new ParseField("indexer_position");
public static final ParseField BUCKET_POSITION = new ParseField("bucket_position");
@ -45,17 +45,17 @@ public class DataFrameIndexerPosition {
private final Map<String, Object> bucketPosition;
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<DataFrameIndexerPosition, Void> PARSER = new ConstructingObjectParser<>(
"data_frame_indexer_position",
public static final ConstructingObjectParser<TransformIndexerPosition, Void> PARSER = new ConstructingObjectParser<>(
"transform_indexer_position",
true,
args -> new DataFrameIndexerPosition((Map<String, Object>) args[0],(Map<String, Object>) args[1]));
args -> new TransformIndexerPosition((Map<String, Object>) args[0],(Map<String, Object>) args[1]));
static {
PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, INDEXER_POSITION, ValueType.OBJECT);
PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, BUCKET_POSITION, ValueType.OBJECT);
}
public DataFrameIndexerPosition(Map<String, Object> indexerPosition, Map<String, Object> bucketPosition) {
public TransformIndexerPosition(Map<String, Object> indexerPosition, Map<String, Object> bucketPosition) {
this.indexerPosition = indexerPosition == null ? null : Collections.unmodifiableMap(indexerPosition);
this.bucketPosition = bucketPosition == null ? null : Collections.unmodifiableMap(bucketPosition);
}
@ -78,7 +78,7 @@ public class DataFrameIndexerPosition {
return false;
}
DataFrameIndexerPosition that = (DataFrameIndexerPosition) other;
TransformIndexerPosition that = (TransformIndexerPosition) other;
return Objects.equals(this.indexerPosition, that.indexerPosition) &&
Objects.equals(this.bucketPosition, that.bucketPosition);
@ -89,7 +89,7 @@ public class DataFrameIndexerPosition {
return Objects.hash(indexerPosition, bucketPosition);
}
public static DataFrameIndexerPosition fromXContent(XContentParser parser) {
public static TransformIndexerPosition fromXContent(XContentParser parser) {
try {
return PARSER.parse(parser, null);
} catch (IOException e) {

View file

@ -30,16 +30,16 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DataFrameIndexerTransformStats extends IndexerJobStats {
public class TransformIndexerStats extends IndexerJobStats {
static ParseField EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS = new ParseField("exponential_avg_checkpoint_duration_ms");
static ParseField EXPONENTIAL_AVG_DOCUMENTS_INDEXED = new ParseField("exponential_avg_documents_indexed");
static ParseField EXPONENTIAL_AVG_DOCUMENTS_PROCESSED = new ParseField("exponential_avg_documents_processed");
public static final ConstructingObjectParser<DataFrameIndexerTransformStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
public static final ConstructingObjectParser<TransformIndexerStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
NAME,
true,
args -> new DataFrameIndexerTransformStats((long) args[0], (long) args[1], (long) args[2],
args -> new TransformIndexerStats((long) args[0], (long) args[1], (long) args[2],
(long) args[3], (long) args[4], (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9],
(Double) args[10], (Double) args[11], (Double) args[12]));
@ -59,7 +59,7 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
LENIENT_PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_DOCUMENTS_PROCESSED);
}
public static DataFrameIndexerTransformStats fromXContent(XContentParser parser) throws IOException {
public static TransformIndexerStats fromXContent(XContentParser parser) throws IOException {
return LENIENT_PARSER.parse(parser, null);
}
@ -67,11 +67,11 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
private final double expAvgDocumentsIndexed;
private final double expAvgDocumentsProcessed;
public DataFrameIndexerTransformStats(long numPages, long numInputDocuments, long numOuputDocuments,
long numInvocations, long indexTime, long searchTime,
long indexTotal, long searchTotal, long indexFailures, long searchFailures,
Double expAvgCheckpointDurationMs, Double expAvgDocumentsIndexed,
Double expAvgDocumentsProcessed) {
public TransformIndexerStats(long numPages, long numInputDocuments, long numOuputDocuments,
long numInvocations, long indexTime, long searchTime,
long indexTotal, long searchTotal, long indexFailures, long searchFailures,
Double expAvgCheckpointDurationMs, Double expAvgDocumentsIndexed,
Double expAvgDocumentsProcessed) {
super(numPages, numInputDocuments, numOuputDocuments, numInvocations, indexTime, searchTime,
indexTotal, searchTotal, indexFailures, searchFailures);
this.expAvgCheckpointDurationMs = expAvgCheckpointDurationMs == null ? 0.0 : expAvgCheckpointDurationMs;
@ -101,7 +101,7 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
return false;
}
DataFrameIndexerTransformStats that = (DataFrameIndexerTransformStats) other;
TransformIndexerStats that = (TransformIndexerStats) other;
return Objects.equals(this.numPages, that.numPages)
&& Objects.equals(this.numInputDocuments, that.numInputDocuments)

View file

@ -28,7 +28,7 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DataFrameTransformProgress {
public class TransformProgress {
public static final ParseField TOTAL_DOCS = new ParseField("total_docs");
public static final ParseField DOCS_REMAINING = new ParseField("docs_remaining");
@ -36,10 +36,10 @@ public class DataFrameTransformProgress {
public static final ParseField DOCS_PROCESSED = new ParseField("docs_processed");
public static final ParseField DOCS_INDEXED = new ParseField("docs_indexed");
public static final ConstructingObjectParser<DataFrameTransformProgress, Void> PARSER = new ConstructingObjectParser<>(
"data_frame_transform_progress",
public static final ConstructingObjectParser<TransformProgress, Void> PARSER = new ConstructingObjectParser<>(
"transform_progress",
true,
a -> new DataFrameTransformProgress((Long) a[0], (Long)a[1], (Double)a[2], (Long)a[3], (Long)a[4]));
a -> new TransformProgress((Long) a[0], (Long)a[1], (Double)a[2], (Long)a[3], (Long)a[4]));
static {
PARSER.declareLong(optionalConstructorArg(), TOTAL_DOCS);
@ -49,7 +49,7 @@ public class DataFrameTransformProgress {
PARSER.declareLong(optionalConstructorArg(), DOCS_INDEXED);
}
public static DataFrameTransformProgress fromXContent(XContentParser parser) {
public static TransformProgress fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
@ -59,11 +59,11 @@ public class DataFrameTransformProgress {
private final long documentsProcessed;
private final long documentsIndexed;
public DataFrameTransformProgress(Long totalDocs,
Long remainingDocs,
Double percentComplete,
Long documentsProcessed,
Long documentsIndexed) {
public TransformProgress(Long totalDocs,
Long remainingDocs,
Double percentComplete,
Long documentsProcessed,
Long documentsIndexed) {
this.totalDocs = totalDocs;
this.remainingDocs = remainingDocs == null ? totalDocs : remainingDocs;
this.percentComplete = percentComplete;
@ -104,7 +104,7 @@ public class DataFrameTransformProgress {
return false;
}
DataFrameTransformProgress that = (DataFrameTransformProgress) other;
TransformProgress that = (TransformProgress) other;
return Objects.equals(this.remainingDocs, that.remainingDocs)
&& Objects.equals(this.totalDocs, that.totalDocs)
&& Objects.equals(this.percentComplete, that.percentComplete)

Some files were not shown because too many files have changed in this diff Show more